krish4950 committed
Commit 6151bfe
1 Parent(s): d6c951b

Updated app.py

Files changed (1)
  1. app.py +20 -15
app.py CHANGED
@@ -423,18 +423,19 @@ def prompt_comb(input_file):
 
 # Define the function to call the model
 def call_model(model_name, input_file, prompt_name):
-    #index=int(prompt_name.split('prompt')[1]) - 1
-    prompt_combine=prompt_comb(input_file)
+    prompt_combine = prompt_comb(input_file)
+
+    if prompt_name == "Basic_v1":
+        index = 0
+    elif prompt_name == "Basic_v2":
+        index = 1
+    elif prompt_name == "Generalize_v1":
+        index = 2
+    elif prompt_name == "Generalize_v2":
+        index = 3
+
+    input_message = prompt_combine[index]
 
-    if prompt_name=="Basic_v1":
-        index=0
-    elif prompt_name=="Basic_v2":
-        index=1
-    elif prompt_name=="Generalize_v1":
-        index=2
-    elif prompt_name=="Generalize_v2":
-        index=3
-    input_message=prompt_combine[index]
     # Call the selected model
     if model_name == "llama":
         output = call_model_llama(input_message)
@@ -444,6 +445,7 @@ def call_model(model_name, input_file, prompt_name):
         output = call_model_Gemini(input_message)
     else:
         output = call_model_Gemini1(input_message)
+
     # Decode the output
     decoded_string = html.unescape(output)
     return decoded_string
@@ -456,14 +458,17 @@ def launch_interface():
     prompt_dropdown = gr.Dropdown(["Basic_v1", "Basic_v2", "Generalize_v1", "Generalize_v2"], label="Select Prompt")
 
     # Define the output component
-    output_text = gr.Textbox(label="Output")
-    # Create the Gradio interface
+    output_text = gr.Textbox(label="Output", interactive=False)
+
+    # Create the Gradio interface with dark theme
     interface = gr.Interface(
         fn=call_model,
         inputs=[model_dropdown, input_file, prompt_dropdown],
         outputs=output_text,
-        title="Part Identification Number(PINs)",
-        description="Choose a model **(llama-preferred)** and Upload a JSON file, select a prompt **(Generalize_v2-preffered)** , to generalize the PINs.",
+        title="Part Identification Number (PINs)",
+        description="Choose a model **(llama-preferred)** and Upload a JSON file, select a prompt **(Generalize_v2-preferred)**, to generalize the PINs.",
+        theme="dark", # Set the theme to dark
+
     )
 
     return interface
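
For reference, the new prompt selection in call_model simply picks one of the four prompts returned by prompt_comb(input_file), in the same order as the dropdown choices. A minimal sketch of that mapping written as a dict lookup (illustrative only, not the code in app.py; the select_prompt helper and its fallback index are assumptions):

    # Sketch only: mirrors the prompt selection added in this commit, assuming
    # prompt_comb(input_file) returns the prompts in the dropdown order
    # [Basic_v1, Basic_v2, Generalize_v1, Generalize_v2].
    PROMPT_INDEX = {
        "Basic_v1": 0,
        "Basic_v2": 1,
        "Generalize_v1": 2,
        "Generalize_v2": 3,
    }

    def select_prompt(prompt_combine, prompt_name):
        # Hypothetical helper: fall back to the Generalize_v2 prompt for an unknown name.
        index = PROMPT_INDEX.get(prompt_name, 3)
        return prompt_combine[index]

With such a helper, the if/elif ladder in call_model would reduce to input_message = select_prompt(prompt_comb(input_file), prompt_name).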