hponepyae committed · Commit d513773 · verified · 1 Parent(s): 8ec5a02

Update app.py


Edit the response so that it does not include the prompt

Files changed (1):
  1. app.py +16 -20
app.py CHANGED

@@ -40,21 +40,19 @@ def get_clinical_code(clinical_text):
     """
     if not model_loaded:
         return "Error: The model could not be loaded. Please check the logs."
-
     if not clinical_text:
         return "Please enter some clinical text."
 
     # This is our prompt template. It's designed to guide the model
     # to perform the specific task of clinical coding.
     # We are asking for an ICD-10 code, which is a common standard.
-    prompt = f"""
-    <start_of_turn>user
-    You are an expert medical coder. Your task is to analyze the following clinical text and determine the most appropriate ICD-10 code. Provide only the ICD-10 code and a brief description.
-    Clinical Text: "{clinical_text}"
-    Provide the ICD-10 code and a brief description.
-    <end_of_turn>
-    <start_of_turn>model
-    """
+    prompt = f"""<start_of_turn>user
+    You are an expert medical coder. Your task is to analyze the following clinical text and determine the most appropriate ICD-10 code. Provide only the ICD-10 code and a brief description.
+    Clinical Text: "{clinical_text}"
+    Provide the ICD-10 code and a brief description.
+    <end_of_turn>
+    <start_of_turn>model
+    """
     # Prepare the input for the model
     # It's good practice to ensure the input_ids are on the correct device.
     # model.device will give you the device where the model currently resides (GPU if available).
@@ -70,15 +68,16 @@ def get_clinical_code(clinical_text):
         temperature=0.7, # A lower temperature makes the output more deterministic
     )
 
-    # Decode the output and clean it up
-    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
-
-    # Extract the relevant part of the response
-    # The model will output the prompt as well, so we need to remove it.
-    model_response_start = response.find("<start_of_turn>model") + len("<start_of_turn>model")
-    clean_response = response[model_response_start:].strip()
+    # Decode only the newly generated tokens
+    # We slice the outputs tensor to exclude the input_ids (the prompt)
+    generated_tokens = outputs[0, input_ids.input_ids.shape[1]:]
+    response = tokenizer.decode(generated_tokens, skip_special_tokens=True)
 
-    return clean_response
+    # The prompt explicitly asks for "only the ICD-10 code and a brief description."
+    # So, we expect the model to start directly with the code and description.
+    # No further slicing based on "<start_of_turn>model" should be needed for *this* part.
+    # We can just return the decoded response.
+    return response.strip()
 
 # Create the Gradio Interface
 with gr.Blocks(theme=gr.themes.Soft()) as demo:
@@ -89,7 +88,6 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
         *Disclaimer: This is a demonstration and not for professional medical use.*
         """
     )
-
     with gr.Row():
         # Input Textbox
         input_text = gr.Textbox(
@@ -97,7 +95,6 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
             placeholder="e.g., Patient presents with a severe headache and photophobia...",
             lines=10
         )
-
         # Output Textbox
         output_text = gr.Textbox(
            label="Suggested Clinical Code (ICD-10)",
@@ -112,7 +109,6 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
        inputs=input_text,
        outputs=output_text
    )
-
    gr.Examples(
        examples=[
            ["The patient complains of a persistent cough and fever for the past three days. Chest X-ray shows signs of pneumonia."],
 