Spaces:
Sleeping
Sleeping
File size: 1,877 Bytes
9e13fb2 c0ba980 ab0aaa1 c0ba980 9e13fb2 c0ba980 ab0aaa1 c0ba980 ab0aaa1 c0ba980 ab0aaa1 9e13fb2 c0ba980 9e13fb2 c0ba980 ab0aaa1 9e13fb2 c0ba980 9e13fb2 c0ba980 9e13fb2 c0ba980 9e13fb2 c0ba980 9e13fb2 ab0aaa1 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 |
import gradio as gr
from outlines.models.llamacpp import LlamaCpp
from outlines import generate, samplers
from pydantic import BaseModel, Field
from typing import Optional
import json
# Define the output schema
class Medication(BaseModel):
    """Structured representation of a single parsed medication order.

    The Field descriptions are not just documentation: they are emitted into
    the JSON schema that `outlines.generate.json` uses to constrain the model's
    output, so they directly steer generation.
    """
    drug_name: str = Field(description="The name of the drug.")
    is_generic: bool = Field(description="Indicates if the drug name is a generic drug name.")
    # All remaining fields are optional: free-text orders often omit them.
    strength: Optional[str] = Field(default=None, description="The strength of the drug.")
    unit: Optional[str] = Field(default=None, description="The unit of measurement for the drug strength.")
    dosage_form: Optional[str] = Field(default=None, description="The form of the drug (e.g., patch, tablet).")
    frequency: Optional[str] = Field(default=None, description="The frequency of drug administration.")
    route: Optional[str] = Field(default=None, description="The route of administration (e.g., oral, topical).")
    is_prn: Optional[bool] = Field(default=None, description="Whether the medication is taken 'as needed'.")
    total_daily_dose_mg: Optional[float] = Field(default=None, description="The total daily dose in milligrams.")
# Load your model locally via llama-cpp
model = LlamaCpp(
    model_path="/path/to/cmcmaster/drug_parsing_Llama-3.2-1B-Instruct-Q5_K_S-GGUF.gguf",  # Change this path
    temperature=0.0,   # deterministic output; pairs with the greedy sampler below
    max_tokens=512
)
# Greedy decoding: always pick the highest-probability token (no sampling).
sampler = samplers.greedy()
# Prepare structured generator — constrains generation to the Medication JSON schema.
structured_generator = generate.json(model, Medication, sampler = sampler)
def respond(
    message,
    history: list[tuple[str, str]],
):
    """Chat handler: parse *message* into a Medication and yield it as JSON.

    Any failure (model error, validation error, ...) is caught and surfaced
    to the user as an error string instead of crashing the interface.
    The *history* argument is required by gradio's ChatInterface but unused.
    """
    try:
        parsed = structured_generator(message)
        reply = json.dumps(parsed.model_dump(), indent=2)
    except Exception as exc:
        reply = f"Error: {str(exc)}"
    yield reply
# Gradio interface — wires the respond() handler into a chat UI.
demo = gr.ChatInterface(
    respond
)

# Fix: removed a stray trailing "|" after demo.launch() (copy/paste artifact)
# that made the file a syntax error.
if __name__ == "__main__":
    demo.launch()