feat: rework ui
app.py CHANGED
@@ -51,6 +51,7 @@ def retrieve(q, k=3):
         file_sources.append(meta)
     return docs, file_sources
 
+
 def make_prompt(q, docs):
     context = "\n\n".join(f"Title: {d['title']}\nPages: {d['pages']}" for d in docs)
     prompt = f"detailed thinking off\n"
@@ -63,7 +64,8 @@ def build_markdown_links(file_input):
         url = f"{github_base_url}/{item['directory']}/{item['source']}"
         line = f"**Source {idx}:** [{item['source']}]({url}) on page {item['page']}"
         lines.append(line)
-    return "\n\n".join(lines)
+    return "\n# Links to Documents: \n\n".join(lines)
+
 
 def build_markdown_chunks(docs):
     lines = []
@@ -72,7 +74,7 @@ def build_markdown_chunks(docs):
         page = d['title']['page']
         text = d['pages']
         lines.append(f"**Chunk {idx}:** {title} on page {page}\n\n{text}")
-    return "\n\n".join(lines)
+    return "\n# Extracted Source Information: \n\n".join(lines)
 
 @spaces.GPU()
 def qa_fn(question, top_k, temperature, max_tokens):
@@ -99,25 +101,29 @@ def qa_fn(question, top_k, temperature, max_tokens):
     output = output.split("Output:", 1)[1].strip()
     return output, file_links, markdown_chunks
 
-
-
-
-top_k = gr.Slider(1, 7, value=4, step=1, label="Top-K Documents")
-temperature = gr.Slider(0.1, 1.0, value=0.6, step=0.05, label="Temperature")
-max_tokens = gr.Slider(64, 1024, value=512, step=64, label="Max Answer Length")
-submit = gr.Button("Submit")
-answer_output = gr.Textbox(label="Answer")
-link_output = gr.Markdown(label="Source Link")
-chunks_output = gr.Markdown(label="Used Chunks")
+outputs_answer = gr.Textbox(label="Answer")
+outputs_link = gr.Markdown(label="Source Link")
+outputs_chunks = gr.Markdown(label="Used Chunks")
 
 
-
-
-
-
-
-
-
-
+demo = gr.Interface(
+    fn=qa_fn,
+    inputs=[
+        gr.Textbox(lines=2, label="Your Question"),
+    ],
+    additional_inputs_accordion=[
+        gr.Slider(1, 7, value=4, step=1, label="Top-K Documents"),
+        gr.Slider(0.1, 1.0, value=0.6, step=0.05, label="Temperature"),
+        gr.Slider(64, 1024, value=512, step=64, label="Max Answer Length")
+    ],
+    outputs=[outputs_answer, outputs_link, outputs_chunks],
+    title="GDPR Legal Assistant",
+    description="Ask any question about GDPR or EDPB documents.",
+    allow_flagging="never",
+    layout="vertical",
+    fill_width = True,
+    live = True,
+)
 
-
+if __name__ == "__main__":
+    demo.launch(share=True)
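Editor's note: `str.join` places its separator between every pair of items, so the new return values in `build_markdown_links` and `build_markdown_chunks` repeat the "# Links to Documents:" / "# Extracted Source Information:" heading between entries and omit it entirely when only one entry is present. If a single heading above the list is the intent (an assumption, not something the commit states), a sketch along these lines would do it; the function name is hypothetical:

def build_markdown_links_with_heading(lines):
    # Hypothetical variant: emit the heading once, then join the per-source lines normally.
    if not lines:
        return ""
    return "# Links to Documents:\n\n" + "\n\n".join(lines)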
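Editor's note: the unchanged context line `output = output.split("Output:", 1)[1].strip()` raises an IndexError whenever the model reply contains no "Output:" marker. A defensive rewrite, offered only as an assumption about the intended behavior:

marker = "Output:"
# Fall back to the full reply when the marker is missing instead of raising IndexError.
output = output.split(marker, 1)[1].strip() if marker in output else output.strip()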
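Editor's note on the reworked UI: in Gradio 4.x, `gr.Interface` expects the extra controls in `additional_inputs` (a list of components), while `additional_inputs_accordion` only names or supplies the accordion that groups them, and `layout` comes from much older Gradio releases. The sketch below shows how the same wiring could look under those assumptions; it is illustrative, not the Space's actual code, and the placeholder `qa_fn` body stands in for the real retrieval and generation pipeline.

import gradio as gr

def qa_fn(question, top_k, temperature, max_tokens):
    # Placeholder body: the real Space runs retrieval + generation here.
    return f"(answer to: {question})", "", ""

demo = gr.Interface(
    fn=qa_fn,
    inputs=gr.Textbox(lines=2, label="Your Question"),
    additional_inputs=[
        gr.Slider(1, 7, value=4, step=1, label="Top-K Documents"),
        gr.Slider(0.1, 1.0, value=0.6, step=0.05, label="Temperature"),
        gr.Slider(64, 1024, value=512, step=64, label="Max Answer Length"),
    ],
    additional_inputs_accordion="Generation settings",  # assumed label, not from the commit
    outputs=[
        gr.Textbox(label="Answer"),
        gr.Markdown(label="Source Link"),
        gr.Markdown(label="Used Chunks"),
    ],
    title="GDPR Legal Assistant",
    description="Ask any question about GDPR or EDPB documents.",
    allow_flagging="never",
)

if __name__ == "__main__":
    demo.launch()

`live=True` from the commit is left out here because it re-runs the GPU-backed `qa_fn` on every input change; keeping the default submit-button flow is a design judgement on my part, not something the commit requires.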