Spaces: Runtime error
Update app.py
app.py
CHANGED
@@ -0,0 +1,135 @@
+# Rewriting the app into a Gradio-compatible format
+import zipfile
+import os
+
+# Directory for the Gradio version of the app
+gradio_dir = "/mnt/data/exam-ai-gradio"
+os.makedirs(gradio_dir, exist_ok=True)
+
+# Gradio-based app.py content
+gradio_app_code = '''import gradio as gr
+from transformers import pipeline
+from googlesearch import search
+import requests
+from bs4 import BeautifulSoup
+from sentence_transformers import SentenceTransformer
+import faiss
+import numpy as np
+
+# Initialize models once at startup
+summarizer = pipeline("summarization", model="sshleifer/distilbart-cnn-12-6")
+embed_model = SentenceTransformer("all-MiniLM-L6-v2")
+
+def summarize_text(text, max_len=512):
+    if not text.strip():
+        return "No content to summarize."
+    try:
+        # truncation=True keeps long inputs within the model's context window
+        summary = summarizer(text, max_length=max_len, min_length=100,
+                             do_sample=False, truncation=True)
+        return summary[0]["summary_text"]
+    except Exception as e:
+        return f"Summarization error: {e}"
+
+def search_links(query, max_results=5):
+    try:
+        return list(search(query, num_results=max_results))
+    except Exception:
+        return []
+
+def fetch_page_content(url):
+    try:
+        res = requests.get(url, timeout=10)
+        soup = BeautifulSoup(res.text, "html.parser")
+        return soup.get_text(separator=" ", strip=True)
+    except Exception:
+        return ""
+
+def embed_chunks(chunks):
+    return embed_model.encode(chunks)
+
+def create_faiss_index(chunks):
+    embeddings = embed_chunks(chunks)
+    index = faiss.IndexFlatL2(embeddings.shape[1])
+    index.add(np.array(embeddings))
+    return index, embeddings
+
+def generate_notes(query):
+    query_phrases = [
+        f"{query} exam syllabus 2025",
+        f"{query} exam dates",
+        f"{query} preparation resources",
+        f"{query} important topics",
+    ]
+
+    chunks = []
+    docs = []
+
+    # Fetch pages for each search phrase; keep only pages with real content
+    for phrase in query_phrases:
+        urls = search_links(phrase, max_results=3)
+        for url in urls:
+            content = fetch_page_content(url)
+            if len(content.strip()) > 200:
+                docs.append(content)
+
+    # Split documents into fixed-size 300-character chunks
+    for doc in docs:
+        chunks.extend([doc[i:i + 300] for i in range(0, len(doc), 300)])
+
+    if not chunks:
+        return "⚠️ No content could be retrieved. Please try again with a different query."
+
+    # Retrieve the chunks most similar to the query prompt
+    index, _ = create_faiss_index(chunks)
+    prompt = f"important topics and notes for {query} exam"
+    query_vec = embed_chunks([prompt])[0].reshape(1, -1)
+    k = min(15, len(chunks))  # avoid asking FAISS for more results than exist
+    D, I = index.search(query_vec, k=k)
+    selected = [chunks[i] for i in I[0]]
+    unique_chunks = list(set(c.strip() for c in selected if len(c.strip()) > 200))
+    combined = "\\n\\n".join(unique_chunks[:10])
+    notes = summarize_text(combined)
+
+    return notes
+
+iface = gr.Interface(
+    fn=generate_notes,
+    inputs=gr.Textbox(lines=1, placeholder="Enter exam name (e.g., AAI ATC)", label="Exam Name"),
+    outputs=gr.Textbox(lines=15, label="AI-Generated Important Topic Notes"),
+    title="📘 AI Exam Assistant",
+    description="Enter your exam name and get summarized notes with syllabus, dates, topics and resources.",
+)
+
+if __name__ == "__main__":
+    iface.launch()
+'''
+
+# Gradio-specific requirements
+gradio_requirements = '''gradio
+transformers
+torch
+sentence-transformers
+faiss-cpu
+googlesearch-python
+beautifulsoup4
+requests
+'''
+
+# Save files
+with open(os.path.join(gradio_dir, "app.py"), "w") as f:
+    f.write(gradio_app_code)
+
+with open(os.path.join(gradio_dir, "requirements.txt"), "w") as f:
+    f.write(gradio_requirements)
+
+# Create zip of the generated project
+zip_path = "/mnt/data/exam-ai-gradio.zip"
+with zipfile.ZipFile(zip_path, "w") as zipf:
+    for root, _, files in os.walk(gradio_dir):
+        for file in files:
+            full_path = os.path.join(root, file)
+            arcname = os.path.relpath(full_path, gradio_dir)
+            zipf.write(full_path, arcname=arcname)
+
+zip_path
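Note: as committed, app.py is the generator script above rather than the Gradio app it writes out, which is a likely cause of the Space's runtime error: Hugging Face Spaces expects app.py itself to define and launch the interface. Deploying the generated exam-ai-gradio/app.py and requirements.txt as the Space's root files should resolve this. As a quick local sanity check, a minimal sketch that assumes the script above has already been run and produced /mnt/data/exam-ai-gradio.zip:

import zipfile

# Assumption: the generator script has been run and the archive exists here.
zip_path = "/mnt/data/exam-ai-gradio.zip"
with zipfile.ZipFile(zip_path) as zf:
    print(zf.namelist())  # expected: ['app.py', 'requirements.txt']
    zf.extractall("exam-ai-gradio")  # unpack, then run: python exam-ai-gradio/app.py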