Pranav0111 committed on
Commit
34a309c
·
verified ·
1 Parent(s): 13ee093

Update chatbot.py

Files changed (1)
  1. chatbot.py +51 -21
chatbot.py CHANGED
@@ -1,7 +1,8 @@
+# chatbot.py
 import streamlit as st
 import pandas as pd
 import os
-from datetime import datetime
+import tempfile
 
 try:
     import google.generativeai as genai
@@ -17,20 +18,21 @@ class ChatbotManager:
         else:
             self.model = None
 
+        self.initialize_chat()
+
+    def initialize_chat(self):
+        """Initialize chat session state variables"""
         if 'uploaded_df' not in st.session_state:
             st.session_state.uploaded_df = None
         if 'chat_history' not in st.session_state:
             st.session_state.chat_history = []
 
-    def render_chat_interface(self):
-        """Render the main chat interface"""
-        st.header("📊 Data Analysis Chatbot")
-
-        if not GEMINI_AVAILABLE:
-            st.warning("Gemini API not available - running in limited mode")
+    def render_chat(self):
+        """Main chat interface compatible with your pages.py structure"""
+        st.header("💬 AI Business Mentor")
 
         # File upload section
-        uploaded_file = st.file_uploader("Choose a CSV file", type="csv")
+        uploaded_file = st.file_uploader("Upload your business data (CSV)", type=['csv'])
 
         if uploaded_file is not None:
             self._process_uploaded_file(uploaded_file)
@@ -38,20 +40,32 @@ class ChatbotManager:
         # Chat interface
         if st.session_state.uploaded_df is not None:
             self._render_chat_window()
+        else:
+            st.info("Upload a CSV file to chat with your data")
 
     def _process_uploaded_file(self, uploaded_file):
         """Process the uploaded CSV file"""
         try:
-            df = pd.read_csv(uploaded_file)
+            with tempfile.NamedTemporaryFile(delete=False, suffix='.csv') as tmp:
+                tmp.write(uploaded_file.getvalue())
+                tmp_path = tmp.name
+
+            df = pd.read_csv(tmp_path)
             st.session_state.uploaded_df = df
-            st.success("Data successfully loaded!")
+            st.success("Data loaded successfully!")
 
             with st.expander("View Data Preview"):
                 st.dataframe(df.head())
 
-            # Initial analysis
+            # Clean up temp file
+            os.unlink(tmp_path)
+
+            # Initial analysis if Gemini is available
             if self.model:
-                initial_prompt = f"Briefly describe this dataset with {len(df)} rows and {len(df.columns)} columns."
+                initial_prompt = (
+                    f"Provide a 2-3 sentence overview of this dataset with {len(df)} rows and {len(df.columns)} columns. "
+                    "Then suggest 3 specific business insights we could extract."
+                )
                 response = self._generate_response(initial_prompt)
                 st.session_state.chat_history.append({
                     "role": "assistant",
@@ -63,7 +77,7 @@
 
     def _render_chat_window(self):
         """Render the chat conversation window"""
-        st.subheader("Chat About Your Data")
+        st.subheader("Chat About Your Business Data")
 
         # Display chat history
         for message in st.session_state.chat_history:
@@ -71,7 +85,7 @@
                 st.markdown(message["content"])
 
         # User input
-        if prompt := st.chat_input("Ask about your data..."):
+        if prompt := st.chat_input("Ask about your business data..."):
             self._handle_user_input(prompt)
 
     def _handle_user_input(self, prompt):
@@ -85,7 +99,7 @@
 
         # Generate and display assistant response
         with st.chat_message("assistant"):
-            with st.spinner("Thinking..."):
+            with st.spinner("Analyzing..."):
                 response = self._generate_response(prompt)
                 st.markdown(response)
 
@@ -99,17 +113,33 @@
         if self.model:
             # Use Gemini if available
             try:
-                data_summary = f"Data: {len(df)} rows, columns: {', '.join(df.columns)}"
-                full_prompt = f"{data_summary}\n\nUser question: {prompt}"
+                data_summary = (
+                    f"Dataset shape: {df.shape}\n"
+                    f"Columns: {', '.join(df.columns)}\n"
+                    f"First 3 rows:\n{df.head(3).to_markdown()}"
+                )
+                full_prompt = (
+                    "You're a business data analyst. The user has uploaded this data:\n"
+                    f"{data_summary}\n\n"
+                    f"User question: {prompt}\n\n"
+                    "Provide a detailed, professional response with actionable insights. "
+                    "If appropriate, include:\n"
+                    "- Key statistics\n"
+                    "- Business implications\n"
+                    "- Recommended visualizations\n"
+                    "- Potential next steps"
+                )
                 response = self.model.generate_content(full_prompt)
                 return response.text
             except Exception as e:
-                return f"Gemini error: {str(e)}"
+                return f"⚠️ Analysis error: {str(e)}"
         else:
             # Fallback basic analysis
             if "summary" in prompt.lower():
-                return f"Basic summary:\n{df.describe().to_markdown()}"
+                return f"📊 Basic Statistics:\n{df.describe().to_markdown()}"
             elif "columns" in prompt.lower():
-                return f"Columns: {', '.join(df.columns)}"
+                return f"📋 Columns:\n{', '.join(df.columns)}"
+            elif "missing" in prompt.lower():
+                return f"🔍 Missing Values:\n{df.isnull().sum().to_markdown()}"
             else:
-                return "I can provide basic info about columns and summary statistics."
+                return "💡 Ask me about: data summary, columns, or missing values"
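Usage note: a minimal sketch, not part of this commit, of how the renamed render_chat() entry point might be wired into a Streamlit page. The page filename and the assumption that ChatbotManager() takes no required constructor arguments are illustrative only.

    # chat_page.py (hypothetical) — run with: streamlit run chat_page.py
    from chatbot import ChatbotManager

    manager = ChatbotManager()  # assumed no-arg constructor; uses Gemini when available, fallback analysis otherwise
    manager.render_chat()       # entry point introduced in this commit (previously render_chat_interface)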