khanhamzawiser committed on
Commit
ff48fa7
·
verified ·
1 Parent(s): 8b52d2b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +86 -131
app.py CHANGED
@@ -1,150 +1,105 @@
1
- import gradio as gr
2
- from huggingface_hub import InferenceClient
3
- import psycopg2
4
- import os
 
 
5
  import logging
6
- from datetime import datetime, timezone
7
 
8
  # Set up logging
9
  logging.basicConfig(level=logging.DEBUG)
10
  logger = logging.getLogger(__name__)
11
 
12
- # Hugging Face Zephyr model
13
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
14
 
15
- # Use your existing database connection settings
16
- DB_CONFIG = {
17
- "host": "127.0.0.1",
18
- "port": 5434,
19
- "database": "postgres",
20
- "user": "postgres",
21
- "password": "password"
22
- }
23
 
24
- # Query TimescaleDB with improved error handling
25
- def query_timescaledb(sql_query):
26
- try:
27
- logger.debug("Attempting to connect to the database...")
28
- with psycopg2.connect(**DB_CONFIG) as conn:
29
- logger.debug("Connected to the database successfully.")
30
- with conn.cursor() as cur:
31
- cur.execute(sql_query)
32
- result = cur.fetchall()
33
- logger.debug("Query executed successfully.")
34
- return result
35
- except Exception as e:
36
- logger.error(f"DB Error: {e}")
37
- return f"DB Error: {e}"
 
 
 
 
38
 
39
- # Modified to match your table structure
40
- def get_sql_for_question(message):
41
- message = message.lower()
42
-
43
- if "average current" in message:
44
- return """
45
- SELECT AVG("CT_Avg") as avg_current
46
- FROM machine_current_log
47
- WHERE created_at >= NOW() - INTERVAL '1 day';
48
- """, "Here's the average current over the past 24 hours:"
49
 
50
- elif "machine status" in message:
51
- return """
52
- SELECT mac, state, state_duration, fault_status
53
- FROM machine_current_log
54
- WHERE created_at = (
55
- SELECT MAX(created_at)
56
- FROM machine_current_log
57
- )
58
- LIMIT 5;
59
- """, "Here are the latest machine statuses:"
60
-
61
- elif "current readings" in message:
62
- return """
63
- SELECT mac, created_at, "CT1", "CT2", "CT3", "CT_Avg"
64
- FROM machine_current_log
65
- ORDER BY created_at DESC
66
- LIMIT 5;
67
- """, "Here are the latest current readings:"
68
-
69
- elif "fault status" in message:
70
- return """
71
- SELECT fault_status, COUNT(*)
72
- FROM machine_current_log
73
- WHERE created_at >= NOW() - INTERVAL '1 day'
74
- GROUP BY fault_status;
75
- """, "Here's the distribution of fault statuses in the last 24 hours:"
76
-
77
- elif "firmware versions" in message:
78
- return """
79
- SELECT DISTINCT fw_version, COUNT(*)
80
- FROM machine_current_log
81
- GROUP BY fw_version;
82
- """, "Here are the firmware versions in use:"
83
 
84
- return None, None
 
 
 
 
 
85
 
86
- # Respond using LLM + data if relevant
87
- def respond(message, history: list[tuple[str, str]], system_message, max_tokens, temperature, top_p):
88
- sql_query, context_prefix = get_sql_for_question(message)
 
 
 
 
 
89
 
90
- if sql_query:
91
- result = query_timescaledb(sql_query)
92
- if isinstance(result, str): # error case
93
- db_info = result
94
- elif not result:
95
- db_info = "No data found."
96
- else:
97
- # Clean and format result
98
- db_info = "\n".join(str(row) for row in result)
99
 
100
- message = f"{context_prefix}\n{db_info}\n\nAnswer the user's query based on this information."
 
101
 
102
- messages = [{"role": "system", "content": system_message}]
103
- for val in history:
104
- if val[0]:
105
- messages.append({"role": "user", "content": val[0]})
106
- if val[1]:
107
- messages.append({"role": "assistant", "content": val[1]})
108
- messages.append({"role": "user", "content": message})
109
 
110
- response = ""
111
- for message in client.chat_completion(
112
- messages,
113
- max_tokens=max_tokens,
114
- stream=True,
115
- temperature=temperature,
116
- top_p=top_p,
117
- ):
118
- token = message.choices[0].delta.content
119
- response += token
120
- yield response
121
 
122
- # Gradio UI
123
- with gr.Blocks() as demo:
124
- gr.Markdown("## 🤖 Machine Monitoring Assistant")
125
- gr.Markdown(
126
- """
127
- Welcome to the **Machine Monitoring Assistant**. You can ask questions about:
128
- - Current readings (CT1, CT2, CT3, CT_Avg)
129
- - Machine status and state duration
130
- - Fault status
131
- - Firmware versions
132
- """
133
- )
134
-
135
- gr.ChatInterface(
136
- respond,
137
- additional_inputs=[
138
- gr.Textbox(
139
- value="You are an expert AI assistant for machine monitoring. Help users understand machine metrics and status using the latest database values.",
140
- label="System message"
141
- ),
142
- gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
143
- gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
144
- gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
145
- ],
146
- )
147
 
148
- # Run
149
  if __name__ == "__main__":
150
- demo.launch()
 
 
 
 
 
 
 
 
 
 
 
1
+ from sqlalchemy import create_engine, Table, Column, String, Integer, Float, Text, TIMESTAMP, MetaData
2
+ from sqlalchemy.dialects.postgresql import UUID
3
+ from sqlalchemy import text
4
+ from llama_index.core import SQLDatabase
5
+ from llama_index.core.query_engine import NLSQLTableQueryEngine
6
+ from llama_index.llms.huggingface import HuggingFaceLLM
7
  import logging
 
8
 
import os

# Set up logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

# PostgreSQL/TimescaleDB connection (converted from JDBC).
# Credentials are read from the environment when available so the hard-coded
# development defaults below are not baked into deployments; the defaults
# preserve the original behavior when no variables are set.
_db_user = os.environ.get("DB_USER", "postgres")
_db_password = os.environ.get("DB_PASSWORD", "password")
_db_host = os.environ.get("DB_HOST", "localhost")
_db_port = os.environ.get("DB_PORT", "5434")
_db_name = os.environ.get("DB_NAME", "postgres")

engine = create_engine(
    f"postgresql+psycopg2://{_db_user}:{_db_password}@{_db_host}:{_db_port}/{_db_name}"
)
15
 
16
metadata_obj = MetaData()

# Schema for the machine_current_log readings table.
# Mixed-case names ("CT1", "machineId", ...) become quoted, case-sensitive
# identifiers in PostgreSQL, so any raw SQL against them must quote them.
# NOTE(review): the trailing "hi" column looks like a leftover test column —
# confirm whether it is still needed before dropping it from the schema.
_machine_log_columns = (
    Column("mac", Text, primary_key=True),
    Column("created_at", TIMESTAMP(timezone=True), primary_key=True),
    Column("CT1", Float),
    Column("CT2", Float),
    Column("CT3", Float),
    Column("CT_Avg", Float),
    Column("total_current", Float),
    Column("state", Text),
    Column("state_duration", Integer),
    Column("fault_status", Text),
    Column("fw_version", Text),
    Column("machineId", UUID),
    Column("hi", Text),
)

machine_current_log_table = Table(
    "machine_current_log",
    metadata_obj,
    *_machine_log_columns,
)
36
 
37
# Materialize the schema (no-op for tables that already exist).
metadata_obj.create_all(engine)

# Promote the table to a TimescaleDB hypertable partitioned on created_at.
# engine.begin() opens a transaction and commits it on clean exit, replacing
# the manual connect/commit pair with identical effect.
with engine.begin() as conn:
    conn.execute(
        text(
            "SELECT create_hypertable("
            "'machine_current_log', 'created_at', if_not_exists => TRUE);"
        )
    )
    print("TimescaleDB hypertable created")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
45
 
46
# Query 1: list the MAC address of every logged reading.
print("\nQuerying all MAC addresses:")
with engine.connect() as con:
    rows = con.execute(text("SELECT mac FROM machine_current_log"))
    for row in rows:
        print(row)

# Query 2: fetch every row plus a total count.
# The columns were created with mixed-case (quoted) identifiers, so raw SQL
# must quote them — an unquoted CT1 is folded to ct1 by PostgreSQL and would
# raise UndefinedColumn against this schema.
stmt = text("""
    SELECT mac, created_at, "CT1", "CT2", "CT3", "CT_Avg",
           total_current, state, state_duration, fault_status,
           fw_version, "machineId"
    FROM machine_current_log
""")

with engine.connect() as connection:
    count = connection.execute(
        text("SELECT COUNT(*) FROM machine_current_log")
    ).scalar()
    print(f"Total number of rows in table: {count}")
    results = connection.execute(stmt).fetchall()
    print(results)
 
 
69
 
70
# Natural-language querying: wrap the SQLAlchemy engine so LlamaIndex can
# introspect the schema, then pair it with a Zephyr-7B LLM that translates
# questions into SQL over machine_current_log.
sql_database = SQLDatabase(engine)

_generation_params = {"temperature": 0.7, "top_p": 0.95}

llm = HuggingFaceLLM(
    model_name="HuggingFaceH4/zephyr-7b-beta",
    context_window=2048,
    max_new_tokens=256,
    generate_kwargs=_generation_params,
)

query_engine = NLSQLTableQueryEngine(
    sql_database=sql_database,
    tables=["machine_current_log"],
    llm=llm,
)
 
 
 
 
 
 
85
 
86
def natural_language_query(question: str) -> str:
    """Answer *question* via the NL-to-SQL query engine.

    Returns the engine's response as a string, or an error message string if
    the query fails (the function never raises).
    """
    try:
        response = query_engine.query(question)
        return str(response)
    except Exception as e:
        # logger.exception preserves the traceback, unlike logger.error.
        logger.exception(f"Query error: {e}")
        return f"Error processing query: {str(e)}"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
93
 
 
94
if __name__ == "__main__":
    # Demonstrate the NL-to-SQL pipeline with a few sample questions.
    print("\nNatural Language Query Examples:")
    sample_questions = (
        "What is the average CT1 reading?",
        "Which machine has the highest total current?",
        "Show me the latest fault status for each machine",
    )

    for q in sample_questions:
        print(f"\nQuestion: {q}")
        answer = natural_language_query(q)
        print("Answer:", answer)