avfranco committed on
Commit 09e908a · 1 Parent(s): f35f6e7

ea4all-gradio-agents-mcp-hackathon-tools-apm-upload

Files changed (1)
  1. ea4all/ea4all_mcp.py +12 -64
ea4all/ea4all_mcp.py CHANGED
@@ -9,12 +9,9 @@ from langchain_core.runnables import RunnableConfig
 
 from ea4all.src.shared.configuration import BaseConfiguration, APM_MOCK_QNA, PMO_MOCK_QNA
 from ea4all.src.ea4all_gra.configuration import AgentConfiguration as gra
-import ea4all.src.ea4all_vqa.graph as e4v
-import ea4all.src.ea4all_gra.graph as e4t
-import ea4all.src.shared.utils as e4u
-from ea4all.src.graph import super_graph
 from ea4all.src.ea4all_apm.graph import apm_graph
 from ea4all.src.ea4all_vqa.graph import diagram_graph
+from ea4all.src.ea4all_togaf.graph import togaf_graph
 from ea4all.src.ea4all_indexer.graph import indexer_graph
 
 #from ea4all.src.pmo_crew.crew_runner import run_pmo_crew
@@ -53,7 +50,7 @@ config = RunnableConfig(
     #stream_mode = "messages"
 )
 
-async def call_indexer_apm(state: State, config: RunnableConfig):
+async def call_indexer_apm(config: RunnableConfig):
     response = await indexer_graph.ainvoke(input={"docs":[]}, config=config)
 
 #ea4all-qna-agent-conversational-with-memory
@@ -144,68 +141,19 @@ async def run_reference_architecture_agentic_system(business_query: str) -> Asyn
         response: High-level architecture blueprint and target diagram.
     """
 
-    if len(business_query) < 50:
+    if len(business_query) < 20:
         agent_response = "Please provide a valid Business Requirement content to start!"
         yield([agent_response,gr.Tabs(visible=True), gr.Tabs(selected="id_togaf"),None, None, gr.Tabs(visible=False)])
     else:
-        plain_text = e4u.markdown_to_plain_text(business_query)
-        agent_response = "Generating Architecture Blueprint ---TOGAF VISION TARGET--- \n\nI am working on your request..."
-        togaf_chain = e4t.togaf_graph
-        final_diagram = ""
-        vision_message = ""
-        try:
-            async for s in togaf_chain.astream_events(
-                {
-                    "messages": [
-                        HumanMessage(
-                            content=plain_text
-                        )
-                    ],
-                    "business_query": business_query,
-                },
-                config=config,
-                version="v2"
-            ):
-                kind = s["event"]
-                tags = s.get("tags", [])
-                name = s['name']
-
-                if "gra_stream" in tags and name == "stream_vision_target":
-                    if kind == "on_chain_stream":
-                        data = s["data"]
-                        # Accumulate the chunk of data
-                        chunk = data.get('chunk')
-                        if chunk is not None and hasattr(chunk, "content"):
-                            vision_message += chunk.content
-                            time.sleep(e4u.CFG.STREAM_SLEEP)
-                            yield([vision_message,gr.Tabs(visible=True), gr.Tabs(selected="id_togaf"),None, None, gr.Tabs(visible=False)])
-                elif name == "save_diagram" and kind == 'on_chain_end': #MOVED INTO Togaf_Task3
-                    output = s['data'].get('output', {})
-                    final_diagram = output.get('architecture_runway', "")
-                elif ("assess_business_query" in tags or "assess_landscape" in tags) and kind == 'on_chain_start': ##'on_chat_model_stream':
-                    agent_response += f"\n\n`{tags[-1]}:{name}`"
-
-                    yield([agent_response,gr.Tabs(visible=True), gr.Tabs(selected="id_togaf"),None, None, gr.Tabs(visible=False)])
-
-            if vision_message=="":
-                agent_response = "I cannot generate the Architecture Vision. Please provide a valid Business Requirement content to start!"
-                yield([agent_response,gr.Tabs(visible=True), gr.Tabs(selected="id_togaf"),None, None, gr.Tabs(visible=False)])
-            elif "Error" not in final_diagram:
-                yield([vision_message,gr.Tabs(visible=True), gr.Tabs(selected="id_togaf"),final_diagram, None, gr.Tabs(visible=True)])
-            else:
-                yield([vision_message,gr.Tabs(visible=True), gr.Tabs(selected="id_togaf"),None, final_diagram, gr.Tabs(visible=True)])
-
-        except Exception as e:
-            yield(
-                [
-                    e.args[-1],
-                    gr.Tabs(visible=True),
-                    gr.Tabs(selected="id_togaf"),
-                    None,
-                    None,
-                    gr.Tabs(visible=False)
-                ]
-            )
+        inputs = {"messages": [{"role": "user", "content": business_query}]} #user response
+
+        response = await togaf_graph.ainvoke(
+            input=inputs,
+            config=config
+        ) #astream not loading the graph
+        yield response.content
+
+
 
 async def run_pmo_agentic_system(question:str) -> AsyncGenerator[list, None]:
     """
 