avfranco committed on
Commit
c224c4d
·
1 Parent(s): 79aeb13

ea4all-gradio-agents-mcp-hackathon-tools-apm-upload

Browse files
Files changed (1) hide show
  1. ea4all/ea4all_mcp.py +2 -8
ea4all/ea4all_mcp.py CHANGED
@@ -59,6 +59,7 @@ config = RunnableConfig(
59
 
60
async def call_indexer_apm(config: RunnableConfig):
    """Invoke the APM indexer graph with an empty document list and return its result.

    Args:
        config: RunnableConfig forwarded unchanged to the graph invocation.

    Returns:
        The value produced by ``indexer_graph.ainvoke`` (shape depends on the
        graph — presumably the indexer's final state; confirm against the graph
        definition).
    """
    response = await indexer_graph.ainvoke(input={"docs": []}, config=config)
    # Fix: the awaited result was previously discarded, making the call
    # useless to any caller that needs the indexer's output.
    return response
 
62
 
63
  #ea4all-qna-agent-conversational-with-memory
64
  async def run_qna_agentic_system(question: str) -> AsyncGenerator[list, None]:
@@ -78,6 +79,7 @@ async def run_qna_agentic_system(question: str) -> AsyncGenerator[list, None]:
78
  format_response = "Hi, how are you today? To start using the EA4ALL MCP Tool, provide the required Inputs!"
79
  chat_memory.append(ChatMessage(role="assistant", content=format_response))
80
  else:
 
81
  response = await apm_graph.ainvoke({"question": question}, config=config)
82
  chat_memory.append(ChatMessage(role="assistant", content=response['generation']))
83
 
@@ -160,8 +162,6 @@ async def run_reference_architecture_agentic_system(business_query: str) -> Asyn
160
  ) #astream not loading the graph
161
  yield response.content
162
 
163
-
164
-
165
  async def run_pmo_agentic_system(question:str) -> AsyncGenerator[list, None]:
166
  """
167
  description:
@@ -374,9 +374,3 @@ with gr.Blocks(title="Your ArchitectGPT",fill_height=True, fill_width=True) as e
374
 
375
  #Set initial state of apm and llm
376
  ea4all_mcp.load(ea4all_agent_init, outputs=[ea4all_agent_metadata,ea4all_chatbot, ea4all_vqa, pmo_chatbot, confluence_list, confluence_df, apm_df, pmo_df], show_api=False)
377
- ea4all_mcp.then(
378
- call_indexer_apm,
379
- inputs=None,
380
- outputs=None,
381
- show_api=False
382
- ) #call indexer graph to load the APM
 
59
 
60
async def call_indexer_apm(config: RunnableConfig):
    """Run the APM indexer graph (no seed docs) and hand back its response.

    The supplied ``config`` is passed straight through to the graph's
    ``ainvoke`` call.
    """
    # Await and return in one expression; the graph receives an empty docs list.
    return await indexer_graph.ainvoke(input={"docs": []}, config=config)
63
 
64
  #ea4all-qna-agent-conversational-with-memory
65
  async def run_qna_agentic_system(question: str) -> AsyncGenerator[list, None]:
 
79
  format_response = "Hi, how are you today? To start using the EA4ALL MCP Tool, provide the required Inputs!"
80
  chat_memory.append(ChatMessage(role="assistant", content=format_response))
81
  else:
82
+ index = call_indexer_apm(config) #call indexer to update the index
83
  response = await apm_graph.ainvoke({"question": question}, config=config)
84
  chat_memory.append(ChatMessage(role="assistant", content=response['generation']))
85
 
 
162
  ) #astream not loading the graph
163
  yield response.content
164
 
 
 
165
  async def run_pmo_agentic_system(question:str) -> AsyncGenerator[list, None]:
166
  """
167
  description:
 
374
 
375
  #Set initial state of apm and llm
376
  ea4all_mcp.load(ea4all_agent_init, outputs=[ea4all_agent_metadata,ea4all_chatbot, ea4all_vqa, pmo_chatbot, confluence_list, confluence_df, apm_df, pmo_df], show_api=False)