avfranco committed
Commit a38c9c4 · 1 Parent(s): 8e63cd2

ea4all-gradio-agents-mcp-hackathon-ui-retrofit

ea4all/__main__.py CHANGED
@@ -10,6 +10,7 @@ def main() -> None:
             debug=os.getenv("GRADIO_DEBUG", "True").lower() in ("true", "1", "yes"),
             ssr_mode=False,
             mcp_server=True,
+            inbrowser=os.getenv("GRADIO_INBROWSER", "True").lower() in ("true", "1", "yes"),
         )
     except Exception as e:
         print(f"Error loading: {e}")
ea4all/ea4all_mcp.py CHANGED
@@ -1,68 +1,7 @@
-#Added agentic-workflow-collaboration-agents
-#Multimodal ChatInterface - not working
-#Added new QA Tab
-#Added new agent Well-Architected
-#Added Supervisor Agent workflow
-#ISSUE with VQA fixed
-#LLMChain refactored
-#Updated with changes as result of ea4all_agent Gradio Space deployment issues
-#VQA Safeguardings - JPEG, PNG images only
-#Deployed version to Live
-#Library import refactoring, ea4all-architecture, empty message
-#Bring your own IT Landscape data: discontinued
-#Added upload your Business Requirement
-#Load user's APM - disabled 2024-06-22
-#TEST E2E Togaf Agentic system 2024-06-24
-#MIGRATION TO HF Open Source using TGI and Meta-Llama-3-8B-Instruct 2024-06-25
-#ADDED GENERATE_ARCHITECTURE_RUNWAY diagram: graphviz 2024-07-03
-#REFERENCE ARCHITECTURE DYNAMIC TABS 2024-07-05
-#ADDED Business Query grader 2024-07-07
-#RCA Togaf Supervisor: increase reliability 2024-07-08 - ISSUE FIXED BY NOW
-#EA4ALL-agentic-system-container updated 2024-07-10
-###APM Agentic system: 2024-07-25 - Safety check added
-##Sub-graph node stream 204-07-26
-# Stream arbitrary nested content: https://langchain-ai.github.io/langgraph/how-tos/streaming-content/
-## Prompt refinement task_router, user_question_routing, prompt_category 2024-07-27
-## WebSearch Hallucination issue - recursion looping - solution: routing to route_question 2024-07-28
-## Safety_check greetings, msgs, APM Sample Dataset 2024-09-29
-# VQA issue - image not recognised 2024-07-30
-# Constants IMAGES (Architecture, Overview) 2024-07-31
-# About, QnA Examples moved to mock files 2024-08-01 - deployed to build
-## 2024-08-03: VQA Streaming, Diagrams' EDGE nodes changed to END - one task at a time: 2024-08-03
-## VQA Llama-3.2-11B-Vision-Instruct 2024-10-25
-#RELEASE 2024-11-15
-## CHANGES 2024-11-22
-# MIGRATION to Gradio 5
-# Chatbot UI migrated to gr.Chatbot
-# run_qna_agentic_system, run_vqa_agentic_system updated: ChatMessage, chat_memory, UI events
-# chat_memory VQA missing image - fixed - needs improvement
-## RELEASE 2024-11-23
-#pip freeze > requirements.txt to keep libraries synched local and HF Spaces
-#gr.Image issue: caused by __main__ root_path=str(Path.cwd())
-## RELEASE 2024-12-09
-#Confluence Integration
-#Llama-3.2-11B-Vision-Instruct max_token issue <=4096 stills
-#Safety-check refinement
-#TOGAF Vision streaming
-## Release update 2024-12-11
-#EA4ALL Podcast
-#2025-02-03 RELEASE V1
-##RETROFIT & INTEGRATION w/ EA4ALL-dev-studio-structure
-#2025-02-09
-##UAT EA4ALL-LGS-RETRIEVER-REFACTORED
-#2025-03-10
-##AI-Assistant-UI-Message-Stream refactor
-#2025-12-04
-## Add EA4ALL-PMO-Demand-Management CrewAI Agents
-#2025-05-06
-## Add MCP Server
-#2025-05-17
-## Added PMO_MOCK_QNA examples,
-## get_relevant_questions() - moved to utils, constants moved to configuration
-#2025-05-19
-## EA4ALL Diagram refactored, vqa_max_tokens updated
 #2025-06-04
 ## Gradio Agents MCP Hackathon: retrofit to expose EA4ALL Agentic System Agents only
+## Greetings message not working
+## UI exposing too much tools, need to be refactored
 from langchain.callbacks.tracers import LangChainTracer
 from langchain.callbacks.tracers.langchain import wait_for_all_tracers
 from langchain_core.messages import HumanMessage
@@ -399,14 +338,14 @@ with gr.Blocks(title="Your ArchitectGPT",fill_height=True, fill_width=True) as e
     with gr.Tabs(selected="how_to") as tabs:
         with gr.Tab(label="Architect Demand Management"):
             with gr.Tab(label="Architect Project Planning", id="pmo_qna_1"):
-                ea4all_pmo_description = gr.Markdown(value=agentic_pmo_desc)
-                pmo_chatbot = gr.Chatbot(label="EA4ALL your AI Architect Companion", type="messages")
-                pmo_prompt = gr.Textbox(lines=1, show_label=False, max_lines=1, submit_btn=True, stop_btn=True,autofocus=True, placeholder="Type your message here or select an example...")
-                with gr.Accordion("Open for prompt examples", open=False):
-                    pmo_examples = gr.Dropdown(e4u.get_relevant_questions(PMO_MOCK_QNA), value=None,label="Questions", interactive=True)
-                gr.ClearButton([pmo_chatbot,pmo_prompt], value="Clear", size="sm", visible=False)
+                ea4all_pmo_description = gr.Markdown(value=agentic_pmo_desc)
+                pmo_chatbot = gr.Chatbot(label="EA4ALL your AI Architect Companion", type="messages")
+                pmo_prompt = gr.Textbox(lines=1, show_label=False, max_lines=1, submit_btn=True, stop_btn=True,autofocus=True, placeholder="Type your message here or select an example...")
+                with gr.Accordion("Open for prompt examples", open=False):
+                    pmo_examples = gr.Dropdown(e4u.get_relevant_questions(PMO_MOCK_QNA), value=None,label="Questions", interactive=True)
+                gr.ClearButton([pmo_chatbot,pmo_prompt], value="Clear", size="sm", visible=False)
             with gr.Tab(label="Project Portfolio Sample Dataset", id="id_pmo_ds"):
-                pmo_df = gr.Dataframe()
+                pmo_df = gr.Dataframe()
         with gr.Tab(label="Application Landscape QnA"):
             with gr.Tabs() as tabs_apm_qna:
                 with gr.Tab(label="Connect, Explore, Together", id="app_qna_1"):
ea4all/utils/utils.py CHANGED
@@ -120,9 +120,9 @@ async def ea4all_agent_init(request:gr.Request):
 
     return (
         apm_columns,
-        [prompt],
-        [prompt],
-        [prompt],
+        [{"role": "system", "content": "You are a helpful assistant."}],
+        [{"role": "system", "content": "You are a helpful assistant."}],
+        [{"role": "system", "content": "You are a helpful assistant."}],
         gr.Dropdown(choices=page_list['title'].values.tolist()),
         gr.DataFrame(value=page_list),
         gr.DataFrame(value=df),
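For context on the shape change above: with the Gradio 5 "messages" chat format, history is carried as OpenAI-style role/content dicts rather than bare prompt strings, which also converts cleanly into LangChain message objects. A minimal sketch, where the seed text and the helper are illustrative rather than taken from the repo:

from langchain_core.messages import AIMessage, HumanMessage, SystemMessage

# OpenAI-style chat history: a list of {"role": ..., "content": ...} dicts.
chat_memory = [{"role": "system", "content": "You are a helpful assistant."}]
chat_memory.append({"role": "user", "content": "Which projects lack an assigned architect?"})

def to_langchain_messages(history):
    # Map dict-based history onto LangChain message classes before
    # handing it to an agent or LLM chain.
    role_map = {"system": SystemMessage, "user": HumanMessage, "assistant": AIMessage}
    return [role_map[m["role"]](content=m["content"]) for m in history]

print(to_langchain_messages(chat_memory))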