// LLM Client Implementation
let agentClient = null;
let currentModel = null;
let conversationHistory = [];
function initializeClient() {
  const apiKey = document.getElementById('apiKey').value;
  if (!apiKey) {
    showStatus("Please enter an API key", 'error');
    return;
  }
  agentClient = new ConversationalAgentClient(apiKey);
  agentClient.populateLLMModels()
    .then(models => {
      agentClient.updateModelSelect('modelSelect', models.find(m => m.includes("gemini-2.5")));
      currentModel = document.getElementById('modelSelect').value;
      showStatus(`Loaded ${models.length} models. Default: ${currentModel}`);
    })
    .catch(error => {
      showStatus(`Error fetching models: ${error.message}`, 'error');
    });
}
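// Wiring for initializeClient is not shown in this section. A minimal hookup sketch
// (the "connectButton" id is an assumption; adjust it to the page's actual markup):
const connectButton = document.getElementById('connectButton');
if (connectButton) connectButton.addEventListener('click', initializeClient);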
function addMessageEntry(direction, source, destination, content) {
  const flowDiv = document.getElementById('messageFlow');
  const timestamp = new Date().toLocaleTimeString();
  const entry = document.createElement('div');
  entry.className = `message-entry ${direction}`;
  // Show the metadata (time, source → destination) above the message body so the
  // timestamp, source and destination arguments are actually used. The inner markup
  // and class names here are a plausible reconstruction, not the original template.
  entry.innerHTML = `
    <div class="message-meta">[${timestamp}] ${source} → ${destination}</div>
    <div class="message-body">${content}</div>
  `;
  flowDiv.appendChild(entry);
  flowDiv.scrollTop = flowDiv.scrollHeight;
}
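// showStatus is called throughout this section but defined elsewhere on the page.
// A minimal stand-in (assuming a #status element) so the section also runs on its own:
if (typeof window.showStatus === 'undefined') {
  window.showStatus = function (message, type = 'info') {
    console.log(`[status:${type}] ${message}`);
    const statusDiv = document.getElementById('status');
    if (statusDiv) {
      statusDiv.textContent = message;
      statusDiv.className = `status ${type}`;
    }
  };
}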
// LLM Client Classes
class BaseAgentClient {
  constructor(apiKey, apiUrl = 'https://llm.synapse.thalescloud.io/v1/') {
    this.apiKey = apiKey;
    this.apiUrl = apiUrl;
    this.models = [];
    this.tools = [];
    // Client-side rate limiting: at most maxCallsPerMinute calls per rolling minute,
    // tracked via the timestamps of recent calls.
    this.maxCallsPerMinute = 4;
    this.callTimestamps = [];
  }
  setTools(tools) {
    this.tools = tools;
  }
  async fetchLLMModels() {
    if (!this.apiKey) throw new Error("API Key is not set.");
    console.log("Fetching models from:", this.apiUrl + 'models');
    try {
      const response = await fetch(this.apiUrl + 'models', {
        method: 'GET',
        headers: {
          'Authorization': `Bearer ${this.apiKey}`
        }
      });
      if (!response.ok) {
        const errorText = await response.text();
        console.error("Fetch models error response:", errorText);
        throw new Error(`HTTP error! Status: ${response.status} - ${errorText}`);
      }
      const data = await response.json();
      console.log("Models fetched:", data.data);
      const filteredModels = data.data
        .map(model => model.id)
        .filter(id => !id.toLowerCase().includes('embed') && !id.toLowerCase().includes('image'));
      return filteredModels;
    } catch (error) {
      console.error('Error fetching LLM models:', error);
      throw new Error(`Failed to fetch models: ${error.message}`);
    }
  }
  async populateLLMModels(defaultModel = "gemini-2.5-pro-exp-03-25") {
    try {
      const modelList = await this.fetchLLMModels();
      // Default model first (even if the API did not return it), then the rest alphabetically.
      const otherModels = modelList
        .filter(model => model !== defaultModel)
        .sort((a, b) => a.localeCompare(b));
      this.models = [defaultModel, ...otherModels];
      console.log("Populated models:", this.models);
      return this.models;
    } catch (error) {
      console.error("Error populating models:", error);
      this.models = [defaultModel];
      throw error;
    }
  }
  updateModelSelect(elementId = 'modelSelect', selectedModel = null) {
    const select = document.getElementById(elementId);
    if (!select) {
      console.warn(`Element ID ${elementId} not found.`);
      return;
    }
    const currentSelection = selectedModel || select.value || this.models[0];
    select.innerHTML = '';
    // Show a placeholder when nothing has been fetched yet (the list is empty, or it
    // only contains the hard-coded default because no API key was provided).
    if (this.models.length === 0 || (this.models.length === 1 && this.models[0] === "gemini-2.5-pro-exp-03-25" && !this.apiKey)) {
      const option = document.createElement('option');
      option.value = "";
      option.textContent = "-- Fetch models first --";
      option.disabled = true;
      select.appendChild(option);
      return;
    }
    this.models.forEach(model => {
      const option = document.createElement('option');
      option.value = model;
      option.textContent = model;
      if (model === currentSelection) option.selected = true;
      select.appendChild(option);
    });
    if (!select.value && this.models.length > 0) select.value = this.models[0];
  }
  async rateLimitWait() {
    // Sliding one-minute window: drop timestamps older than 60s, and if the window
    // is full, wait until the oldest call falls out of it.
    const currentTime = Date.now();
    this.callTimestamps = this.callTimestamps.filter(ts => currentTime - ts <= 60000);
    if (this.callTimestamps.length >= this.maxCallsPerMinute) {
      const waitTime = 60000 - (currentTime - this.callTimestamps[0]);
      const waitSeconds = Math.ceil(waitTime / 1000);
      const waitMessage = `Rate limit (${this.maxCallsPerMinute}/min) reached. Waiting ${waitSeconds}s...`;
      console.log(waitMessage);
      showStatus(waitMessage, 'warn');
      await new Promise(resolve => setTimeout(resolve, waitTime + 100));
      showStatus('Resuming after rate limit wait...', 'info');
      this.callTimestamps = this.callTimestamps.filter(ts => Date.now() - ts <= 60000);
    }
  }
  async callAgent(model, messages, tools = null) {
    await this.rateLimitWait();
    const startTime = Date.now();
    // Record the attempt once, up front, so failed calls also count toward the rate
    // limit without being double-counted on the error path.
    this.callTimestamps.push(startTime);
    console.log("Calling Agent:", model);
    const body = {
      model: model,
      messages: messages
    };
    // Only include the tools field when tool definitions were actually provided.
    if (tools) body.tools = tools;
    try {
      const response = await fetch(this.apiUrl + 'chat/completions', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${this.apiKey}`
        },
        body: JSON.stringify(body)
      });
      const endTime = Date.now();
      console.log(`API call took ${endTime - startTime} ms`);
      if (!response.ok) {
        const errorData = await response.json().catch(() => ({ error: { message: response.statusText } }));
        console.error("API Error:", errorData);
        throw new Error(errorData.error?.message || `API failed: ${response.status}`);
      }
      const data = await response.json();
      if (!data.choices || !data.choices[0]?.message) throw new Error("Invalid API response structure");
      console.log("API Response received.");
      return data.choices[0].message;
    } catch (error) {
      console.error('Error calling agent:', error);
      throw error;
    }
  }
  setMaxCallsPerMinute(value) {
    const parsedValue = parseInt(value, 10);
    if (!isNaN(parsedValue) && parsedValue > 0) {
      console.log(`Max calls/min set to: ${parsedValue}`);
      this.maxCallsPerMinute = parsedValue;
      return true;
    }
    console.warn(`Invalid max calls/min: ${value}`);
    return false;
  }
}
class ConversationalAgentClient extends BaseAgentClient {
  constructor(apiKey, apiUrl = 'https://llm.synapse.thalescloud.io/v1/') {
    super(apiKey, apiUrl);
  }
  async call(model, userPrompt, conversationHistory = [], tools = null) {
    // Append the user turn (if any) to the history before calling the model.
    const messages = userPrompt
      ? [...conversationHistory, { role: 'user', content: userPrompt }]
      : [...conversationHistory];
    const assistantResponse = await super.callAgent(model, messages, tools);
    // The returned history is everything that was sent plus the assistant's reply.
    const updatedHistory = [
      ...messages,
      { role: assistantResponse.role, content: assistantResponse.content }
    ];
    return {
      response: assistantResponse,
      history: updatedHistory
    };
  }
}
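// Usage sketch: a send handler that keeps the shared conversationHistory in sync and
// mirrors traffic into the message flow panel. The element ids (#userInput, #sendButton)
// and the 'outgoing'/'incoming' direction classes are assumptions; everything else uses
// only the globals and helpers defined above.
async function sendUserMessage() {
  const input = document.getElementById('userInput');
  const prompt = input ? input.value.trim() : '';
  if (!agentClient || !currentModel || !prompt) return;
  addMessageEntry('outgoing', 'User', currentModel, prompt);
  try {
    const result = await agentClient.call(currentModel, prompt, conversationHistory);
    conversationHistory = result.history;
    addMessageEntry('incoming', currentModel, 'User', result.response.content || '(no content)');
    if (input) input.value = '';
  } catch (error) {
    showStatus(`Agent call failed: ${error.message}`, 'error');
  }
}
const sendButton = document.getElementById('sendButton');
if (sendButton) sendButton.addEventListener('click', sendUserMessage);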
// Model selection change handler
document.getElementById('modelSelect').addEventListener('change', function() {
  currentModel = this.value;
  showStatus(`Model changed to: ${currentModel}`);
});
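// Tool definitions for setTools / callAgent follow the OpenAI-compatible function-calling
// schema that the chat/completions endpoint above implies. The tool below is purely
// illustrative (no such handler exists in this page); real tools would be defined elsewhere.
const exampleTools = [
  {
    type: 'function',
    function: {
      name: 'get_current_time',
      description: 'Returns the current time as an ISO 8601 string.',
      parameters: { type: 'object', properties: {}, required: [] }
    }
  }
];
// Register once, then pass per call:
//   agentClient.setTools(exampleTools);
//   agentClient.call(currentModel, prompt, conversationHistory, agentClient.tools);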