File size: 2,688 Bytes
63c0c36
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
"""This module contains functions to interact with the OpenRouter API.
   It load dotenv, OpenAI and other necessary packages to interact
   with the OpenRouter API.
   Also stores the chat history in a list."""
from dotenv import load_dotenv
from openai import OpenAI
from IPython.display import Markdown, display
import os

# Override any existing environment variables with values from .env.
load_dotenv(override=True)

# Load the OpenRouter API key from the environment.
openrouter_api_key = os.getenv('OPENROUTER_API_KEY')

if openrouter_api_key:
    # Bug fix: the message said "OpenAI API Key" but the key checked here is
    # the OpenRouter key. Only a prefix is shown so the secret never leaks.
    print(f"OpenRouter API Key exists and begins {openrouter_api_key[:8]}")
else:
    print("OpenRouter API Key not set - please head to the troubleshooting guide in the setup folder")


# Running conversation shared by all chat functions in this module.
chatHistory = []


def chatWithOpenRouter(model:str, prompt:str)-> str:
    """Send *prompt* to *model* via OpenRouter and display the reply as Markdown.

    The user prompt and the assistant reply are both appended to the
    module-level ``chatHistory`` so subsequent calls keep conversational
    context.

    Args:
        model: OpenRouter model identifier.
        prompt: The user's message.

    Returns:
        The assistant's reply text (also rendered inline as Markdown).
    """
    # Point the OpenAI client at the OpenRouter-compatible endpoint.
    llmRequest = OpenAI(
        api_key=openrouter_api_key,
        base_url="https://openrouter.ai/api/v1"
    )

    # Record the user turn before the request so the model sees it.
    chatHistory.append({"role": "user", "content": prompt})

    # Make the request to the OpenRouter API with the full history.
    response = llmRequest.chat.completions.create(
        model=model,
        messages=chatHistory
    )

    assistantResponse = response.choices[0].message.content

    # Render the reply inline (IPython/Jupyter).
    display(Markdown(f"**Assistant:** {assistantResponse}"))

    # Record the assistant turn to keep the history consistent.
    chatHistory.append({"role": "assistant", "content": assistantResponse})

    # Bug fix: the signature promises ``-> str`` but the function returned
    # ``None``. Returning the reply is backward-compatible for callers that
    # ignored the return value.
    return assistantResponse


def getOpenrouterResponse(model:str, prompt:str)-> str:
    """Send *prompt* to *model* through OpenRouter and return the reply text.

    Both the user prompt and the assistant reply are appended to the
    module-level ``chatHistory`` so context accumulates across calls.
    """
    # OpenAI client configured for the OpenRouter-compatible endpoint.
    client = OpenAI(
        base_url="https://openrouter.ai/api/v1",
        api_key=openrouter_api_key,
    )

    # Record the user turn first so the request includes it.
    chatHistory.append({"role": "user", "content": prompt})

    # One round-trip with the accumulated conversation.
    completion = client.chat.completions.create(
        model=model,
        messages=chatHistory,
    )
    reply = completion.choices[0].message.content

    # Keep the history consistent by recording the assistant turn too.
    chatHistory.append({"role": "assistant", "content": reply})

    return reply


# Reset the shared conversation state.
def clearChatHistory():
    """Empty the module-level chat history in place. This cannot be undone!"""
    del chatHistory[:]