Spaces:
Running
Running
File size: 5,119 Bytes
63c0c36 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 |
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### In this notebook, I’ll use the OpenAI class to connect to the OpenRouter API.\n",
"#### This way, I can use the OpenAI class just as it’s shown in the course."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# First let's do an import\n",
"from dotenv import load_dotenv\n",
"from openai import OpenAI\n",
"from IPython.display import Markdown, display\n",
"# NOTE(review): `requests` is not referenced anywhere in this notebook - confirm and remove.\n",
"import requests\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Load variables from a local .env file into the process environment.\n",
"# override=True lets values from .env replace variables that are already set.\n",
"\n",
"load_dotenv(override=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Check the keys\n",
"\n",
"import os\n",
"openRouter_api_key = os.getenv('OPENROUTER_API_KEY')\n",
"\n",
"# Fix: the original messages said \"OpenAI API Key\", but the key being\n",
"# checked is OPENROUTER_API_KEY - the wrong name made the failure message misleading.\n",
"if openRouter_api_key:\n",
"    print(f\"OpenRouter API Key exists and begins {openRouter_api_key[:8]}\")\n",
"else:\n",
"    print(\"OpenRouter API Key not set - please head to the troubleshooting guide in the setup folder\")\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Now let's define the model names\n",
"# These OpenRouter model slugs select which model handles a chat completions request.\n",
"Gpt_41_nano = \"openai/gpt-4.1-nano\"\n",
"Gpt_41_mini = \"openai/gpt-4.1-mini\"\n",
"Claude_35_haiku = \"anthropic/claude-3.5-haiku\"\n",
"Claude_37_sonnet = \"anthropic/claude-3.7-sonnet\"\n",
"#Gemini_25_Pro_Preview = \"google/gemini-2.5-pro-preview\"\n",
"Gemini_25_Flash_Preview_thinking = \"google/gemini-2.5-flash-preview:thinking\"\n",
"\n",
"\n",
"# Models with a ':free' suffix use OpenRouter's free tier (presumably rate-limited - see OpenRouter docs).\n",
"free_mistral_Small_31_24B = \"mistralai/mistral-small-3.1-24b-instruct:free\"\n",
"free_deepSeek_V3_Base = \"deepseek/deepseek-v3-base:free\"\n",
"free_meta_Llama_4_Maverick = \"meta-llama/llama-4-maverick:free\"\n",
"free_nous_Hermes_3_Mistral_24B = \"nousresearch/deephermes-3-mistral-24b-preview:free\"\n",
"free_gemini_20_flash_exp = \"google/gemini-2.0-flash-exp:free\"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Shared conversation state: chatWithOpenRouter appends user/assistant turns here.\n",
"chatHistory = []"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def chatWithOpenRouter(model:str, prompt:str)-> str:\n",
"    \"\"\"Send `prompt` to `model` via the OpenRouter API and return the reply text.\n",
"\n",
"    Both the user prompt and the assistant's answer are appended to the\n",
"    module-level `chatHistory` list, so consecutive calls form a multi-turn\n",
"    conversation. The answer is also rendered as Markdown in the cell output.\n",
"    \"\"\"\n",
"\n",
"    # OpenRouter exposes an OpenAI-compatible endpoint, so the standard\n",
"    # OpenAI client works once base_url points at OpenRouter.\n",
"    llmRequest = OpenAI(\n",
"        api_key=openRouter_api_key,\n",
"        base_url=\"https://openrouter.ai/api/v1\"\n",
"    )\n",
"\n",
"    # Record the user turn first so the request carries the full history.\n",
"    chatHistory.append({\"role\": \"user\", \"content\": prompt})\n",
"\n",
"    # make the request to the OpenRouter API\n",
"    response = llmRequest.chat.completions.create(\n",
"        model=model,\n",
"        messages=chatHistory\n",
"    )\n",
"\n",
"    assistantResponse = response.choices[0].message.content\n",
"\n",
"    # show the answer\n",
"    display(Markdown(f\"**Assistant:**\\n {assistantResponse}\"))\n",
"\n",
"    # Keep the assistant turn so the next call has full context.\n",
"    chatHistory.append({\"role\": \"assistant\", \"content\": assistantResponse})\n",
"\n",
"    # Fix: the signature promises `-> str`, but the original implicitly returned None.\n",
"    return assistantResponse"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Prompt for the demo call below; asks for a short markdown-formatted answer.\n",
"userPrompt = \"Shortly. Difference between git and github. Response in markdown.\""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Ask the free Mistral model; the reply renders below and both turns land in chatHistory.\n",
"chatWithOpenRouter(free_mistral_Small_31_24B, userPrompt)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Reset the conversation\n",
"def clearChatHistory():\n",
"    \"\"\"Start a fresh conversation by emptying the shared chatHistory list in place.\"\"\"\n",
"    del chatHistory[:]"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "UV_Py_3.12",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.10"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
|