Spaces: Running on Zero

Commit · 27c88c6
1 Parent(s): 3085585

Corrected OpenAI API usage

Files changed:
- .dockerignore +1 -0
- .gitignore +2 -1
- app.py +3 -2
- tools/llm_funcs.py +11 -6
.dockerignore CHANGED

@@ -15,6 +15,7 @@ usage/*
 feedback/*
 test_code/*
 unsloth_compiled_cache/*
+.vscode/*
 input/
 output/
 logs/
.gitignore CHANGED

@@ -16,4 +16,5 @@ feedback/*
 test_code/*
 config/*
 tmp/*
-unsloth_compiled_cache/*
+unsloth_compiled_cache/*
+.vscode/*
app.py CHANGED

@@ -314,8 +314,9 @@ with app:
     google_api_key_textbox = gr.Textbox(value = GEMINI_API_KEY, label="Enter Gemini API key (only if using Google API models)", lines=1, type="password")

     with gr.Accordion("Azure/OpenAI Inference", open = False):
-
-
+        with gr.Row():
+            azure_api_key_textbox = gr.Textbox(value = AZURE_OPENAI_API_KEY, label="Enter Azure/OpenAI Inference API key (only if using Azure/OpenAI models)", lines=1, type="password")
+            azure_endpoint_textbox = gr.Textbox(value = AZURE_OPENAI_INFERENCE_ENDPOINT, label="Enter Azure Inference endpoint URL (only if using Azure models)", lines=1)

     with gr.Accordion("Hugging Face token for downloading gated models", open = False):
         hf_api_key_textbox = gr.Textbox(value = HF_TOKEN, label="Enter Hugging Face API key (only for gated models that need a token to download)", lines=1, type="password")
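For context, the app.py change groups the two Azure/OpenAI credential inputs on a single row inside the "Azure/OpenAI Inference" accordion. A minimal standalone sketch of the same Gradio pattern follows; the default values read from environment variables here are illustrative, not taken from the app's own config handling:

import os
import gradio as gr

# Illustrative defaults; the real app resolves these through its own config/env loading
AZURE_OPENAI_API_KEY = os.environ.get("AZURE_OPENAI_API_KEY", "")
AZURE_OPENAI_INFERENCE_ENDPOINT = os.environ.get("AZURE_OPENAI_INFERENCE_ENDPOINT", "")

with gr.Blocks() as demo:
    # An accordion keeps optional credentials collapsed by default
    with gr.Accordion("Azure/OpenAI Inference", open=False):
        # A row places the key and endpoint fields side by side
        with gr.Row():
            azure_api_key_textbox = gr.Textbox(
                value=AZURE_OPENAI_API_KEY,
                label="Enter Azure/OpenAI Inference API key (only if using Azure/OpenAI models)",
                lines=1,
                type="password",
            )
            azure_endpoint_textbox = gr.Textbox(
                value=AZURE_OPENAI_INFERENCE_ENDPOINT,
                label="Enter Azure Inference endpoint URL (only if using Azure models)",
                lines=1,
            )

if __name__ == "__main__":
    demo.launch()
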
tools/llm_funcs.py CHANGED

@@ -685,13 +685,18 @@ def construct_azure_client(in_api_key: str, endpoint: str) -> Tuple[object, dict
         if not endpoint:
             endpoint = os.environ.get("AZURE_OPENAI_INFERENCE_ENDPOINT", "")
         if not endpoint:
-
+            # Assume using OpenAI API
+            client = OpenAI(
+                api_key=key,
+            )
+        else:
+            # Use the provided endpoint
+            client = OpenAI(
+                api_key=key,
+                base_url=f"{endpoint}",
+            )

-
-        client = OpenAI(
-            api_key=key,
-            base_url=f"{endpoint}",
-        )
+

         return client, dict()
     except Exception as e:
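The effect of this change: previously the client was always constructed with base_url=f"{endpoint}", even when no endpoint was supplied; now, when neither the endpoint argument nor the AZURE_OPENAI_INFERENCE_ENDPOINT environment variable provides one, the client is built without base_url so the openai SDK falls back to the standard OpenAI API, and the endpoint is only passed as base_url when it exists. A hedged sketch of the resulting construction logic is below; the function body is simplified, the key fallback to an AZURE_OPENAI_API_KEY environment variable and the error handling are assumptions rather than the file's actual code:

import os
from typing import Tuple

from openai import OpenAI


def construct_azure_client(in_api_key: str, endpoint: str) -> Tuple[object, dict]:
    # Sketch of the post-fix logic: plain OpenAI by default, custom base_url when an endpoint is given
    try:
        # Assumed key resolution; the real function may source the key differently
        key = in_api_key or os.environ.get("AZURE_OPENAI_API_KEY", "")

        if not endpoint:
            endpoint = os.environ.get("AZURE_OPENAI_INFERENCE_ENDPOINT", "")
        if not endpoint:
            # Assume using the standard OpenAI API (SDK default base URL)
            client = OpenAI(api_key=key)
        else:
            # Use the provided Azure/OpenAI-compatible inference endpoint
            client = OpenAI(api_key=key, base_url=f"{endpoint}")

        return client, dict()
    except Exception as e:
        # Assumed error handling for the sketch
        print(f"Error constructing client: {e}")
        raise

In the app, the values typed into azure_api_key_textbox and azure_endpoint_textbox would be the natural inputs for in_api_key and endpoint here, which is presumably why the two changes land in the same commit.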