Andre committed · Commit 3ece4ac
1 Parent(s): 1700c38
“Update”
Browse files
- app.py +3 -15
- colab.ipynb +1 -1
- config/config.py +0 -7
- src/{img_gen_logic_colab.py → img_gen_colab.py} +0 -0
app.py
CHANGED
@@ -1,18 +1,6 @@
-# app.py
-import sys
-import os
-
-
-# Add the src folder to the Python path
-# Solves all problems w subfolders - option2
-src_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "src"))
-if src_path not in sys.path:
-    sys.path.append(src_path)
-
+# app.py
 # Import gradio_interface
 from gradio_interface import demo

-
-
-# Launch the Gradio app
-demo.launch()
+# Launch the Gradio app
+demo.launch()
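For reference, the six-line app.py that results from this diff, reassembled from the hunk above (nothing here beyond what the diff shows): the sys.path manipulation for the src folder is gone, so the file now assumes gradio_interface is importable as-is from the Space's working directory.

# app.py
# Import gradio_interface
from gradio_interface import demo

# Launch the Gradio app
demo.launch()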
colab.ipynb
CHANGED
@@ -19,7 +19,7 @@
 "import ipywidgets as widgets\n",
 "from huggingface_hub import InferenceClient\n",
 "from IPython.display import display, clear_output\n",
-"from src.
+"from src.img_gen_colab import generate_image, save_image\n",
 "from config.config_colab import models, prompts, api_token  # Import from config folder\n",
 "from PIL import Image\n",
 "from google.colab import userdata\n",
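For reference, the notebook's import cell after this change, reassembled from the hunk above; only the src import line differs, matching the img_gen_logic_colab.py → img_gen_colab.py rename recorded at the bottom of this commit.

import ipywidgets as widgets
from huggingface_hub import InferenceClient
from IPython.display import display, clear_output
from src.img_gen_colab import generate_image, save_image
from config.config_colab import models, prompts, api_token  # Import from config folder
from PIL import Image
from google.colab import userdata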
config/config.py
CHANGED
@@ -6,13 +6,6 @@ from config.models import models
 # Retrieve the Hugging Face token
 api_token = os.getenv("HF_TOKEN")

-# Debugging: Check if the Hugging Face token is available
-if not api_token:
-    print("ERROR: Hugging Face token (HF_TOKEN) is missing. Please set it as an environment variable or in Colab secrets.")
-else:
-    print("Hugging Face token loaded successfully.")
-
-
 # Debugging: Print prompt and model options
 print("Prompt Options:", [p["alias"] for p in prompts])
 print("Model Options:", [m["alias"] for m in models])
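The removed block was a print-based debug check for the token. If a fail-fast check is still wanted elsewhere, a minimal sketch could look like the following; the config.config import path and the choice to raise instead of print are assumptions for illustration, not part of this commit.

# Hypothetical guard at a call site (assumed module path; this commit only removes the old print-based check)
from config.config import api_token

if not api_token:
    raise RuntimeError(
        "Hugging Face token (HF_TOKEN) is missing. "
        "Set it as an environment variable or in Colab secrets."
    )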
src/{img_gen_logic_colab.py → img_gen_colab.py}
RENAMED
File without changes