geyik1 committed on
Commit 12e7fde · verified · 1 Parent(s): 4464251

Delete app.py

Files changed (1)
  1. app.py +0 -110
app.py DELETED
@@ -1,110 +0,0 @@
- import gradio as gr
- import torch
- from huggingface_hub import HfApi, login, whoami
- import os
- import sys
- import traceback
- from getpass import getpass
-
- # Force CPU usage if needed
- device = "cuda" if torch.cuda.is_available() else "cpu"
- print(f"Using device: {device}")
-
- # More details about the environment
- print(f"Gradio version: {gr.__version__}")
- print(f"Python version: {sys.version}")
-
- def validate_token(token):
-     """Validate a Hugging Face token by attempting to get user info."""
-     try:
-         login(token=token)
-         user_info = whoami()
-         print(f"Successfully logged in as: {user_info['name']} ({user_info.get('email', 'N/A')})")
-         return True
-     except Exception as e:
-         print(f"Token validation error: {str(e)}")
-         return False
-
- # Try to get token from environment variable
- hf_token = os.getenv("HUGGINGFACE_TOKEN")
-
- # If not found in environment variables, prompt for manual input
- if not hf_token:
-     print("HUGGINGFACE_TOKEN not found in environment variables.")
-     print("You can set it permanently with:")
-     print("  export HUGGINGFACE_TOKEN=your_token_here  # On Linux/macOS")
-     print("  set HUGGINGFACE_TOKEN=your_token_here     # On Windows")
-     print("\nAlternatively, you can enter your token manually below:")
-
-     try:
-         hf_token = getpass("Enter your Hugging Face token (input will be hidden): ")
-     except Exception as e:
-         print(f"Error reading token: {str(e)}")
-         print("Continuing without token...")
-
- # Validate token if available
- if hf_token:
-     token_valid = validate_token(hf_token)
-     if not token_valid:
-         print("Warning: Token validation failed. Model loading may not work.")
- else:
-     print("Warning: No Hugging Face token provided. Model loading may not work.")
-
- def load_model():
-     try:
-         print("Attempting to load model from Hugging Face Hub...")
-         # Try direct Hub access first
-         try:
-             from huggingface_hub import snapshot_download
-             print("Trying to access model directory...")
-             model_path = snapshot_download(repo_id="goofyai/3d_render_style_xl", token=hf_token)
-             print(f"Model downloaded to {model_path}")
-         except Exception as e:
-             print(f"Error downloading model: {str(e)}")
-
-         # Try different approaches to load the model
-         try:
-             print("Using gr.load() method with explicit token...")
-             interface = gr.load(
-                 "models/goofyai/3d_render_style_xl",
-                 hf_token=hf_token
-             )
-             return interface
-         except Exception as e:
-             print(f"Error with gr.load() using explicit token: {str(e)}")
-             print(traceback.format_exc())
-
-         # Try without specifying token (using the login from earlier)
-         try:
-             print("Using gr.load() without explicit token...")
-             interface = gr.load("models/goofyai/3d_render_style_xl")
-             return interface
-         except Exception as e:
-             print(f"Error with gr.load() without token: {str(e)}")
-             print(traceback.format_exc())
-
-         # Try old Interface.load as last resort
-         print("Trying Interface.load() as fallback...")
-         interface = gr.Interface.load("models/goofyai/3d_render_style_xl")
-         return interface
-     except Exception as e:
-         print(f"Error loading model: {str(e)}")
-         print(traceback.format_exc())
-         return None
-
- # Create the interface
- try:
-     interface = load_model()
-     if interface:
-         print("Model loaded successfully, launching interface...")
-         interface.launch(
-             share=False,
-             server_name="0.0.0.0",
-             server_port=7860,
-             show_error=True
-         )
-     else:
-         print("Failed to load the interface")
- except Exception as e:
-     print(f"Error launching interface: {str(e)}")
-     print(traceback.format_exc())