File size: 5,699 Bytes
72f7653
 
07be61d
72f7653
 
 
 
1bc70ef
72f7653
 
 
 
 
 
 
 
 
1714062
72f7653
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5bb65b1
72f7653
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1f0390e
07be61d
 
72f7653
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
import gradio as gr
from openai import OpenAI
import os 
from dotenv import load_dotenv

load_dotenv()

# System prompt text comes from an env var and may be None when unset;
# `respond` below must tolerate that.
SYSTEM_PROMPT = os.getenv("XTRNPMT")

# OpenAI-compatible REST endpoint for Featherless AI.
API_BASE_URL = "https://api.featherless.ai/v1"

FEATHERLESS_API_KEY = os.getenv("FEATHERLESS_API_KEY")

# Model ID served by Featherless AI.
FEATHERLESS_MODEL = "darkc0de/XortronCriminalComputingConfig"

# Early, non-fatal diagnostic; the hard failure happens below.
if not FEATHERLESS_API_KEY:
    print("WARNING: FEATHERLESS_API_KEY environment variable is not set.")

try:
    if not FEATHERLESS_API_KEY:
        raise ValueError("FEATHERLESS_API_KEY is not set. Please set it as an environment variable or a secret in your deployment environment.")

    # The OpenAI SDK client is pointed at the Featherless base URL; all
    # chat-completion calls in `respond` go through this object.
    client = OpenAI(
        base_url=API_BASE_URL,
        api_key=FEATHERLESS_API_KEY
    )
    print(f"OpenAI client initialized with base_url: {API_BASE_URL} for Featherless AI, model: {FEATHERLESS_MODEL}")

except Exception as e:
    print(f"Error initializing OpenAI client with base_url '{API_BASE_URL}': {e}")
    # Chain the original exception so the root cause stays in the traceback.
    raise RuntimeError(
        "Could not initialize OpenAI client. "
        f"Please check the API base URL ('{API_BASE_URL}'), your Featherless AI API key, model ID, "
        f"and ensure the server is accessible. Original error: {e}"
    ) from e


def respond(message, history):
    """
    Stream a chat completion from the Featherless AI (OpenAI-compatible) API.

    Builds the message list from an optional system prompt, the prior chat
    history, and the latest user message, then yields the accumulated
    response text as streaming chunks arrive.

    Args:
        message (str): The latest message from the user.
        history (list of lists): Pairs of [user_message, ai_message] from
            previous turns (Gradio "tuples" chat-history format).

    Yields:
        str: The response text accumulated so far; each yield replaces the
        previously displayed text in the Gradio chatbot. On failure, a
        single human-readable error string is yielded instead.
    """
    # Only include a system message when the prompt env var is actually set;
    # a None content value would be rejected by the API.
    messages = []
    if SYSTEM_PROMPT:
        messages.append({"role": "system", "content": SYSTEM_PROMPT})

    for user_message, ai_message in history:
        if user_message:
            messages.append({"role": "user", "content": user_message})
        if ai_message:
            messages.append({"role": "assistant", "content": ai_message})

    messages.append({"role": "user", "content": message})

    response_text = ""

    try:
        stream = client.chat.completions.create(
            messages=messages,
            model=FEATHERLESS_MODEL,
            stream=True,
        )

        for chunk in stream:
            if not chunk.choices:
                continue
            choice = chunk.choices[0]

            # Streaming chunks carry incremental text in `delta.content`.
            delta = getattr(choice, "delta", None)
            if delta is not None and delta.content is not None:
                response_text += delta.content
                yield response_text
                continue

            # Defensive fallback for servers that return a full `message`
            # object instead of a delta. `getattr` avoids the AttributeError
            # the previous code raised on SDK chunk objects that define no
            # `message` field, which silently routed chunks to the error path.
            full_message = getattr(choice, "message", None)
            if full_message is not None and full_message.content is not None:
                response_text += full_message.content
                yield response_text

    except Exception as e:
        error_message = f"An error occurred during model inference with Featherless AI: {e}"
        print(error_message)
        yield error_message


# Ko-fi floating donation widget, injected into the page <head> via
# gr.Blocks(head=...) below.
kofi_script = """
<script src='https://storage.ko-fi.com/cdn/scripts/overlay-widget.js'></script>
<script>
  kofiWidgetOverlay.draw('sonnydesorbo', {
    'type': 'floating-chat',
    'floating-chat.donateButton.text': 'Support me',
    'floating-chat.donateButton.background-color': '#00b9fe',
    'floating-chat.donateButton.text-color': '#fff'
  });
</script>
"""

# Static Ko-fi donate button, rendered below the chat via gr.HTML.
kofi_button_html = """
<div style="text-align: center; padding: 20px;">
    <a href='https://ko-fi.com/Z8Z51E5TIG' target='_blank'>
        <img height='36' style='border:0px;height:36px;' src='https://storage.ko-fi.com/cdn/kofi5.png?v=6' border='0' alt='Buy Me a Coffee at ko-fi.com' />
    </a>
</div>
"""

# Small-print donation banner shown between the chat and the Ko-fi button.
donation_solicitation_html = """
<div style="text-align: center; font-size: x-small; margin-bottom: 5px;">
    The Cybernetic Criminal Computing Corporation presents: XORTRON, free of charge, unlimited, no login, no signup, no bullshit. Im sure even a low-life deadbeat freeloader like yourself can at least throw some spare change right? - Support Xortron @ ko-fi.com/xortron<br>
    
</div>
"""

# Global CSS: forces the Orbitron webfont onto the whole Gradio UI
# (inputs, buttons, labels, and chatbot messages).
custom_css = """
@import url('https://fonts.googleapis.com/css2?family=Orbitron:wght@400;700&display=swap');
body, .gradio-container {
    font-family: 'Orbitron', sans-serif !important;
}
.gr-button { font-family: 'Orbitron', sans-serif !important; }
.gr-input { font-family: 'Orbitron', sans-serif !important; }
.gr-label { font-family: 'Orbitron', sans-serif !important; }
.gr-chatbot .message { font-family: 'Orbitron', sans-serif !important; }
"""

# Assemble the UI: dark theme, Ko-fi widget script injected into <head>,
# and the Orbitron font CSS applied globally.
with gr.Blocks(theme="dark", head=kofi_script, css=custom_css) as demo:
    gr.ChatInterface(
        fn=respond,               # The function to call when a message is sent
        chatbot=gr.Chatbot(       # Configure the chatbot display area
            height=800,           # Set the height of the chat history display to 800px
            label="Xortron - Criminal Computing"  # Set the label
        )
    )

    # Donation banner and button rendered below the chat interface.
    gr.HTML(donation_solicitation_html)
    gr.HTML(kofi_button_html)


if __name__ == "__main__":
    # NOTE(review): the module-level init above already raises RuntimeError
    # when the key is missing, so this branch is effectively unreachable;
    # kept as a belt-and-braces diagnostic in case that init is changed.
    if not FEATHERLESS_API_KEY:
        print("\nCRITICAL ERROR: FEATHERLESS_API_KEY is not set.")
        print("Please ensure it's set as a secret in your Hugging Face Space settings or as an environment variable.\n")

    try:
        # Serialize inference requests: one concurrent call to the API.
        demo.queue(default_concurrency_limit=1)

        # share=True exposes a public Gradio link; show_api=False hides the
        # auto-generated API docs page.
        demo.launch(show_api=False, share=True)
    except NameError as name_err:
        print(f"Gradio demo could not be launched. 'client' might not have been initialized: {name_err}")
    # Renamed from `re` to avoid shadowing the stdlib `re` module name.
    except RuntimeError as runtime_err:
        print(f"Gradio demo could not be launched due to an error during client initialization: {runtime_err}")
    except Exception as e:
        print(f"An unexpected error occurred when trying to launch Gradio demo: {e}")