Ravi theja K committed on
Update app.py
app.py
CHANGED
@@ -138,21 +138,15 @@ def generate_arctic_response():
         abort_chat(f"Conversation length too long. Please keep it under {max_tokens} tokens.")
 
     st.session_state.messages.append({"role": "assistant", "content": ""})
     for event in replicate.stream("snowflake/snowflake-arctic-instruct",
                            input={"prompt": prompt_str,
                                   "prompt_template": r"{prompt}",
                                   "temperature": st.session_state.temperature,
                                   "top_p": st.session_state.top_p,
-                                  })
-
-
-
-        st.session_state.messages[-1]["content"] += str(event)
-        yield str(event)
-
-    # Final safety check...
-    if not check_safety():
-        abort_chat("I cannot answer this question.")
+                                  }):
+        print(str(event), end="")
+
+
 
 if __name__ == "__main__":
     main()
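
For reference, below is a minimal standalone sketch of the streaming call as it reads after this commit, assuming the replicate Python client is installed and REPLICATE_API_TOKEN is set in the environment; prompt_str and the sampling values are placeholders for what app.py reads from st.session_state.

    # Standalone sketch of the new streaming path (not the full app.py).
    # Assumptions: `pip install replicate` and REPLICATE_API_TOKEN exported.
    import replicate

    # Placeholder inputs; in app.py these come from st.session_state.
    prompt_str = "User: What is Snowflake Arctic?\nAssistant:"
    temperature = 0.3
    top_p = 0.9

    response = ""
    for event in replicate.stream("snowflake/snowflake-arctic-instruct",
                                  input={"prompt": prompt_str,
                                         "prompt_template": r"{prompt}",
                                         "temperature": temperature,
                                         "top_p": top_p,
                                         }):
        token = str(event)    # each streamed event is one chunk of generated text
        response += token     # optionally accumulate the full reply
        print(token, end="")  # echo tokens as they arrive, as in the committed code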