NeuralInternet Asif Ahmad committed
Commit b23917e · 0 Parent(s)

Duplicate from asifhugs/InfiniteGPT


Co-authored-by: Asif Ahmad <asifhugs@users.noreply.huggingface.co>

Files changed (4)
  1. .gitattributes +34 -0
  2. README.md +13 -0
  3. app.py +62 -0
  4. requirements.txt +2 -0
.gitattributes ADDED
@@ -0,0 +1,34 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,13 @@
+ ---
+ title: InfiniteGPT
+ emoji: 📚
+ colorFrom: indigo
+ colorTo: yellow
+ sdk: streamlit
+ sdk_version: 1.19.0
+ app_file: app.py
+ pinned: false
+ duplicated_from: asifhugs/InfiniteGPT
+ ---
+
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py ADDED
@@ -0,0 +1,62 @@
+ import openai
+ from concurrent.futures import ThreadPoolExecutor
+ import streamlit as st
+ import os
+
+ # Add your own OpenAI API key
+ # openai.api_key = "sk-XXXXXXXXXXXXXXXXXXXXX"
+
+ openai.api_key = os.environ["OPENAI_API_KEY"]
+
+ def load_text(file_path):
+     with open(file_path, 'r') as file:
+         return file.read()
+
+ def save_to_file(responses, output_file):
+     with open(output_file, 'w') as file:
+         for response in responses:
+             file.write(response + '\n')
+
+ def call_openai_api(chunk):
+     response = openai.ChatCompletion.create(
+         model="gpt-3.5-turbo",
+         messages=[
+             {"role": "system", "content": "PASS IN ANY ARBITRARY SYSTEM VALUE TO GIVE THE AI AN IDENTITY"},
+             {"role": "user", "content": f"OPTIONAL PRE-PROMPTING FOLLOWED BY YOUR DATA TO PASS IN: {chunk}."},
+         ],
+         max_tokens=1750,
+         n=1,
+         stop=None,
+         temperature=0.5,
+     )
+     return response.choices[0]['message']['content'].strip()
+
+ def split_into_chunks(text, tokens=1500):
+     words = text.split()
+     chunks = [' '.join(words[i:i + tokens]) for i in range(0, len(words), tokens)]
+     return chunks
+
+ def process_chunks(input_text):
+     chunks = split_into_chunks(input_text)
+
+     # Processes chunks in parallel
+     with ThreadPoolExecutor() as executor:
+         responses = list(executor.map(call_openai_api, chunks))
+
+     return "\n".join(responses)
+
+ def main():
+     st.title("InfiniteGPT")
+     st.write("Input unlimited size text and process it using the OpenAI API.")
+     st.write("For more details and credit check this [GitHub repository](https://github.com/emmethalm/infiniteGPT).")
+
+     input_text = st.text_area("Enter or paste your text here", height=300)
+     process_button = st.button("Process Text")
+
+     if process_button and input_text:
+         with st.spinner("Processing text..."):
+             output_text = process_chunks(input_text)
+         st.write(output_text)
+
+ if __name__ == "__main__":
+     main()
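
The pattern app.py implements is worth calling out: split the input into fixed-size word chunks, then fan the chunks out to the API with a thread pool and join the responses in input order. Below is a minimal, self-contained sketch of the same pattern with the API call replaced by a local stub; fake_api_call and the sample input are illustrative assumptions for offline testing, not part of this commit.

from concurrent.futures import ThreadPoolExecutor

def split_into_chunks(text, tokens=1500):
    # Same word-based chunking as app.py: roughly `tokens` whitespace-separated words per chunk.
    words = text.split()
    return [' '.join(words[i:i + tokens]) for i in range(0, len(words), tokens)]

def fake_api_call(chunk):
    # Stand-in for call_openai_api(); returns a placeholder string so the
    # fan-out pattern can be exercised without an API key.
    return f"[processed a chunk of {len(chunk.split())} words]"

def process_chunks(input_text, worker=fake_api_call):
    chunks = split_into_chunks(input_text)
    # executor.map preserves input order, so responses line up with chunks.
    with ThreadPoolExecutor() as executor:
        responses = list(executor.map(worker, chunks))
    return "\n".join(responses)

if __name__ == "__main__":
    sample = "lorem " * 4000  # about 4000 words -> 3 chunks at the default size
    print(process_chunks(sample))

Note that the chunk size is counted in whitespace-separated words, not model tokens, so the 1500-word default is only a rough proxy for the model's actual context usage.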
requirements.txt ADDED
@@ -0,0 +1,2 @@
+ openai
+ streamlit
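
One caveat about the unpinned dependencies: app.py calls the legacy openai.ChatCompletion interface, which the openai package removed in its 1.0 release, so this Space presumably expects a pre-1.0 SDK (or a migration to the newer client API). A small, purely illustrative startup check, not part of this commit, could flag the mismatch:

from importlib.metadata import version

# Assumption: the app targets the pre-1.0 SDK; openai>=1.0 removed the
# legacy openai.ChatCompletion interface used in app.py.
openai_version = version("openai")
if int(openai_version.split(".")[0]) >= 1:
    print(f"openai {openai_version} installed: pin `openai<1.0` or migrate app.py to the client-based API")
else:
    print(f"openai {openai_version} still provides the legacy ChatCompletion interface")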