"""InfiniteGPT: process arbitrarily large text through the OpenAI chat API.

The input text is split into word-based chunks, each chunk is sent to the
API in parallel via a thread pool, and the concatenated responses are shown
in a simple Streamlit UI.
"""

import os
from concurrent.futures import ThreadPoolExecutor

import openai
import streamlit as st

# Add your own OpenAI API key
# openai.api_key = "sk-XXXXXXXXXXXXXXXXXXXXX"
openai.api_key = os.environ["OPENAI_API_KEY"]


def load_text(file_path):
    """Return the entire contents of the text file at *file_path*."""
    # Explicit UTF-8 so behavior doesn't depend on the platform default encoding.
    with open(file_path, 'r', encoding='utf-8') as file:
        return file.read()


def save_to_file(responses, output_file):
    """Write each string in *responses* to *output_file*, one per line."""
    with open(output_file, 'w', encoding='utf-8') as file:
        for response in responses:
            file.write(response + '\n')


def call_openai_api(chunk):
    """Send one text *chunk* to the chat API and return the stripped reply text."""
    response = openai.ChatCompletion.create(
        model="gpt-4",
        messages=[
            {"role": "system", "content": "PASS IN ANY ARBITRARY SYSTEM VALUE TO GIVE THE AI AN IDENITY"},
            {"role": "user", "content": f"OPTIONAL PREPROMPTING FOLLOWING BY YOUR DATA TO PASS IN: {chunk}."},
        ],
        max_tokens=1750,
        n=1,
        stop=None,
        temperature=0.5,
    )
    return response.choices[0]['message']['content'].strip()


def split_into_chunks(text, tokens=1500):
    """Split *text* into chunks of at most *tokens* whitespace-separated words.

    NOTE: despite the parameter name, the split is by words, not model tokens;
    1500 words is a rough margin under the model's context limit.
    """
    words = text.split()
    chunks = [' '.join(words[i:i + tokens]) for i in range(0, len(words), tokens)]
    return chunks


def process_chunks(input_text):
    """Chunk *input_text*, query the API for each chunk in parallel,
    and return the responses joined with newlines (in chunk order)."""
    chunks = split_into_chunks(input_text)

    # Processes chunks in parallel; executor.map preserves input order.
    with ThreadPoolExecutor() as executor:
        responses = list(executor.map(call_openai_api, chunks))

    return "\n".join(responses)


def main():
    """Render the Streamlit UI: input column, process button, output column."""
    st.title("InfiniteGPT")
    st.write("Input unlimited size text and process it using the GPT-4 OpenAI API.")
    st.write("For more details and credit check this [GitHub repository](https://github.com/emmethalm/infiniteGPT).")

    # Define columns for input and output
    col1, col2 = st.columns(2)

    with col1:
        input_text = st.text_area("Enter or paste your text here", height=300)
        process_button = st.button("Process Text")

    # Initialize output_text as an empty string
    output_text = ''

    # Process text if the button is pressed
    if process_button and input_text:
        with st.spinner("Processing text..."):
            output_text = process_chunks(input_text)

    with col2:
        st.text_area("Output", output_text, height=300)
        clear_button = st.button("Clear Text")
        if clear_button:
            # Rerunning the script resets the widgets to their defaults,
            # which clears both text areas.
            input_text = ''
            st.experimental_rerun()


if __name__ == "__main__":
    main()