# Better_tool_calling / backup.py
import re
import sys
import json
import random
import mimetypes
from uuid import uuid4
from curl_cffi import requests, CurlMime
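
# NOTE: create_account() relies on an Emailnator helper class (a wrapper around a
# disposable-email service) that is not defined in this file. It is assumed to be
# importable from a companion module, e.g.:
# from emailnator import Emailnator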
cookies = {
'pplx.visitor-id': 'da5c198a-2ba9-4616-ac21-b6fa92040084',
'cf_clearance': 'IWD6WpGAz8BqOhKbqChKS4lZzlx9Zz2MwRJig.Eg23g-1753551228-1.2.1.1-FCjtyp4ikXRY2fTZE.fqFLn_cnlpdwQVmq9aS.HlfVszUnjKIjh6QmpaATTzqGgj3Kb34TbTrSAN2GEWk_IddRM8GJiAHNpm5oa5FGnTzw2b93Neetyd5ivFzgUGxC.mwFj8Ml7twAjUZ4wYPnEp5Y6N7ciMOfxDws_rJK8Lm3R1t0Urll7NiU21JCQwkp_9F1rDEBL8SnwUN97y60kQa9UIHvdorzv7moe8teN.U34',
'segmented-control-popover-studio': '1',
'sidebarHiddenHubs': '[]',
'_gcl_au': '1.1.1446397632.1751723919',
'__Secure-next-auth.session-token': 'eyJhbGciOiJkaXIiLCJlbmMiOiJBMjU2R0NNIn0..zpX6jlRXnbVw-ZTx.2Sj3psYc90towqn_3PAAcXeCpl64TZVVPFiON1LZjTKExF9sTxZm4ssg4FlFm4h7XY8uDzsVHQvAZ8yZJvcr1sjLuqofOd2FU-eLptrQzdGvkrMtOdm7h1jPBf2lRzlOcKH2IF8Cz0u94lQmCl1DYtSGnR3vlhhM0lKbLwnGTEFWttUUYzFZcOJXV-rYdjPa8rdl7zPWLqHPqjAbqCY6J9VVKsx_xn5zM5DCZ0pOFzmVfomKSpLhN9JO-ySEh0T-nIAtJtZENm2roUZAWGS5pobT_mA1zSWqx9gH2E8t2xH_qdPN3KYyH_4MZ7MwTUtt3i-LptiuZwg0yJak_bfr40bQ3b8-Kt7AjQKzboiI0VBmhbW20odKMXJnD3lABt9yuxpdK_Tx2SBNZQbec6MsDEAgz8eWNVxZ6kj0O1_sngdE_E2sY8BoCw.8CxZdLCzf4bX1KPQ-mbeOQ',
'pplx.metadata': '{%22qc%22:11%2C%22qcu%22:13%2C%22qcm%22:0%2C%22qcc%22:10%2C%22qcr%22:5%2C%22qcdr%22:1%2C%22qcs%22:0%2C%22qcd%22:0%2C%22hli%22:true%2C%22hcga%22:false%2C%22hcds%22:false%2C%22hso%22:false%2C%22hfo%22:false%2C%22fqa%22:1751723941278%2C%22lqa%22:1753690592635}',
'__podscribe_perplexityai_referrer': '_',
    '__podscribe_perplexityai_landing_url': 'https://www.perplexity.ai/?login-source=tryPro&login-new=false',
'_fbp': 'fb.1.1751723930574.245699921229618591',
'pplx.search-models-v3': '{%22research%22:%22pplx_alpha%22%2C%22search%22:%22claude37sonnetthinking%22}',
'__stripe_mid': 'a3785ab0-0fd2-4fe0-ac88-82067750fdb649476b',
'intercom-id-l2wyozh0': '314b3a1e-1c7a-45aa-9aa2-bf756467787b',
'intercom-session-l2wyozh0': '',
'intercom-device-id-l2wyozh0': 'acc4d380-2161-4a82-b7ee-91052897761d',
'AWSALB': 'pN1s+X8xGuy/Rd4EWzNH5KXhJCPPREibux4M/SALeVseSEQnnZdoHd/hDDExDLwWe+DzjdNCdXre+rXnyN8Sv4NeKjtKLs2N9GIW26ejOLMUZPPvJ+zo9NC2TiS/',
'AWSALBCORS': 'pN1s+X8xGuy/Rd4EWzNH5KXhJCPPREibux4M/SALeVseSEQnnZdoHd/hDDExDLwWe+DzjdNCdXre+rXnyN8Sv4NeKjtKLs2N9GIW26ejOLMUZPPvJ+zo9NC2TiS/',
'__ps_fva': '1753545417872',
'pplx.source-selection-v3-space-': '[]',
'pplx.source-selection-v3-space-deaca082-6dce-4526-a678-ace2198255bb': '[%22web%22]',
'pplx.search-mode': 'search',
'ph_phc_TXdpocbGVeZVm5VJmAsHTMrCofBQu3e0kN8HGMNGTVW_posthog': '%7B%22distinct_id%22%3A%2201984b16-8c3d-7985-9031-0eba8f3aea5a%22%2C%22%24sesid%22%3A%5B1753606545782%2C%2201984b16-8c3c-7faa-a67a-a0525a91704a%22%2C1753606425660%5D%7D',
'__cflb': '02DiuDyvFMmK5p9jVbVnMNSKYZhUL9aGmwpP9ftWXpJ3N',
'_rdt_uuid': '1751723930283.ed01281c-7332-44d4-85de-08299d6fcb90',
'__cf_bm': 'FAi5I7TNSaAZ9BYFgNnZKfEFzEQWTZdsjZpZYqheRZU-1753690586-1.0.1.1-Ahjp.tfhGXFmq4QYYQxFI3pnAwaen5yFuPhQxqWtpZ7XkF0f6eYsqvGyiqhoU6P2eniAQGsnnB_EAfqXZDgm2hBVo4nO8mqt4GK6WCTEaDQ',
'pplx.session-id': '0b02bcfe-a439-4533-9219-8edb6a14734d',
    '_dd_s': 'aid=4abdc694-575a-41e1-98ce-bb6396152071&rum=2&id=0eb417e5-ac2e-486d-b955-ed978e39692a&created=1753690584764&expire=1753691492631&logs=0',
'comet-custom-color-themes-enabled': 'true',
'__stripe_sid': 'b1ddb3f8-09ef-45fe-80e5-4884ec3fcb279dd03c',
}
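
# The cookies above are hard-coded, account-specific session values copied from a
# logged-in perplexity.ai browser session. Replace them with your own (or pass your
# own dict to Client(cookies=...)) before using the authenticated features.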
class Client:
    '''
    A client for interacting with the Perplexity AI API. Pass cookies from a
    logged-in perplexity.ai session for an authenticated client; without them
    the client has no pro-query or file-upload quota until create_account() is used.
    '''
def __init__(self, cookies={}):
self.session = requests.Session(headers={
'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
'accept-language': 'en-US,en;q=0.9',
'cache-control': 'max-age=0',
'dnt': '1',
'priority': 'u=0, i',
'sec-ch-ua': '"Not;A=Brand";v="24", "Chromium";v="128"',
'sec-ch-ua-arch': '"x86"',
'sec-ch-ua-bitness': '"64"',
'sec-ch-ua-full-version': '"128.0.6613.120"',
'sec-ch-ua-full-version-list': '"Not;A=Brand";v="24.0.0.0", "Chromium";v="128.0.6613.120"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-model': '""',
'sec-ch-ua-platform': '"Windows"',
'sec-ch-ua-platform-version': '"19.0.0"',
'sec-fetch-dest': 'document',
'sec-fetch-mode': 'navigate',
'sec-fetch-site': 'same-origin',
'sec-fetch-user': '?1',
'upgrade-insecure-requests': '1',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36',
}, cookies=cookies, impersonate='chrome')
        self.own = bool(cookies)  # True when the caller supplied their own account cookies
        self.copilot = 0 if not cookies else float('inf')  # remaining enhanced (pro) queries
        self.file_upload = 0 if not cookies else float('inf')  # remaining file uploads
self.signin_regex = re.compile(r'"(https://www\.perplexity\.ai/api/auth/callback/email\?callbackUrl=.*?)"')
self.timestamp = format(random.getrandbits(32), '08x')
self.session.get('https://www.perplexity.ai/api/auth/session')
def create_account(self, cookies):
        '''
        Create a new Perplexity account via a disposable Emailnator address and
        sign this session into it (grants 5 pro queries and 10 file uploads).
        '''
while True:
try:
emailnator_cli = Emailnator(cookies)
resp = self.session.post('https://www.perplexity.ai/api/auth/signin/email', data={
'email': emailnator_cli.email,
'csrfToken': self.session.cookies.get_dict()['next-auth.csrf-token'].split('%')[0],
'callbackUrl': 'https://www.perplexity.ai/',
'json': 'true'
})
if resp.ok:
new_msgs = emailnator_cli.reload(wait_for=lambda x: x['subject'] == 'Sign in to Perplexity', timeout=20)
if new_msgs:
break
else:
                    print('Perplexity account creation error:', resp)
except Exception:
pass
msg = emailnator_cli.get(func=lambda x: x['subject'] == 'Sign in to Perplexity')
new_account_link = self.signin_regex.search(emailnator_cli.open(msg['messageID'])).group(1)
self.session.get(new_account_link)
self.copilot = 5
self.file_upload = 10
return True
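
    # Hypothetical usage sketch (the cookies passed here are cookies for the
    # Emailnator disposable-email helper, not perplexity.ai cookies):
    # client = Client()
    # client.create_account(emailnator_cookies)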
def search(self, query, mode='auto', model=None, sources=['web'], files={}, stream=False, language='en-US', follow_up=None, incognito=False):
        '''
        Send a query to Perplexity. Returns a generator of SSE message dicts when
        stream=True, otherwise the final markdown answer (or None if not found).
        '''
assert mode in ['auto', 'pro', 'reasoning', 'deep research'], 'Search modes -> ["auto", "pro", "reasoning", "deep research"]'
# assert model in {
# 'auto': [None],
# 'pro': [None, 'sonar', 'gpt-4.5', 'gpt-4o', 'claude 3.7 sonnet', 'gemini 2.0 flash', 'grok-2'],
# 'reasoning': [None, 'r1', 'o3-mini', 'claude 3.7 sonnet'],
# 'deep research': [None]
# }[mode] if self.own else True, '''Models for modes -> {
# 'auto': [None],
# 'pro': [None, 'sonar', 'gpt-4.5', 'gpt-4o', 'claude 3.7 sonnet', 'gemini 2.0 flash', 'grok-2'],
# 'reasoning': [None, 'r1', 'o3-mini', 'claude 3.7 sonnet','grok4'],
# 'deep research': [None]
# }'''
assert all([source in ('web', 'scholar', 'social') for source in sources]), 'Sources -> ["web", "scholar", "social"]'
assert self.copilot > 0 if mode in ['pro', 'reasoning', 'deep research'] else True, 'You have used all of your enhanced (pro) queries'
assert self.file_upload - len(files) >= 0 if files else True, f'You have tried to upload {len(files)} files but you have {self.file_upload} file upload(s) remaining.'
self.copilot = self.copilot - 1 if mode in ['pro', 'reasoning', 'deep research'] else self.copilot
self.file_upload = self.file_upload - len(files) if files else self.file_upload
uploaded_files = []
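        # For each attachment: ask the API for a presigned upload URL, POST the raw
        # file as multipart form data to that bucket, then record the final object URL.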
for filename, file in files.items():
file_type = mimetypes.guess_type(filename)[0]
file_upload_info = (self.session.post(
'https://www.perplexity.ai/rest/uploads/create_upload_url?version=2.18&source=default',
json={
'content_type': file_type,
                    'file_size': len(file),  # content length in bytes (sys.getsizeof would add Python object overhead)
'filename': filename,
'force_image': False,
'source': 'default',
}
)).json()
mp = CurlMime()
for key, value in file_upload_info['fields'].items():
mp.addpart(name=key, data=value)
mp.addpart(name='file', content_type=file_type, filename=filename, data=file)
upload_resp = self.session.post(file_upload_info['s3_bucket_url'], multipart=mp)
if not upload_resp.ok:
raise Exception('File upload error', upload_resp)
if 'image/upload' in file_upload_info['s3_object_url']:
uploaded_url = re.sub(
r'/private/s--.*?--/v\d+/user_uploads/',
'/private/user_uploads/',
upload_resp.json()['secure_url']
)
else:
uploaded_url = file_upload_info['s3_object_url']
uploaded_files.append(uploaded_url)
json_data = {
'query_str': query,
'params':
{
'attachments': uploaded_files + follow_up['attachments'] if follow_up else uploaded_files,
'frontend_context_uuid': str(uuid4()),
'frontend_uuid': str(uuid4()),
'is_incognito': incognito,
'language': language,
'last_backend_uuid': follow_up['backend_uuid'] if follow_up else None,
'mode': 'concise' if mode == 'auto' else 'copilot',
'model_preference': {
'auto': {
None: 'turbo'
},
'pro': {
None: 'pplx_pro',
'sonar': 'experimental',
'gpt-4.5': 'gpt45',
'gpt-4o': 'gpt4o',
'claude 3.7 sonnet': 'claude2',
'gemini 2.0 flash': 'gemini2flash',
'grok-2': 'grok'
},
'reasoning': {
None: 'pplx_reasoning',
'r1': 'r1-1776',
'o3-mini': 'o3mini',
'grok-4': 'grok4',
'gpt-4.1': 'gpt41',
'gemini-2.5-pro': 'gemini2flash',
'o3':'o3',
'claude-sonnet-4-20250514':'claude37sonnetthinking',
'sonar-pro':'pplx_alpha'
},
'deep research': {
None: 'pplx_alpha'
}
}[mode][model],
'source': 'default',
'sources': sources,
'version': '2.18'
}
}
resp = self.session.post('https://www.perplexity.ai/rest/sse/perplexity_ask', json=json_data, stream=True)
chunks = []
# Generator for streaming responses
def stream_response(resp):
for chunk in resp.iter_lines(delimiter=b'\r\n\r\n'):
content = chunk.decode('utf-8')
if content.startswith('event: message\r\n'):
# Parse the JSON data from the message
content_json = json.loads(content[len('event: message\r\ndata: '):])
# Yield the entire chunk for the consumer to process
yield content_json
elif content.startswith('event: end_of_stream\r\n'):
# End the generator when the stream is finished
return
# If streaming is requested, return the generator
if stream:
return stream_response(resp)
# Non-streaming: process the full response to find the final answer
full_answer = None
for chunk in resp.iter_lines(delimiter=b'\r\n\r\n'):
content = chunk.decode('utf-8')
if content.startswith('event: message\r\n'):
content_json = json.loads(content[len('event: message\r\ndata: '):])
# Check for the final answer within the new 'blocks' structure
for block in content_json.get('blocks', []):
if block.get('intended_usage') == 'ask_text':
markdown = block.get('markdown_block', {})
if markdown.get('progress') == 'DONE' and 'answer' in markdown:
full_answer = markdown['answer']
elif content.startswith('event: end_of_stream\r\n'):
# Stop processing once the stream ends
break
# Return the final answer, or None if not found
if full_answer is not None:
return full_answer
else:
print("No full response.")
return None
# Example (streaming):
# perplexity_cli = Client()
# # incognito=True enables incognito mode for people using their own account
# resp = perplexity_cli.search('Create an image on happiness', mode='auto', model=None, sources=[], files={}, stream=True, language='en-US', follow_up=None, incognito=False)
# for i in resp:
#     try:
#         print(i["blocks"][0]["markdown_block"]["chunks"][0], end="")
#     except (KeyError, IndexError):
#         pass
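
# A minimal non-streaming sketch (assumes the hard-coded `cookies` dict above is
# still valid and the account has pro-query quota left):
# perplexity_cli = Client(cookies)
# answer = perplexity_cli.search(
#     'Summarize the health benefits of regular sleep',
#     mode='pro',
#     model='gpt-4o',
#     sources=['web'],
#     stream=False,
# )
# print(answer)  # final markdown answer, or None if no completed answer block arrived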