repo_name
stringlengths 7
111
| __id__
int64 16.6k
19,705B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 5
151
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 2
values | repo_url
stringlengths 26
130
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 14.6k
687M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 12
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
10.2M
⌀ | gha_stargazers_count
int32 0
178k
⌀ | gha_forks_count
int32 0
88.9k
⌀ | gha_open_issues_count
int32 0
2.72k
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 1
class | gha_disabled
bool 1
class | content
stringlengths 10
2.95M
| src_encoding
stringclasses 5
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 10
2.95M
| extension
stringclasses 19
values | num_repo_files
int64 1
202k
| filename
stringlengths 4
112
| num_lang_files
int64 1
202k
| alphanum_fraction
float64 0.26
0.89
| alpha_fraction
float64 0.2
0.89
| hex_fraction
float64 0
0.09
| num_lines
int32 1
93.6k
| avg_line_length
float64 4.57
103
| max_line_length
int64 7
931
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
rampsy/Signature-Verification-System | 8,907,762,192,931 | 84584cd7b41aaf768f409cf4b9c6f57aa906e7d4 | b9b44c3689cb9e7e74783adb49ff6b10ba470281 | /final_signature.py | 6166d0e5e7deb1050e9aef61184b631380b8e165 | []
| no_license | https://github.com/rampsy/Signature-Verification-System | b2f1cf5caab0eab563a817f3a3999424a2f80566 | f172b9bb07ab994d65bb9ba4a6786d5bd0c9d6ff | refs/heads/master | 2020-04-08T00:13:42.305689 | 2018-11-28T16:51:10 | 2018-11-28T16:51:10 | 158,842,966 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """
Created on Fri Nov 23 14:02:40 2018
@author: Rudra
"""
###############################################################################
#Importing packages
import numpy as np
import cv2
from sklearn.externals import joblib #For loading saved model
import feature_extract #Python code containing code for feature extraction from image
from tkinter import filedialog #For opening file explorer
from tkinter import font #For changing font of the words
from tkinter import *
import os
from PIL import Image, ImageTk
import identity_trail #For training classifier for identity
import classifier_model #For training classifier for identifying the status of the signature
import pandas as pd
import time
###############################################################################
# ---- Main window and shared widgets -----------------------------------------
root=Tk() #Root Tk window for the whole application
root.title('GUI FOR SIGNATURE VERIFICATION SYSTEM') #Window title bar text
fname = Canvas(root,height=200,width=200,bg='white') #Canvas for displaying claimed signature
fname.grid(row=9,column=0)
fname1 = Canvas(root,height=200,width=200,bg='white') #Canvas for displaying original signature of claimed identity
fname1.grid(row=9,column=1,sticky=W)
frame1=Frame(root,width=90,height=20,bg='white') #Frame for displaying the status of the signature
frame1.grid(row=9,column=2,sticky=W)
# Load the two pre-trained models saved by change_directory():
#   classifier2.sav -> identity classifier (c1)
#   classifier1.sav -> genuine/forged status classifier (c2)
try:
    c1=joblib.load('classifier2.sav') #Loading classifier model for identity
    c2=joblib.load('classifier1.sav') #Loading classifier model for status of the signature
except:
    # NOTE(review): bare except also hides unrelated failures (e.g. a corrupt
    # pickle); verify() will later fail with its own error message if c1/c2
    # are missing.
    print('Identity classifier file is not present') #Error message if classifier model is not present
###############################################################################
# Database[0] holds the directory of genuine reference signatures.  It is read
# from directory.csv (written by change_directory) and falls back to a bundled
# sample folder when that path no longer exists.
Database=[]
l=pd.read_csv('directory.csv') #Reading current directory name
if os.path.exists(np.array(l).tolist()[0][0]):
    Database.append(np.array(l).tolist()[0][0])
else:
    # NOTE(review): mixed '\' and '/' separators; works on Windows only —
    # consider os.path.join if this needs to be portable.
    Database.append(os.getcwd()+'\Dataset\dataset4/real1')
###############################################################################
'''
FUNCTION NAME:verify
INPUT:NONE
OUTPUT:Status of the identity of the claimed signature
LOGIC:Two seperate model is trained independent of each other one for identity
and another for status of the signature and both the model are tuned such that
model gives max accuracy_score on test case and then both the model is used for
finding out the status of the claimed signature.
'''
def verify():
    """Verify the signature image chosen by the user against the claimed id.

    Opens a file dialog for a signature image, extracts its features and runs
    the two pre-trained models: ``c1`` predicts the signer identity, ``c2``
    predicts whether the signature is genuine.  The claimed signature and the
    reference signature from the database folder are drawn side by side and a
    MATCHED / NOT MATCHED verdict is shown in the status frame.
    """
    # Create the status frame before the try block so the error label in the
    # handler always has a valid parent, even if the very first GUI call fails.
    frame2 = Frame(frame1, width=90, height=20, bg='white')
    frame2.grid(row=9, column=2, sticky=W)
    try:
        root.filename = filedialog.askopenfilename(initialdir="/", title='select file',\
                                                   filetypes=(('PNG', '*.png'), ('all files', '*.*')))
        file = root.filename  #Path of the selected signature image
        img = cv2.imread(file)
        image = [(file, img)]
        feature = feature_extract.extract(image)
        genuine = c2.predict(np.array(feature).reshape(1, -1))   #1 == genuine
        identity = c1.predict(np.array(feature).reshape(1, -1))  #predicted signer id
        pil_image = Image.open(file)
        # Image.LANCZOS is the same filter Image.ANTIALIAS aliased; ANTIALIAS
        # was removed in Pillow 10, so this keeps working on new Pillow too.
        pil_image = pil_image.resize((200, 200), Image.LANCZOS)
        image = ImageTk.PhotoImage(pil_image)
        fname.image = image  #Keep a reference so Tk does not garbage-collect it
        fname.create_image((0, 0), image=image, anchor=NW)
        claimed_id = entry.get()  #Identity number typed by the user
        cudi = Database[0]
        di = os.listdir(cudi)
        im = []
        # Reference files are assumed to carry a two-digit id right before the
        # extension, e.g. "...07.png" -- TODO confirm the naming convention.
        for i_d in range(len(di)):
            if int(claimed_id) == int(di[i_d][-6:-4]):
                im.append(di[i_d])
                break
        pil_image1 = Image.open(os.path.join(cudi, im[0]))
        pil_image1 = pil_image1.resize((200, 200), Image.LANCZOS)
        image1 = ImageTk.PhotoImage(pil_image1)
        fname1.image = image1
        fname1.create_image((0, 0), image=image1, anchor=NW)
        # Verdict: both the predicted identity and the genuineness must agree.
        if int(claimed_id) != identity[0]:
            label8 = Label(frame2, text='NOT MATCHED')
            label8.grid(row=9, column=2)
        else:
            if int(genuine[0]) == 1:
                label8 = Label(frame2, text='MATCHED')
                label8.grid(row=9, column=2)
            else:
                label8 = Label(frame2, text='NOT MATCHED')
                label8.grid(row=9, column=2)
    except Exception:
        # Narrowed from a bare except so Ctrl-C is not swallowed.  Covers:
        # dialog cancelled, non-numeric id, no matching reference file.
        print('CLAIMED ID NUMBER NOT FOUND IN DATABASE')
        label8 = Label(frame2, text=' CLAIMED ID NUMBER NOT FOUND \n \
                       IN DATABASE OR NO FILE IS SELECTED')
        label8.grid(row=9, column=2)
###############################################################################
###############################################################################
'''
FUNCTION NAME:change_directory
INPUT:NONE
OUTPUT:New identityclassifier file and statusclassifier file are trained on the selected database
LOGIC:RandomForestClassifier model is trained on new database to identify the
identity of the signatures and the status of the signature, and then model is
tuned by taking accuracy_score as a measure and changing n_estimators and Random_state
values and best value is found out for which model performs best on test case.
'''
def change_directory():
    """Retrain both classifiers on a newly selected signature database.

    Prompts for the genuine-signature folder and then for the forged-signature
    folder, retrains the identity classifier and the genuine/forged status
    classifier, persists them to disk, and asks the user to restart the app so
    the new models are loaded.
    """
    Database.clear()
    try:
        frame4 = Frame(frame3, width=90, height=20, bg='white')
        frame4.grid(row=17, column=2)
        label14 = Label(frame4, text='PROCESSING....')
        label14.grid(row=17, column=2)
        filename = filedialog.askdirectory()   #Genuine signatures folder
        list1 = [filename]
        df = pd.DataFrame(data={"col1": list1})
        # Persist the chosen directory so it is restored on the next start.
        df.to_csv("directory.csv", sep=',', index=False)
        filename1 = filedialog.askdirectory()  #Forged signatures folder
        num = len(os.listdir(filename))
        print('Training identity classifier')
        genuine_image_database = feature_extract.load_images_from_folder(filename)
        genuine_feature_database = feature_extract.extract(genuine_image_database)
        n, r, clas = identity_trail.tuning(genuine_feature_database, num, genuine_image_database)
        print('Training status classifier')
        forge_image_database = feature_extract.load_images_from_folder(filename1)
        forge_feature_database = feature_extract.extract(forge_image_database)
        clas1 = classifier_model.training_tuning(genuine_feature_database, forge_feature_database, num, forge_image_database)
        frame5 = Frame(frame4, width=90, height=20, bg='white')
        frame5.grid(row=17, column=2)
        label14 = Label(frame5, text='......DONE......')
        label14.grid(row=17, column=2)
        # NOTE(review): time.sleep blocks the Tk event loop, so the labels may
        # not repaint until the sleep finishes.
        time.sleep(3)
        label14 = Label(frame5, text='Please run the file once again')
        label14.grid(row=17, column=2)
        # Comment/file mapping fixed: classifier2.sav is the IDENTITY model
        # (loaded as c1 at startup) and classifier1.sav is the STATUS model
        # (loaded as c2); the previous comments had them swapped.
        filen = 'classifier2.sav'    #Identity classifier
        joblib.dump(clas, filen)
        filen1 = 'classifier1.sav'   #Status classifier
        joblib.dump(clas1, filen1)
        Database.append(filename)
        label15 = Label(frame6, text=Database[0])
        label15.grid(row=15, column=1, sticky=E)
        time.sleep(5)
        print('Please run the file once again')
        root.destroy()
    except Exception as exc:
        # Narrowed from a bare except and reports WHY it failed (dialog
        # cancelled, unreadable folder, training error) instead of silence.
        print(f'NO FOLDER OR WRONG FOLDER IS SELECTED ({exc})')
###############################################################################
###############################################################################
'''
FUNCTION NAME:leave
INPUT:NONE
OUTPUT:NONE
LOGIC:IT Simply destroy or closes the windows/GUI
'''
def leave():
    """Print a goodbye message and close the main window (QUIT button)."""
    print('THANK YOU')
    root.destroy()
###############################################################################
###############################################################################
#BELOW CODE BUILDS THE STATIC GUI LAYOUT (labels, buttons, spacer frames)
# NOTE(review): this assignment shadows the imported tkinter 'font' module;
# it works because the module is not used again afterwards.
font=font.Font(family='Helvetica',size=10,weight='bold') #Font used by the main heading
label1=Label(root,text='GUI FOR SIGNATURE VERIFICATION SYSTEM')#Main heading label
label1['font']=font #Apply the heading font
label1.grid(row=0,column=0)
frame=Frame(root,width=15,height=15) #Spacer frame (reused for all gaps below)
frame.grid(row=1,column=0)
label2=Label(root,text='VERIFY SIGNATURES:',fg='red')
label2.grid(row=2,column=0)
frame=Frame(root,width=15,height=15)
frame.grid(row=3,column=0)
label3=Label(root,text='CLAIMED ID NUMBER:')
label3.grid(row=4,column=0,sticky=E)
entry=Entry(root) #Input for the claimed identity number, read by verify()
entry.grid(row=4,column=1,sticky=W)
frame=Frame(root,width=15,height=15)
frame.grid(row=5,column=0)
label4=Label(root,text='VERIFY:')
label4.grid(row=6,column=0,sticky=E)
button1=Button(root,text='click to choose and verify',command=verify)
button1.grid(row=6,column=1,sticky=W)
frame=Frame(root,width=15,height=15)
frame.grid(row=7,column=0)
label5=Label(root,text='CLAIMED SIGNATURE:')
label5.grid(row=8,column=0)
label6=Label(root,text='SIGNATURE IN DATABASE:')
label6.grid(row=8,column=1)
label7=Label(root,text='STATUS:')
label7.grid(row=8,column=2)
frame=Frame(root,width=15,height=15)
frame.grid(row=10,column=0)
label9=Label(root,text='DO YOU WANT TO CLOSE THE GUI, PRESS THE QUIT BUTTON:')
label9.grid(row=11,column=0,sticky=E)
button1=Button(root,text='QUIT',command=leave)
button1.grid(row=11,column=1,sticky=W)
frame=Frame(root,width=15,height=15)
frame.grid(row=12,column=0)
label10=Label(root,text='WANNA CHANGE THE DATABASE DIRECTORY:',fg='red')
label10.grid(row=13,column=0)
frame=Frame(root,width=15,height=15)
frame.grid(row=14,column=0)
label11=Label(root,text='CURRENT DIRECTORY:')
label11.grid(row=15,column=0,sticky=E)
frame6=Frame(root,width=60,height=15) #Holds the current-directory label; updated by change_directory()
frame6.grid(row=15,column=1)
label12=Label(frame6,text=Database[0])
label12.grid(row=15,column=1,sticky=E)
frame=Frame(root,width=15,height=15)
frame.grid(row=16,column=0)
label13=Label(root,text='CHOOSE DIRECTORY:\n (FIRST GENUINE FOLDER THAN FORGED FOLDER)')
label13.grid(row=17,column=0,sticky=E)
button2=Button(root,text='CLICK',command=change_directory)
button2.grid(row=17,column=1,sticky=W)
frame3=Frame(root,width=90,height=20,bg='white') #Status frame used by change_directory()
frame3.grid(row=17,column=2)
root.mainloop() #Enter the Tk event loop (blocks until the window is closed)
###############################################################################
| UTF-8 | Python | false | false | 10,768 | py | 6 | final_signature.py | 4 | 0.590918 | 0.559807 | 0 | 258 | 39.736434 | 125 |
thewhiteh4t/nexfil | 8,074,538,518,510 | a31a6a10cd6f156a4fdeafcb97ec59683d0157da | 37b99235c3f2311c2eb08c54e68bbf21805e775e | /nexfil.py | ed1659f6e866c0f0ba74fbdfc36c8ef03a11834d | [
"MIT"
]
| permissive | https://github.com/thewhiteh4t/nexfil | 27a3ead2eddc5c9dd32524cda9c950ae42000755 | 4d93c577c38e91e352175e36a4872f64d4aeb9d4 | refs/heads/main | 2023-08-26T18:49:56.750783 | 2023-03-26T17:28:34 | 2023-03-26T17:28:34 | 369,583,524 | 1,851 | 192 | MIT | false | 2023-09-04T16:42:22 | 2021-05-21T15:47:13 | 2023-09-03T23:17:02 | 2023-09-04T16:42:20 | 138 | 1,847 | 192 | 15 | Python | false | false | #!/usr/bin/env python3
# Current tool version; compared against the GitHub metadata by chk_update().
SCRIPT_V = '1.0.5'
import argparse
# ---- Command-line interface -------------------------------------------------
parser = argparse.ArgumentParser(description=f'nexfil - Find social media profiles on the web | v{SCRIPT_V}')
parser.add_argument('-u', help='Specify username', type=str)
parser.add_argument('-f', help='Specify a file containing username list', type=str)
parser.add_argument('-l', help='Specify multiple comma separated usernames', type=str)
parser.add_argument('-t', help='Specify timeout [Default : 10]', type=int)
parser.add_argument('-v', help='Prints version', action='store_true')
parser.add_argument('-U', help='Check for Updates', action='store_true')
parser.add_argument('-pm', help='Proxy mode [Available : single, file] [Default : single]', type=str)
parser.add_argument('-proto', help='Proxy protocol [Available : http, https] [Default : http]', type=str)
parser.add_argument('-ph', help='Proxy Hostname', type=str)
parser.add_argument('-pp', help='Proxy port', type=int)
parser.set_defaults(
    t=10,
    v=False,
    U=False,
    pm='single',
    proto='http'
)
args = parser.parse_args()
# Unpack parsed flags into module-level names used throughout the script.
uname = args.u          # single username (-u)
ulist = args.l          # comma separated usernames (-l)
fname = args.f          # file with one username per line (-f)
tout = args.t           # per-request timeout in seconds
vers = args.v
update = args.U
proxy_mode = args.pm
proxy_proto = args.proto
proxy_host = args.ph
proxy_port = args.pp
import sys
if vers is True:
    print(SCRIPT_V)
    sys.exit()
# Proxy is enabled only when both a host and a port were supplied.
USE_PROXY = False
if proxy_host is not None and proxy_port is not None:
    USE_PROXY = True
from json import loads
from packaging import version
from requests import get
from modules.write_log import log_writer
def chk_update():
    """Compare the local version with the GitHub metadata, then exit.

    Fetches metadata.json from the project repository, reports whether a
    newer release exists, and always terminates the process afterwards
    (this function only runs for the -U flag).
    """
    try:
        print('> Fetching Metadata...', end='')
        rqst = get('https://raw.githubusercontent.com/thewhiteh4t/nexfil/master/metadata.json', timeout=5)
        if rqst.status_code == 200:
            print('OK')
            remote_version = loads(rqst.text)['version']
            if version.parse(remote_version) > version.parse(SCRIPT_V):
                print(f'> New Update Available : {remote_version}')
            else:
                print('> Already up to date.')
    except Exception as upd_exc:
        # Network failures are reported and logged; the tool still exits.
        print(f'Exception : {str(upd_exc)}')
        log_writer(f'nexfil.py, {upd_exc}')
    sys.exit()
# -U: run the update check (chk_update never returns; it calls sys.exit()).
if update is True:
    chk_update()
# At least one username source (-u / -l / -f) is mandatory.
if uname is None and ulist is None and fname is None:
    print('''
Please provide one of the following :
\t* Username [-u]
\t* Comma separated usernames [-l]
\t* File containing list of usernames [-f]
''')
    sys.exit()
# Decide the run MODE ('single' | 'file' | 'list') and validate the input.
if uname is not None:
    MODE = 'single'
    if len(uname) > 0:
        if uname.isspace():
            print('Error : Username Missing!')
            sys.exit()
        else:
            pass
    else:
        print('Error : Username Missing!')
        sys.exit()
elif fname is not None:
    MODE = 'file'
elif ulist is not None:
    MODE = 'list'
    tmp = ulist
    # -l requires at least one comma; a single name should use -u instead.
    if ',' not in tmp:
        print('Error : Invalid Format!')
        sys.exit()
    else:
        ulist = tmp.split(',')
else:
    pass
from modules.printer import smsg, emsg, wmsg, clout, pprog
smsg('Importing Modules...', '+')
import asyncio
import aiohttp
from datetime import datetime
from os import getenv, path, makedirs, getcwd
from modules.url import test_url
from modules.alt import test_alt
from modules.api import test_api
from modules.sub import test_sub
from modules.string_case import test_string
from modules.method import test_method
from modules.redirect import test_redirect
from modules.headless import test_driver
import modules.share
from selenium.common.exceptions import WebDriverException
# Platform setup: Windows needs the selector event loop for aiohttp, and has
# no $HOME, so the working directory is used as the data root instead.
if sys.platform == 'win32':
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
    home = getcwd()
else:
    home = getenv('HOME')
# HTTP status codes that count as "profile exists" in query().
codes = [200, 301, 302, 405, 418]
log_file = home + '/.local/share/nexfil/exceptions.log'
loc_data = home + '/.local/share/nexfil/dumps/'   # autosave() output directory
if not path.exists(loc_data):
    makedirs(loc_data)
modules.share.LOG_FILE_PATH = log_file
def print_banner():
    """Print the startup banner, author links and the tool version.

    The Twitter and community URLs are read from the local metadata.json.
    """
    with open('metadata.json', 'r') as metadata:
        meta = loads(metadata.read())
        twitter_url = meta['twitter']
        comms_url = meta['comms']
        banner = r'''
 __ _ _____ _ _ _____ _____ _
|  \  |  |____  \___/  |____   |  |
|   \_|  |____  _/ \_  |     __|__ |_____'''
        smsg(f'{banner}', None)
        print()
        smsg('Created By : thewhiteh4t', '>')
        smsg(f' |---> Twitter : {twitter_url}', None)
        smsg(f' |---> Community : {comms_url}', None)
        smsg(f'Version : {SCRIPT_V}', '>')
        print()
async def query(session, browser, url, test, data, uname):
    """Probe one site for *uname* and record the outcome in modules.share.

    The 'test' field from url_store.json selects the detection strategy
    (method / string / redirect / api / alt / headless / url / subdomain /
    None); sites without a special test fall back to a HEAD request whose
    status is checked against the global `codes` whitelist.  Timeouts and
    errors are appended to the shared lists; the progress counter is always
    advanced, even on failure.
    """
    if USE_PROXY is False:
        proxy_url = ''
    else:
        proxy_url = f'{proxy_proto}://{proxy_host}:{proxy_port}'
    try:
        if test == 'method':
            await test_method(session, USE_PROXY, proxy_url, url)
        elif test == 'string':
            await test_string(session, USE_PROXY, proxy_url, url, data)
        elif test == 'redirect':
            await test_redirect(session, USE_PROXY, proxy_url, url)
        elif test == 'api':
            # 'data' is a template containing '{}' for the username.
            data = data.format(uname)
            await test_api(session, USE_PROXY, proxy_url, url, data)
        elif test == 'alt':
            data = data.format(uname)
            await test_alt(session, USE_PROXY, proxy_url, url, data)
        elif test == 'headless' and browser is not False:
            # browser is False when chromedriver failed to start in main();
            # in that case headless sites fall through to the HEAD branch.
            browser.get(url)
            await test_driver(browser, url, data, tout)
        else:
            # Default strategy: a HEAD request following redirects.
            if USE_PROXY is True:
                response = await session.head(
                    url,
                    allow_redirects=True,
                    proxy=proxy_url
                )
            else:
                response = await session.head(url, allow_redirects=True)
            if response.status in codes:
                if test is None:
                    await clout(response.url)
                elif test == 'url':
                    await test_url(response.url)
                elif test == 'subdomain':
                    await test_sub(url, response.url)
                else:
                    pass
            elif response.status == 404 and test == 'method':
                await test_method(session, USE_PROXY, proxy_url, url)
            elif response.status != 404:
                # Unexpected status (not whitelisted, not 404): count as error.
                modules.share.errors.append(url)
            else:
                pass
    except asyncio.exceptions.TimeoutError as exc:
        modules.share.timedout.append(url)
        log_writer(f'nexfil.py, {exc}, {url}')
    except aiohttp.ClientError as exc:
        modules.share.errors.append(url)
        log_writer(f'nexfil.py, {exc}, {url}')
    except WebDriverException as exc:
        modules.share.errors.append(url)
        log_writer(f'nexfil.py, {exc}, {url}')
    # Progress is reported once per site regardless of the outcome.
    modules.share.COUNTER += 1
    await pprog(modules.share.COUNTER)
def autosave(uname, ulist, mode, found, start_time, end_time):
    """Write a session report with all discovered profile URLs to loc_data.

    Args:
        uname: single username (used for the filename in 'single' mode).
        ulist: list of usernames ('list'/'file' modes).
        mode: 'single', 'list' or 'file'.
        found: list of discovered profile URLs.
        start_time / end_time: datetimes bounding the session.

    Raises:
        ValueError: if *mode* is not one of the three known modes (the old
        code fell through ``else: pass`` and crashed later with a NameError
        on ``filename``).
    """
    stamp = str(int(datetime.now().timestamp()))
    if mode == 'single':
        filename = f'{uname}_{stamp}.txt'
        username = uname
    elif mode in ('list', 'file'):
        filename = f'session_{stamp}.txt'
        username = ulist
    else:
        raise ValueError(f'unknown mode: {mode!r}')
    with open(loc_data + filename, 'w') as outfile:
        outfile.write(f'nexfil v{SCRIPT_V}\n')
        outfile.write(f'{"-" * 40}\n')
        # 'list'/'file' modes carry a list of names; join them for display.
        if isinstance(username, list):
            outfile.write(f'Username : {", ".join(username)}\n')
        else:
            outfile.write(f'Username : {username}\n')
        outfile.write(f'Start Time : {start_time.strftime("%c")}\n')
        outfile.write(f'End Time : {end_time.strftime("%c")}\n')
        outfile.write(f'Total Hits : {len(found)}\n')
        outfile.write(f'Total Timeouts : {len(modules.share.timedout)}\n')
        outfile.write(f'Total Errors : {len(modules.share.errors)}\n\n')
        outfile.write('URLs : \n\n')
        for url in found:
            outfile.write(f'{url}\n')
        outfile.write(f'\n{"-" * 40}\n')
    smsg(f'Saved : {loc_data + filename}', '+')
async def main(uname):
    """Run every site probe for one username concurrently.

    Prints the session header, starts an (optional) headless Chrome for the
    sites that need JavaScript, then fans out one query() task per entry in
    urls_json over a shared aiohttp session.
    """
    tasks = []
    smsg(f'Target : {uname}', '+')
    print()
    # A desktop browser UA avoids trivial bot blocks on many sites.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.5112.79 Safari/537.36'
    }
    timeout = aiohttp.ClientTimeout(sock_connect=tout, sock_read=tout)
    conn = aiohttp.TCPConnector(ssl=False)  # certificate errors are ignored
    if USE_PROXY is True:
        smsg('Proxy : ON', '+')
        smsg(f'Proxy Mode : {proxy_mode}', '+')
        smsg(f'Proxy Type : {proxy_proto}', '+')
        smsg(f'Proxy Host : {proxy_host}', '+')
        smsg(f'Proxy Port : {proxy_port}', '+')
        log_writer('Proxy will be used!')
        log_writer(f'Proxy details : {proxy_mode}, {proxy_proto}, {proxy_host}, {proxy_port}')
    wmsg('Finding Profiles...')
    print()
    wmsg('Initializing Chrome Driver...')
    # driver stays False when chromedriver cannot start; query() then skips
    # the 'headless' test for those sites instead of crashing.
    try:
        import undetected_chromedriver as uc
        options = uc.ChromeOptions()
        options.add_argument('--headless')
        if USE_PROXY is True:
            options.add_argument(f'--proxy-server={proxy_proto}://{proxy_host}:{proxy_port}')
        caps = options.capabilities
        caps["pageLoadStrategy"] = "eager"  # don't wait for full page load
        driver = uc.Chrome(options=options, desired_capabilities=caps)
        smsg('Chromedriver is Ready!', '+')
        print()
    except ModuleNotFoundError:
        emsg('undetected_chromedriver not found!')
        wmsg('Some websites will be skipped!')
        print()
        driver = False
    except TypeError:
        # undetected_chromedriver raises TypeError when Chrome is missing.
        emsg('Chrome not found!')
        wmsg('Some websites will be skipped!')
        print()
        driver = False
    async with aiohttp.ClientSession(connector=conn, headers=headers, timeout=timeout, trust_env=True) as session:
        for block in urls_json:
            curr_url = block['url'].format(uname)
            test = block['test']
            data = block['data']
            task = asyncio.create_task(query(session, driver, curr_url, test, data, uname))
            tasks.append(task)
        await asyncio.gather(*tasks)
if __name__ == "__main__":
    try:
        log_writer('----- STARTING -----')
        print_banner()
        wmsg('Loading URLs...')
        # url_store.json holds one {url, test, data} block per supported site.
        with open('url_store.json', 'r', encoding='utf-8') as url_store:
            raw_data = url_store.read()
            urls_json = loads(raw_data)
        smsg(f'{len(urls_json)} URLs Loaded!', '+')
        smsg(f'Timeout : {tout} secs', '+')
        start_time = datetime.now()
        # Dispatch on the MODE decided during argument validation above.
        if MODE == 'single':
            asyncio.run(main(uname))
        elif MODE == 'list':
            for uname in ulist:
                ulist[ulist.index(uname)] = uname.strip()
                asyncio.run(main(uname))
        elif MODE == 'file':
            ulist = []
            try:
                with open(fname, 'r') as wdlist:
                    tmp = wdlist.readlines()
                    for user in tmp:
                        ulist.append(user.strip())
                    for uname in ulist:
                        uname = uname.strip()
                        asyncio.run(main(uname))
            except Exception as exc:
                wmsg(f'Exception [file] : {str(exc)}')
                log_writer(exc)
                sys.exit()
        else:
            pass
        end_time = datetime.now()
        delta = end_time - start_time
        # Re-parse the timedelta string to format it as "H Hours M Minutes...".
        str_d = datetime.strptime(str(delta), '%H:%M:%S.%f')
        h_delta = datetime.strftime(str_d, "%H Hours %M Minutes %S Seconds")
        print('\n')
        smsg(f'Completed In : {h_delta}', '>')
        smsg(f'Total Profiles Found : {len(modules.share.found)}', '>')
        smsg(f'Total Timeouts : {len(modules.share.timedout)}', '>')
        smsg(f'Total Exceptions : {len(modules.share.errors)}', '>')
        print()
        # Only write a report when at least one profile was found.
        if len(modules.share.found) != 0:
            autosave(uname, ulist, MODE, modules.share.found, start_time, end_time)
        else:
            pass
        log_writer('----- COMPLETED -----')
    except KeyboardInterrupt:
        print()
        emsg('Keyboard Interrupt.')
        log_writer('nexfil.py, recieved keyboard interrupt')
        log_writer('----- COMPLETED -----')
        sys.exit()
| UTF-8 | Python | false | false | 12,229 | py | 15 | nexfil.py | 12 | 0.571756 | 0.565868 | 0 | 373 | 31.785523 | 145 |
Amitchawarekar/College-Projects | 2,241,972,950,532 | ff7ebbce5ae9388f2dd8c0ead6bd01dbee1c8ec2 | 23a5a5819e5facc4d83c64cbbb25e64df8d9ed0a | /Student Management System-1/employee_login.py | 8e3834145f39900830d4ac3a96f089511c4f65ae | []
| no_license | https://github.com/Amitchawarekar/College-Projects | 2e2a3daf25173a478a0556340bce94aa3cef88ec | 3ad3ad1a6580a02aa0e1165e30a85c66281f3fe8 | refs/heads/main | 2023-06-19T13:07:39.728879 | 2021-07-24T14:42:45 | 2021-07-24T14:42:45 | 384,102,907 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from tkinter import *
import pymysql
from tkinter import messagebox as tsmg
from tkinter import ttk
import os
from PIL import Image, ImageTk
class emp_login_window:
    """Employee login window for the Student Management System.

    Builds the login form, authenticates against the MySQL table
    ``registered_employee`` (database ``srms1``), and offers a security
    question based password-reset flow plus a link to the registration
    window.
    """

    def __init__(self, root):
        """Build the login form inside *root* (a Tk top-level window)."""
        self.root = root
        self.root.title('Login Window')
        self.root.geometry("1350x700+0+0")
        self.root.resizable(True, True)
        self.root.config(bg="white")
        # Image references are kept on self so Tk does not garbage-collect them.
        self.bg1 = ImageTk.PhotoImage(file="image/bg-1.jpg")
        bg1 = Label(self.root, image=self.bg1).place(x=0, y=0, relwidth=1, relheight=1)
        login_frame = Frame(self.root, bg="white", bd=1, relief=RIDGE)
        login_frame.place(x=200, y=150, width=350, height=400)
        self.bg2 = ImageTk.PhotoImage(file="image/employee_bg_image.jpg")
        bg2 = Label(self.root, image=self.bg2).place(x=550, y=150, width=550, height=400)
        self.bg3 = ImageTk.PhotoImage(file="image/employee_login.jpg")
        bg3 = Label(login_frame, image=self.bg3).place(x=0, y=0, width=350, height=80)
        label = Label(login_frame, text="Welcome to Student Management System", font=('times new roman', 12, 'bold'),
                      justify=CENTER, bg="white", fg='blue')
        label.place(x=20, y=90)
        self.bg_uname = ImageTk.PhotoImage(file="image/username.png")
        bg_uname = Label(login_frame, image=self.bg_uname).place(x=20, y=130, width=30, height=30)
        username = Label(login_frame, text="Username", font=('Bell MT', 16, 'bold'), justify=CENTER, bg="white",
                         fg='black').place(x=50, y=130)
        self.txt_empid = Entry(login_frame, font=("times new roman", 15), bg="light grey")
        self.txt_empid.place(x=20, y=170, width=280)
        self.bg_pass = ImageTk.PhotoImage(file="image/download.png")
        bg_pass = Label(login_frame, image=self.bg_pass).place(x=20, y=205, width=30, height=30)
        password = Label(login_frame, text="Password", font=('Bell MT', 16, 'bold'), justify=CENTER, bg="white",
                         fg='black').place(x=50, y=205)
        self.txt_password = Entry(login_frame, font=("times new roman", 15), bg="light grey", show="*")
        self.txt_password.place(x=20, y=245, width=280)
        btn_reg = Button(login_frame, text="Register new Account?", command=self.Register_window,
                         font=("Times new roman", 13, 'bold'), bg="white", bd=0, fg="green", cursor="hand2")
        btn_reg.place(x=10, y=285)
        btn_forget = Button(login_frame, text="Forget Password?", command=self.forget_password,
                            font=("Times new roman", 13, 'bold'), bg="white", bd=0, fg="red", cursor="hand2")
        btn_forget.place(x=190, y=285)
        self.btn_image1 = ImageTk.PhotoImage(file="image/download1.jpg")
        btn_login = Button(login_frame, image=self.btn_image1, command=self.login_data, bd=0, bg="CRIMSON", fg="white",
                           cursor="hand2")
        btn_login.place(x=60, y=320, width=200, height=50)
        # ========footer ==============#
        footer = Label(self.root,
                       text="Developed By : Amit Anand Chawarekar                 Email-ID : amit.chawarekar@gmail.com                Contact no. : 9503016634,9921663430",
                       font=("goudy old style", 15), bg="#262626", fg="white").pack(side=BOTTOM, fill=X)

    def login_data(self):
        """Validate the entered credentials against the database.

        On success, closes this window and launches the employee dashboard.
        NOTE(review): passwords are compared in plain text — consider hashing.
        """
        if self.txt_empid.get() == "" or self.txt_password.get() == "":
            tsmg.showerror("Error", "All field are required", parent=self.root)
        else:
            try:
                con = pymysql.connect(host='localhost', user='root', password="", database="srms1")
                cur = con.cursor()
                cur.execute("select * from registered_employee where empid=%s and password=%s",
                            (self.txt_empid.get(), self.txt_password.get()))
                row = cur.fetchone()
                if row == None:
                    tsmg.showerror('Error', "Invalid Username and password", parent=self.root)
                else:
                    tsmg.showinfo("Success", f"Welcome {self.txt_empid.get()}", parent=self.root)
                    self.root.destroy()
                    os.system("python employee_dashboard.py")
                con.close()
            except Exception as es:
                tsmg.showerror('Error', f" Error due to {str(es)}", parent=self.root)

    def reset_password(self):
        """Reset the password after checking the security question/answer.

        Only callable from the dialog built by forget_password(), which
        creates self.root2, self.combo_question, self.txt_answer and
        self.txt_new_paswword (sic — attribute name kept as-is).
        """
        if self.combo_question.get() == "Select" or self.txt_answer.get() == "" or self.txt_new_paswword.get() == "":
            tsmg.showerror("Error", 'All Field are required', parent=self.root2)
        else:
            try:
                con = pymysql.connect(host='localhost', user='root', password="", database="srms1")
                cur = con.cursor()
                cur.execute("select * from registered_employee where empid=%s and question=%s and answer=%s",
                            (self.txt_empid.get(), self.combo_question.get(), self.txt_answer.get()))
                row = cur.fetchone()
                if row == None:
                    tsmg.showerror('Error', "Please select the correct security Question/Enter Answer", parent=self.root2)
                else:
                    cur.execute("update registered_employee set password=%s where empid=%s",
                                (self.txt_new_paswword.get(), self.txt_empid.get()))
                    con.commit()
                    con.close()
                    tsmg.showinfo('Success', "Your Password has been reset,Please login with new password",
                                  parent=self.root2)
                    self.clear()
                    self.root2.destroy()
            except Exception as es:
                tsmg.showerror('Error', f" Error due to {str(es)}", parent=self.root)

    def forget_password(self):
        """Open the password-reset dialog for the entered employee id.

        Verifies the id exists first, then builds a modal top-level window
        with the security question combo, answer and new-password fields.
        """
        if self.txt_empid.get() == "":
            tsmg.showerror("Error", "Please Enter valid Employee ID to reset your password", parent=self.root)
        else:
            try:
                con = pymysql.connect(host='localhost', user='root', password="", database="srms1")
                cur = con.cursor()
                cur.execute("select * from registered_employee where empid=%s",
                            (self.txt_empid.get()))
                row = cur.fetchone()
                if row == None:
                    tsmg.showerror('Error', "Please enter valid Employee ID to reset your password", parent=self.root)
                else:
                    con.close()
                    self.root2 = Toplevel()
                    self.root2.title("Forget Password")
                    self.root2.geometry('360x400+200+160')
                    self.root2.config(bg="white")
                    self.root2.focus_force()
                    self.root2.grab_set()  # make the dialog modal
                    t = Label(self.root2, text='Forget Password', font=('Times new roman', 20, 'bold'), bg='yellow',
                              fg='blue')
                    t.pack(fill=X)
                    # ============================Forget Password================
                    question = Label(self.root2, text="Security Question", font=("times new roman", 17, "bold"),
                                     bg="white",
                                     fg="black").place(x=50, y=70)
                    self.combo_question = ttk.Combobox(self.root2, font=("times new roman", 15), state='readonly',
                                                       justify=CENTER)
                    self.combo_question['values'] = (
                        "Select", "your First Pet Name", "Your Birth Place", "Your Best Friend")
                    self.combo_question.place(x=50, y=110, width=260)
                    self.combo_question.current(0)
                    answer = Label(self.root2, text="Answer", font=("times new roman", 17, "bold"), bg="white",
                                   fg="black").place(x=50,
                                                     y=150)
                    self.txt_answer = Entry(self.root2, font=("times new roman", 15), bg="light yellow")
                    self.txt_answer.place(x=50, y=190, width=260)
                    new_password = Label(self.root2, text="New Password", font=("times new roman", 17, "bold"),
                                         bg="white", fg="black").place(x=75, y=230)
                    self.txt_new_paswword = Entry(self.root2, font=("times new roman", 15), bg="light yellow")
                    self.txt_new_paswword.place(x=50, y=270, width=260)
                    btn_reset_pass = Button(self.root2, text="ResetPassword?", command=self.reset_password,
                                            font=("Times new roman", 15, 'bold'), bg="green", fg="white",
                                            cursor="hand2")
                    btn_reset_pass.place(x=85, y=320)
            except Exception as es:
                tsmg.showerror('Error', f" Error due to {str(es)}", parent=self.root)

    def clear(self):
        """Blank all login and reset-dialog fields after a successful reset.

        NOTE(review): assumes forget_password() ran first; otherwise the
        combo/answer/new-password widgets do not exist yet.
        """
        self.combo_question.current(0)
        self.txt_empid.delete(0, END)
        self.txt_password.delete(0, END)
        self.txt_new_paswword.delete(0, END)
        self.txt_answer.delete(0, END)

    def Register_window(self):
        """Close the login window and launch the registration script."""
        self.root.destroy()
        os.system("python employee_register.py")
# Application entry point: create the Tk root, build the login window and
# hand control to the Tk event loop.
root = Tk()
obj = emp_login_window(root)
root.mainloop()
| UTF-8 | Python | false | false | 9,597 | py | 15 | employee_login.py | 11 | 0.5186 | 0.490049 | 0 | 182 | 50.730769 | 177 |
lucacrm/fuzzysets | 7,937,099,611,648 | ba5f16e8e0b0c08d3819eb38f0c29c0091ebce59 | 87e9cc7f8ca0d9c08e2bda81197415aaff1ff124 | /muzzifiers.py | c9d8645351698ea5eee2ecf1555d4e068ea5be8a | []
| no_license | https://github.com/lucacrm/fuzzysets | 3bf27557e4ec7610c85a59f21a18496e84a96c57 | 37f255e009fb52cf5e4b378ae59a1bf7a81c8000 | refs/heads/master | 2021-05-25T10:36:05.149226 | 2018-12-03T15:52:16 | 2018-12-03T15:52:16 | 127,121,638 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
from support_vector_clustering import *
class BaseMuzzifier(object):
    """Abstract base for fuzzifiers mapping data points to memberships.

    Subclasses override get_mus(); calling it on the base class raises.
    """

    def __init__(self):
        """The base class holds no state."""
        pass

    def get_mus(self, x, clusters, kernel, c, gen):
        """Abstract hook — subclasses must implement it."""
        raise NotImplementedError(
            'the base class does not implement get_mus method')
class BinaryMuzzifier(BaseMuzzifier):
    """Crisp fuzzifier: membership is 1 for points in a cluster, else 0."""

    def __init__(self):
        self.name = 'Binary Muzzifier'
        self.latex_name = '$\\hat\\mu_{\\text{binary muzzifier}}$'

    def get_mus(self, x, clusters, kernel, c, gen):
        """Return one 0/1 membership list per cluster (kernel/c/gen unused)."""
        memberships = []
        for cluster in clusters:
            memberships.append([int(point in cluster) for point in x])
        return memberships

    def __repr__(self):
        return 'BinaryMuzzifier()'

    def __str__(self):
        return repr(self)

    def __eq__(self, other):
        return type(self) == type(other)

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash(repr(self))

    def __nonzero__(self):
        # Python 2 truth-value hook; instances are always truthy.
        return True
class LinearMuzzifier(BaseMuzzifier):
    """Fuzzifier with membership decreasing linearly with cluster distance.

    Points within the cluster radius get membership 1, points beyond the
    maximum sampled distance get 0, and memberships interpolate linearly
    in between.
    """

    def __init__(self):
        self.name = 'Linear Muzzifier'
        self.latex_name = '$\\hat\\mu_{\\text{linear muzzifier}}$'

    def get_mus(self, x, clusters, kernel, c, gen):
        """Return one membership list per cluster, or None for a cluster
        with no free support vectors (0 < beta < c).

        Args:
            x: data points to fuzzify.
            clusters: iterable of point collections.
            kernel, c: passed through to the SVC solver.
            gen: callable drawing a random sample used to estimate the
                 maximum distance from each cluster.
        """
        mus = []
        sample = gen(1500)
        for cl in clusters:
            betas = solve_wolf(cl, kernel, c)
            # Free support vectors lie strictly between the box bounds.
            index_sv = []
            for i in range(len(betas)):
                if 0 < betas[i] < c:
                    index_sv.append(i)
            if len(index_sv) > 0:
                radius, d = squared_radius_and_distance(cl, betas, index_sv, kernel, c, mean=False)
                # Built-in max over a generator: works on Python 3, where
                # np.max(map(...)) fails because map returns an iterator.
                max_distance = max(d(point) for point in sample)
                curr_mu = []
                for point in x:
                    # Evaluate d once per point (was computed up to 3 times).
                    dist = d(point)
                    if dist <= radius:
                        curr_mu.append(1)
                    elif dist >= max_distance:
                        curr_mu.append(0)
                    else:
                        curr_mu.append((max_distance - dist) / (max_distance - radius))
            else:
                # Degenerate cluster: no free SVs, no membership defined.
                curr_mu = None
            mus.append(curr_mu)
        return mus

    def __repr__(self):
        return 'LinearMuzzifier()'

    def __str__(self):
        return self.__repr__()

    def __eq__(self, other):
        return type(self) == type(other)

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash(self.__repr__())

    def __nonzero__(self):
        return True

    # Python 3 alias for the Python 2 truth-value hook above.
    __bool__ = __nonzero__
zashib/scrapy-generator-gui | 9,698,036,191,210 | 8dcd3110b942f51d6582c3c6a58b749243b6b9fb | c53d855f8fb42528f0d191e5448f4b64f0e2b353 | /scrapy_generator/settings/headers.py | b4e81af6b4bd7abfb64fa445424915675400d172 | [
"MIT"
]
| permissive | https://github.com/zashib/scrapy-generator-gui | 11169eb7c634338695fc979484c6774ba3e190aa | ec1465e558971dd3e6748b444577da54e084c279 | refs/heads/master | 2022-08-31T09:05:44.997930 | 2020-09-10T18:44:29 | 2020-09-10T18:44:29 | 202,672,083 | 7 | 0 | MIT | false | 2022-07-29T22:38:06 | 2019-08-16T06:34:19 | 2022-04-28T20:02:08 | 2022-07-29T22:38:03 | 37 | 6 | 0 | 1 | Python | false | false | #########################
#Default request headers#
#########################
# Headers attached to every Scrapy request (DEFAULT_REQUEST_HEADERS setting).
# The User-Agent mimics desktop Firefox 67; Host pins www.crunchbase.com.
# https://github.com/TeamHG-Memex/scrapy-rotating-proxies
DEFAULT_REQUEST_HEADERS = { 'Host': 'www.crunchbase.com',
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:67.0) Gecko/20100101 Firefox/67.0',
        'Accept': '*/*',
        'Accept-Language': 'ru-RU,ru;q=0.8,en-US;q=0.5,en;q=0.3',
        'Accept-Encoding': 'gzip, deflate, br',
        'Connection': 'keep-alive',
        'Cache-Control': 'max-age=0'}
| UTF-8 | Python | false | false | 458 | py | 16 | headers.py | 14 | 0.60262 | 0.543668 | 0 | 12 | 37.166667 | 89 |
ashutosh108/video-scripts | 3,092,376,480,437 | 0c89bd8ce44be12fea9be4bf13a3206a4ecd5f56 | c5fc562d3ed78c59e4a4eb074fb64d1f76def39b | /tests/test_meta.py | b68787b39e1dffc1a8518b2bb897ff96a624f16f | []
| no_license | https://github.com/ashutosh108/video-scripts | 7c6861069afd654c4f4fe6501bf05180da19663d | 115213280ce4f0c33acdc14a3a2fbeeaf5314181 | refs/heads/master | 2020-04-11T23:44:22.561731 | 2019-10-29T11:03:50 | 2019-10-29T11:03:50 | 162,177,274 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | from unittest import TestCase
import datetime
import meta
import os
class test_meta(TestCase):
    """Tests for the meta module: per-video sidecar metadata (skip times,
    artists, titles, language) and the YouTube title/description builders
    for the English original and the Russian mono/stereo translations.

    Fixture videos live under tests/files/ (see get_test_filename).
    """
    def test_get_skip_time_for_file_nonexisting(self):
        self.assertEqual(meta.get_skip_time('qwe'), None)
    def test_get_skip_time_for_file_existing(self):
        filename = self.get_test_filename('2016-10-07 goswamimj.mp4')
        self.assertEqual(meta.get_skip_time(filename), '1:15')
    # pyyaml somehow automatically leaves 0:07 as '0:07', but converts 1:07 to int 67 (seconds).
    # We have to deal with either.
    def test_get_skip_time_for_zero_minutes(self):
        filename = self.get_test_filename('2016-10-17 avadhutmj.mp4')
        self.assertEqual(meta.get_skip_time(filename), '0:07')
    def test_get_skip_time_timedelta_nonexisting(self):
        self.assertEqual(datetime.timedelta(), meta.get_skip_time_timedelta('qwe'))
    def test_get_skip_time_timedelta_invalid(self):
        # Unparseable skip values fall back to a zero timedelta.
        filename = self.get_test_filename('invalid_time.mp4')
        self.assertEqual(datetime.timedelta(), meta.get_skip_time_timedelta(filename))
    def test_get_skip_time_timedelta_seconds_only(self):
        filename = self.get_test_filename('skip_seconds_only.mp4')
        self.assertEqual(datetime.timedelta(seconds=17), meta.get_skip_time_timedelta(filename))
    def test_get_skip_time_timedelta_minutes_and_seconds(self):
        filename = self.get_test_filename('2016-10-07 goswamimj.mp4')
        self.assertEqual(datetime.timedelta(minutes=1, seconds=15), meta.get_skip_time_timedelta(filename))
    def test_get_skip_time_timedelta_hours_minutes_and_seconds(self):
        filename = self.get_test_filename('skip_hms.mp4')
        self.assertEqual(datetime.timedelta(hours=1, minutes=2, seconds=19), meta.get_skip_time_timedelta(filename))
    def test_get_artist_en(self):
        filename = self.get_test_filename('2016-10-07 goswamimj.mp4')
        self.assertEqual('Bhakti Sudhīr Goswāmī', meta.get_artist_en(filename))
    def test_get_artist_en_multiple(self):
        # Several speakers in one file name are joined with ', '.
        filename = self.get_test_filename('2016-10-07 janardanmj_goswamimj.mp4')
        self.assertEqual('Bhakti Pāvan Janārdan, Bhakti Sudhīr Goswāmī', meta.get_artist_en(filename))
    def test_get_artist_en_ranjan(self):
        filename = self.get_test_filename('2016-10-07 brmadhusudan.mp4')
        self.assertEqual('Bhakti Rañjan Madhusūdan', meta.get_artist_en(filename))
    def test_get_year_month_day(self):
        # Date components come from the file-name prefix, as strings.
        filename = self.get_test_filename('2016-10-07 brmadhusudan.mp4')
        [year, month, day] = meta.get_year_month_day(filename)
        self.assertEqual('2016', year)
        self.assertEqual('10', month)
        self.assertEqual('07', day)
    def test_get_title_en(self):
        filename = self.get_test_filename('2016-10-07 goswamimj.mp4')
        title_en = meta.get_title_en(filename)
        self.assertEqual('Have faith in Guru-Vaishnava, not in yourself', title_en)
    def test_get_skip_time(self):
        filename = self.get_test_filename('2016-10-07 goswamimj.mp4')
        skip_time = meta.get_skip_time(filename)
        self.assertEqual('1:15', skip_time)
    def test_get_skip_time_from_yml(self):
        filename = self.get_test_filename('2016-10-12 brmadhusudan.mp4')
        skip_time = meta.get_skip_time(filename)
        self.assertEqual('0:07:56', skip_time)
    def test_get_youtube_description_en(self):
        filename = self.get_test_filename('2016-10-12 brmadhusudan.mp4')
        expected = """- The story of Chiangmai ashram
- Good fortune of those who have facilities for practicing devotional life
- How Srila Govinda Maharaj appreciated Thai culture
- The original purpose of the building of Chiangmai ashram: sanskrit school
Śrīla Bhakti Rañjan Madhusūdan Mahārāja
October 12, 2016
Theistic Media Studios, Gupta Govardhan Āśram.
Downloaded from TMS_TV https://livestream.com/accounts/2645002
На русском: (ссылка скоро будет)"""
        self.assertIn(expected, meta.get_youtube_description_orig(filename, 'en'))
    def test_get_youtube_title_ru_stereo(self):
        filename = self.get_test_filename('2016-10-12 brmadhusudan.mp4')
        expected = 'Удача Чиангмайского ашрама. Бхакти Ран̃джан Мадхусӯдан'
        self.assertEqual(expected, meta.get_youtube_title_ru_stereo(filename))
    def test_get_youtube_title_ru_mono(self):
        filename = self.get_test_filename('2016-10-12 brmadhusudan.mp4')
        expected = 'Удача Чиангмайского ашрама (моно). Бхакти Ран̃джан Мадхусӯдан'
        self.assertEqual(expected, meta.get_youtube_title_ru_mono(filename))
    def test_get_youtube_title_ru_mono_dot(self):
        # A trailing dot in the title must interact correctly with '(моно)'.
        filename = self.get_test_filename('2016-01-01 goswamimj.mp4')
        expected = 'Проверка точки (моно). Бхакти Судхӣр Госва̄мӣ'
        self.assertEqual(expected, meta.get_youtube_title_ru_mono(filename))
    def test_get_youtube_title_ru_stereo_question_mark(self):
        filename = self.get_test_filename('2016-10-05 goswamimj.mp4')
        expected = 'Настроение или сердце? Бхакти Судхӣр Госва̄мӣ'
        self.assertEqual(expected, meta.get_youtube_title_ru_stereo(filename))
    def test_get_youtube_title_ru_mono_question_mark(self):
        filename = self.get_test_filename('2016-10-05 goswamimj.mp4')
        expected = 'Настроение или сердце? (моно) Бхакти Судхӣр Госва̄мӣ'
        self.assertEqual(expected, meta.get_youtube_title_ru_mono(filename))
    def test_get_youtube_descr_ru_stereo(self):
        filename = self.get_test_filename('2016-10-12 brmadhusudan.mp4')
        expected = """- История Чиангмайсколго ашрама
- Удача тех, кто имеет возможность практиковать преданное служение
- Как Шрила Говинда Махарадж ценил Тайскую культуру
- Здание Чиангмайского ашрама строилось для санскритской школы
Ш́рӣла Бхакти Ран̃джан Мадхусӯдан Mаха̄ра̄дж
12 октября 2016
Студия «Теистик Медиа», А̄ш́рам на Гупта Говардхане.
Загружено с TMS_TV https://livestream.com/accounts/2645002
English original: (link pending)
Моно перевод: (link pending)"""
        self.assertIn(expected, meta.get_youtube_description_ru_stereo(filename))
    def test_get_youtube_descr_ru_mono(self):
        filename = self.get_test_filename('2016-10-12 brmadhusudan.mp4')
        expected = """- История Чиангмайсколго ашрама
- Удача тех, кто имеет возможность практиковать преданное служение
- Как Шрила Говинда Махарадж ценил Тайскую культуру
- Здание Чиангмайского ашрама строилось для санскритской школы
Ш́рӣла Бхакти Ран̃джан Мадхусӯдан Mаха̄ра̄дж
12 октября 2016
Студия «Теистик Медиа», А̄ш́рам на Гупта Говардхане.
Загружено с TMS_TV https://livestream.com/accounts/2645002
English original: (link pending)
Стерео перевод: (link pending)"""
        self.assertIn(expected, meta.get_youtube_description_ru_mono(filename))
    @staticmethod
    def get_test_filename(base_filename):
        # Resolve a fixture name to tests/files/<name> next to this module.
        directory = os.path.dirname(__file__)
        filename = os.path.join(directory, 'files', base_filename)
        return filename
    def test_get_lang(self):
        filename = self.get_test_filename('2016-10-17 avadhutmj.mp4')
        expected = 'ru'
        self.assertEqual(expected, meta.get_lang(filename))
    def test_get_lang_en_default(self):
        # Files without an explicit lang entry default to English.
        filename = self.get_test_filename('2016-10-12 brmadhusudan.mp4')
        expected = 'en'
        self.assertEqual(expected, meta.get_lang(filename))
    def test_get(self):
        filename = self.get_test_filename('2016-10-17 avadhutmj.mp4')
        self.assertEqual('ru', meta.get(filename, 'lang'))
    def test_youtube_links_from_orig(self):
        filename = self.get_test_filename('2016-01-02 goswamimj.mp4')
        expected_regex = '(?m)^На русском: https://youtu.be/sssssssssss$'
        self.assertRegex(meta.get_youtube_description_orig(filename, 'en'), expected_regex)
    def test_youtube_links_from_mono(self):
        filename = self.get_test_filename('2016-01-02 goswamimj.mp4')
        expected_regex_orig = '(?m)^English original: https://youtu.be/ooooooooooo$'
        expected_regex_stereo = '(?m)^Стерео перевод: https://youtu.be/sssssssssss$'
        self.assertRegex(meta.get_youtube_description_ru_mono(filename), expected_regex_orig)
        self.assertRegex(meta.get_youtube_description_ru_mono(filename), expected_regex_stereo)
    def test_youtube_links_from_stereo(self):
        filename = self.get_test_filename('2016-01-02 goswamimj.mp4')
        # (?m) for multi-line so that ^ will match newlines in the middle
        expected_regex1 = '(?m)^English original: https://youtu.be/ooooooooooo$'
        expected_regex2 = '(?m)^Моно перевод: https://youtu.be/mmmmmmmmmmm$'
        self.assertRegex(meta.get_youtube_description_ru_stereo(filename), expected_regex1)
        self.assertRegex(meta.get_youtube_description_ru_stereo(filename), expected_regex2)
| UTF-8 | Python | false | false | 9,625 | py | 39 | test_meta.py | 21 | 0.703636 | 0.668877 | 0 | 186 | 46.021505 | 116 |
JiayingGaoo/lc | 10,385,230,924,390 | 962ee329f4507744ed5e81899fd9f6a35d847e0b | 7ac37581df5e8b6a5b37ac24803933629ce74b8b | /Implement_strStr.py | 276648c151c4b74c052838490cd79edd808c9739 | []
| no_license | https://github.com/JiayingGaoo/lc | d5b13c1336c03281943350e54465a1af0b854517 | ef788c3876a61cc2b7acb98d997273602dc0d865 | refs/heads/master | 2020-06-27T01:33:24.463877 | 2019-09-10T00:53:01 | 2019-09-10T00:53:01 | 199,811,076 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | class Solution:
def strStr(self, haystack: str, needle: str) -> int:
# do not use find function
if needle == '':
return 0
for i in range(0, len(haystack) - len(needle)):
if haystack[i] == needle[0]:
be_a_part = True
for j in range(1, len(needle)):
if needle[j] != haystack[i + j]:
be_a_part = False
break
if be_a_part:
return i
return -1
# Smoke test: 'll' first appears in 'hello' at index 2.
print(Solution().strStr('hello', 'll'))
fateiswar/ftpserver | 19,447,611,944,405 | 8ece50ae11523444052293f0bc9aa087d6da66aa | 4058dca866ffc936407793ce24e76b8b773448f5 | /ftpserver.py | 4256abbcd33fee3db3a17a06de7946e1fd5e2845 | []
| no_license | https://github.com/fateiswar/ftpserver | 35f9c970b166ec146fe3b79c5d6d141d380f5eb7 | 346c3e84c3a07085936fbb82c30cafd1df1acb94 | refs/heads/master | 2021-01-10T20:20:25.951204 | 2013-08-20T04:36:22 | 2013-08-20T04:36:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from pyftpdlib.authorizers import DummyAuthorizer
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
from pyftpdlib.filesystems import AbstractedFS
from filesystem_view import *
from FtpFs import *
from oss_client.oss_api import *
import os
from optparse import OptionParser
def main(user, password, port, bucket, access_id, access_key):
    """Configure and run a pyftpdlib FTP server backed by Aliyun OSS.

    user/password -- FTP credentials for the privileged account
    port          -- TCP port to listen on (all interfaces)
    bucket        -- home directory ('/bucket/' form) for both accounts
    access_id/access_key -- OSS credentials wired into the filesystem view
    """
    fs_view.access_id = access_id
    fs_view.access_key = access_key
    # NOTE(review): 'fsview' vs 'fs_view' -- unless one of the star imports
    # above defines 'fsview', this line raises NameError; confirm which name
    # the filesystem-view module actually exports.
    fsview.oss = OssAPI(fs_view.host, fs_view.access_id, fs_view.access_key)
    authorizer = DummyAuthorizer()
    # 'elradfmwM' grants the full pyftpdlib permission set to this user.
    authorizer.add_user(user, password, bucket, perm = 'elradfmwM')
    authorizer.add_anonymous(bucket)
    handler = FTPHandler
    handler.authorizer = authorizer
    handler.abstracted_fs = ftpFS
    #handler.abstracted_fs = AbstractedFS
    handler.banner = 'pyftpdlib based ftpd ready'
    address = ('0.0.0.0', port)
    server = FTPServer(address, handler)
    server.serve_forever()
if __name__ == '__main__':
    # CLI entry point: parse credentials/connection options, normalise the
    # bucket to '/name/' form and start the FTP server.
    parser = OptionParser()
    parser.add_option("", "--access_key_id", dest="access_id", help="")
    parser.add_option("", "--access_key_secret", dest="access_key", help="")
    parser.add_option("", "--ftp_user_name", dest="ftp_user_name", help="")
    parser.add_option("", "--ftp_password", dest="ftp_password", help="")
    parser.add_option("", "--bucket", dest="bucket", help="")
    parser.add_option("", "--port", dest="port", help="")
    (options, args) = parser.parse_args()
    if options.ftp_user_name:
        user = options.ftp_user_name
    else:
        user = 'admin'
    if options.ftp_password:
        password = options.ftp_password
    else:
        password = '12345'
    if options.port:
        # optparse yields strings; the socket address requires an int port
        # (the default below already is one).
        port = int(options.port)
    else:
        port = 2121
    bucket = options.bucket
    # Ensure the bucket path is wrapped in slashes: 'name' -> '/name/'.
    if not bucket.startswith('/'):
        bucket = '/' + bucket
    if not bucket.endswith('/'):
        bucket = bucket + '/'
    main(user, password, port, bucket, options.access_id, options.access_key)
| UTF-8 | Python | false | false | 2,216 | py | 4 | ftpserver.py | 3 | 0.587545 | 0.581679 | 0 | 64 | 31.046875 | 77 |
mattmacari/opug-blockchain | 1,571,958,044,533 | 2a48ef4bc3cdb359646137a8901034cee2287db0 | d36bd49aabaca83d21f3156c321a4dd5afdf3166 | /tests/blockchain/test_blockchain.py | 47f84c529dd8ba0fa335f08f2fca02b4a5a7b604 | [
"Unlicense"
]
| permissive | https://github.com/mattmacari/opug-blockchain | 9c17fa8b9eba336df11b60271e08a7cc44a1433f | 14c7880d9086861f98760bd3e85a5c5b32767189 | refs/heads/develop | 2021-06-06T20:26:31.791957 | 2018-05-25T00:24:56 | 2018-05-25T00:24:56 | 132,812,353 | 0 | 0 | Unlicense | false | 2021-04-20T17:30:53 | 2018-05-09T21:00:47 | 2018-05-25T00:24:58 | 2021-04-20T17:30:52 | 42 | 0 | 0 | 3 | Jupyter Notebook | false | false | import unittest
from unittest import mock
import hashlib
import json
from datetime import datetime
from freezegun import freeze_time
from blockchain.chain import (Block,
BlockChain,
DEFAULT_HASH,
BlockVerificationError)
@freeze_time('2006-12-03 19:25')
class BlockTestCase(unittest.TestCase):
    """Unit tests for Block: hash generation and tamper detection.

    Wall-clock time is frozen so the block timestamp folded into the
    expected hash is reproducible.
    """
    def setUp(self):
        # Sample unsigned transaction payload.
        # NOTE(review): the key is spelled 'transacton' (missing 'i');
        # harmless as opaque test data, but confirm it is not meant to
        # mirror a real schema key.
        self.tx = {
            'transacton': {
                'spam': 1,
                'eggs': 2
            },
            'public_key': None,
            'signature': None
        }
        self.previous_hash = hashlib.new(
            DEFAULT_HASH, 'previous_hash'.encode('utf-8')).hexdigest()
        # Numerator of the float timestamp's integer ratio, mirroring how
        # the production code appears to serialize timestamps.
        self.timestamp = str(datetime.timestamp(datetime(year=2006,
                                                         day=3,
                                                         month=12,
                                                         hour=19, minute=25)
                                                ).as_integer_ratio()[0])
    def test_generate_hash(self):
        # Block hash == hash(sorted-JSON tx + previous hash + timestamp).
        blk = Block(transaction=self.tx,
                    previous_hash=self.previous_hash)
        block_hash = hashlib.new(DEFAULT_HASH, b''.join([
            json.dumps(self.tx,
                       sort_keys=True).encode('utf-8'),
            self.previous_hash.encode('utf-8'),
            self.timestamp.encode('utf-8')
        ])).hexdigest()
        self.assertEqual(blk.hash_block(), block_hash)
    def test_verify_hash(self):
        blk = Block(transaction=self.tx,
                    previous_hash=self.previous_hash)
        self.assertIsNone(blk.verify_hash())
        # Tampering with the transaction must invalidate the stored hash.
        with self.assertRaises(BlockVerificationError):
            blk.transaction.update({'test': '405'})
            blk.verify_hash()
@freeze_time('2006-12-03 19:25')
class BlockChainTestCase(unittest.TestCase):
    """Unit tests for BlockChain: block factory, append and last_block."""
    def setUp(self):
        # A dead duplicate assignment of self.tx (a larger signed-transaction
        # dict that was immediately overwritten) has been removed.
        self.tx = {'spam': 1, 'eggs': 2}
        self.bc = BlockChain()
    def test_block_factory(self):
        # The factory exposes the Block class itself, not an instance.
        blk = self.bc.block_factory()
        self.assertIs(blk, Block)
    @mock.patch('blockchain.chain.Block.verify_transaction')
    def test_append(self, mock_verify):
        # After one append the chain holds 2 blocks, implying it starts
        # with a single (genesis) block.
        self.bc.append(self.tx)
        self.assertEqual(len(self.bc.chain), 2)
    @mock.patch('blockchain.chain.Block.verify_transaction')
    def test_last_block(self, mock_verify):
        self.bc.append(self.tx)
        self.assertEqual(self.bc.last_block.transaction,
                         self.tx)
| UTF-8 | Python | false | false | 2,696 | py | 14 | test_blockchain.py | 11 | 0.510015 | 0.491469 | 0 | 82 | 31.878049 | 76 |
uw-it-aca/canvas-sis-provisioner | 584,115,570,899 | 06489ddca5636ecca696e8eba133c2bee377bd23 | df453a7a2ef0d695e35327fa55f73012b28b8c8e | /sis_provisioner/management/commands/generate_user_report.py | 9d4ac3cb9c1c79cb681fba13dc87f1c9788ac6a1 | [
"Apache-2.0"
]
| permissive | https://github.com/uw-it-aca/canvas-sis-provisioner | 4b14f5bc60f297e5c497c5e936ab7eb48e2af5cf | d8265b325f54bcd3ee7f13ac2cfac8743545f8af | refs/heads/main | 2023-09-01T19:57:51.268231 | 2023-08-27T16:54:44 | 2023-08-27T16:54:44 | 43,710,673 | 0 | 0 | Apache-2.0 | false | 2023-09-13T19:06:28 | 2015-10-05T20:07:31 | 2021-11-19T17:03:18 | 2023-09-13T15:52:06 | 3,200 | 0 | 0 | 0 | Python | false | false | # Copyright 2023 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from sis_provisioner.dao.user import valid_reg_id, valid_gmail_id
from sis_provisioner.exceptions import UserPolicyException
from datetime import datetime, timedelta
import csv
class Command(BaseCommand):
    """Summarize Canvas users from two CSV exports.

    Classifies each user in the last-access report as UW (reg-id SIS id),
    Google (gmail SIS id) or unknown, and tallies enrollment counts,
    never-logged-in users and users inactive for over a year.
    """
    help = "Creates a report of users in Canvas."
    def add_arguments(self, parser):
        parser.add_argument(
            'last_access_report', help='last_access_report_path')
        parser.add_argument(
            'enrollment_report', help='enrollment_report_path')
    def handle(self, *args, **options):
        """Read both reports and print the summary counts to stdout."""
        last_access_report = options.get('last_access_report')
        enrollment_report = options.get('enrollment_report')
        users_all = 0
        users_uw = 0
        users_google = 0
        users_unknown = 0
        users_no_sisid = 0
        users_uw_login_one_year = 0
        users_google_login_one_year = 0
        users_uw_login_never = 0
        users_google_login_never = 0
        users_uw_no_enrollments = 0
        users_google_no_enrollments = 0
        enrollments = {}
        # csv.reader requires a text-mode file on Python 3 (newline='' per
        # the csv docs); the previous 'rb' mode raised a TypeError.
        with open(enrollment_report, 'r', newline='') as csvfile:
            reader = csv.reader(csvfile)
            for row in reader:
                # Column 3: SIS user id; count enrollment rows per user.
                sis_user_id = row[3]
                if len(sis_user_id):
                    if sis_user_id in enrollments:
                        enrollments[sis_user_id] += 1
                    else:
                        enrollments[sis_user_id] = 1
        with open(last_access_report, 'r', newline='') as csvfile:
            reader = csv.reader(csvfile)
            for row in reader:
                # Column 1: SIS user id; column 3: last-access timestamp.
                sis_user_id = row[1]
                if len(sis_user_id):
                    last_access = row[3]
                    try:
                        valid_reg_id(sis_user_id)
                        users_all += 1
                        users_uw += 1
                        if len(last_access):
                            if not self.logged_in_past_year(last_access):
                                users_uw_login_one_year += 1
                        else:
                            users_uw_login_never += 1
                        if sis_user_id not in enrollments:
                            users_uw_no_enrollments += 1
                    except UserPolicyException:
                        try:
                            valid_gmail_id(sis_user_id)
                            users_all += 1
                            users_google += 1
                            if len(last_access):
                                if not self.logged_in_past_year(last_access):
                                    users_google_login_one_year += 1
                            else:
                                users_google_login_never += 1
                            if sis_user_id not in enrollments:
                                users_google_no_enrollments += 1
                        except UserPolicyException:
                            users_unknown += 1
                else:
                    # Ignore the built-in Canvas test student.
                    if row[2] != 'Student, Test':
                        users_no_sisid += 1
        print('\n\n')
        print('All users: {}'.format(users_all))
        print('UW users: {}'.format(users_uw))
        print('UW users with 0 enrollments: {}'.format(
            users_uw_no_enrollments))
        print('UW users with 0 logins: {}'.format(users_uw_login_never))
        print('UW users who have not logged in the past year: {}'.format(
            users_uw_login_one_year))
        print('\n\n')
        print('Google users: {}'.format(users_google))
        print('Google users with 0 enrollments: {}'.format(
            users_google_no_enrollments))
        print('Google users with 0 logins: {}'.format(
            users_google_login_never))
        print('Google users who have not logged in the past year: {}'.format(
            users_google_login_one_year))
        print('\n\n')
        print('No SIS ID users: {}'.format(users_no_sisid))
        print('Bad SIS ID users: {}'.format(users_unknown))
        print('\n\n')
    def logged_in_past_year(self, last_access_str):
        """Return True if the last access was within the past 365 days.

        The previous comparison was inverted (it returned True for accesses
        OLDER than a year), which flipped the 'have not logged in the past
        year' counters reported by handle().
        """
        # The timestamp carries a '+00:00'-style offset; strip it before
        # parsing with a naive format string.
        last_access_dt = datetime.strptime(last_access_str[:-6],
                                           '%Y-%m-%dT%H:%M:%S')
        return last_access_dt >= datetime.utcnow() - timedelta(days=365)
| UTF-8 | Python | false | false | 4,440 | py | 177 | generate_user_report.py | 152 | 0.501802 | 0.492117 | 0 | 115 | 37.608696 | 77 |
cld-emman05/DaPCom.MS | 13,219,909,363,675 | 9c6511d30414e692eaabb262ab058c4501c137ba | 7ffdabd56eb8ca5c9d894175a0be09afdba24dff | /DaPComMS/PDI/admin.py | 6160aa983eec55f70fe5907db223d3401044dd95 | [
"MIT"
]
| permissive | https://github.com/cld-emman05/DaPCom.MS | 8db11eacecf5402497d219b40e23468cb0a2a3f6 | 7c2ec03c2c225abbabde978e3a683ce6d52d061e | refs/heads/master | 2020-06-09T11:38:31.967544 | 2019-07-24T09:25:16 | 2019-07-24T09:25:16 | 193,430,970 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.contrib import admin
from .models import DataSubject
from .models import Office
from .models import UserRole
from .models import Activity
from .models import Attachment
from .models import Data
from .models import Source
from .models import Purpose
from .models import Basis
from .models import Location
from .models import DataUser
from .models import DataPolicy
# Register your models here.
# Register every PDI model with the default admin site in one place.
for model in (
    DataSubject,
    Office,
    UserRole,
    Activity,
    Attachment,
    Data,
    Source,
    Purpose,
    Basis,
    Location,
    DataUser,
    DataPolicy,
):
    admin.site.register(model)
# DataSubject, Office, UserRole, Activity, Attachment, Data, Source, Purpose, Location, Basis, DataUser, DataPolicy, ActivityData
| UTF-8 | Python | false | false | 900 | py | 101 | admin.py | 50 | 0.812222 | 0.812222 | 0 | 37 | 23.324324 | 129 |
1047351469/ceshibiji | 7,919,919,711,989 | bee122181fbda5470a11f60f6245724e0ff6f25a | ced4abcdf4a104758bfb0d6e30c96fede46e65db | /就业班/第三阶段 Python基础/Python-day007/代码/day07/13-new魔法方法.py | 9d2a4dccf158a1564dc18d4772f00c2b3faed57e | []
| no_license | https://github.com/1047351469/ceshibiji | 1facf98acbf5b587dc7aee20a8d2d555759118e7 | ba798adb253b01495fabd773043e320aace901a8 | refs/heads/master | 2020-04-26T03:06:12.714070 | 2019-03-01T07:28:01 | 2019-03-01T07:28:01 | 173,255,870 | 0 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null | class Person(object):
    # Magic (dunder) method.  The default implementation would simply be:
    # def __new__(cls, *args, **kwargs):
    #     return object.__new__(cls)
    def __new__(cls, *args, **kwargs):
        """Allocate the instance explicitly so its id can be observed."""
        obj = object.__new__(cls)
        # print(obj)
        print(id(obj))  # the caller prints the same id again: one object
        return obj
    def __init__(self, name, age, no):
        """Store the person's name, age and id number (no)."""
        self.name = name
        self.age = age
        self.no = no
    def move(self):
        """Demo instance method: just prints 'move'."""
        print('move')
xiaoming = Person('小明', 23, '23232')
# Matches the id printed inside __new__: __init__ received the same object.
print(id(xiaoming))
xiaoming.move()
# a = None
# # <class 'NoneType'>
# print(type(a))
umass-forensics/IBLT-optimization | 14,645,838,518,320 | 1d49f39e214ec38d2cb2c7771d7857fd06604aec | 0b96987f6ed0952eba36ce0c7a4a08f5579870bc | /parse_csv.py | aaaf680965dc1416e53f5ad6f97368bf520b88a0 | [
"MIT"
]
| permissive | https://github.com/umass-forensics/IBLT-optimization | 5c6f84631f781f86ca15c8b6473a0b4a59fef9d8 | 9f31846d3848a07ba2ec52cae4f830db5a2bbec7 | refs/heads/master | 2020-03-22T22:55:24.803320 | 2019-08-17T15:42:17 | 2019-08-17T15:42:17 | 140,778,672 | 7 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | import re
# Input/output file names for the IBLT parameter-table generation.
param_file = 'param.export.0.995833.2018-07-16.csv'
template_file = 'iblt_params.h.tpl'
h_file = 'iblt_params.h'

# Read the CSV into a list of field lists; the first row is the header.
with open(param_file) as csv_file:
    table = [line.rstrip('\n').split(',') for line in csv_file]
header = table.pop(0)
print(header)

# Column positions are fixed by the header, so look them up once.
items_col = header.index('items')
hedge_col = header.index('hedge')
keys_col = header.index('keys')
items = [
    '\t{%d, IbltParamItem(%f, %d)},'
    % (int(line[items_col]), float(line[hedge_col]), int(line[keys_col]))
    for line in table
]

# Splice the generated entries into the header-file template and write it out.
with open(template_file) as tpl_file:
    template = tpl_file.read()
file_contents = re.sub('<<<items>>>', '\n'.join(items), template)
print(h_file)
with open(h_file, 'w') as out_file:
    out_file.write(file_contents)
| UTF-8 | Python | false | false | 744 | py | 18 | parse_csv.py | 14 | 0.603495 | 0.581989 | 0 | 25 | 28.76 | 84 |
MichaelGhizzi/Python | 11,948,599,055,883 | c330379402f016903fc3297df4987fb347a0e0d8 | e79198e622758cfbe737f4389e775e7a931e1343 | /isValidTime.py | c0590fa90600e45c6ae5dd85f3a453c086e5dd7e | []
| no_license | https://github.com/MichaelGhizzi/Python | e26d2d20607f6f18034344954e93c6ce9904da1f | 9b6586397b45a57df27514bc6c0271700fe5dc23 | refs/heads/master | 2021-01-10T16:22:52.005125 | 2016-08-28T23:01:11 | 2016-08-28T23:01:11 | 51,849,446 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python3
#---------------------
# Name: Michael Ghizzi
# Lab Project #5
# CPSC 254 8:00pm T/TH
# isValidTime.py
# This program has the user enter in a specific time via arguments, and the program defines if the time is valid or not.
#---------------------
import sys
import re
def main():
    """Validate the time string passed as the single command-line argument.

    Prints "is valid time" for times like '12:30 am' or '7:05PM'
    (hour 1-12, minutes 00-59, optional single space before am/pm);
    otherwise prints a usage error or "invalid time".
    """
    if len(sys.argv) != 2:  # exactly one time argument is required
        print("ERROR: Invalid time. Usage: ./isValidTime.py '##:## am|AM|pm|PM'")  # prints invalid
    else:
        # Hour 1-12 (optional leading zero), ':', minutes 00-59, an optional
        # whitespace character, then an am/pm marker.
        pattern = re.compile(r"(12|11|10|0?[1-9]):([0-5][0-9])(\s)?(AM|am|pm|PM)")
        # fullmatch (not match) so trailing garbage such as '12:30 amXYZ'
        # is rejected; match() only anchors at the start of the string.
        result = pattern.fullmatch(sys.argv[1])
        if result:
            print("is valid time")
        else:
            print("invalid time")
# Standard Python entry-point guard: run main() only when this file is
# executed directly, not when it is imported as a module.
if __name__ == "__main__":
    main()
| UTF-8 | Python | false | false | 1,319 | py | 16 | isValidTime.py | 11 | 0.652767 | 0.622441 | 0 | 35 | 36.685714 | 120 |
deepstone/YOHO_Automated_Test | 5,506,148,098,407 | d1b5a0e8478fcc1a2ea3442ba1e4076633ce2489 | bcc9304a620080bc7327388530d490173d1b50bd | /Public/P_Login_out_h5.py | 40db49b855c0983eb99fb3c980f1d2de18c99cf7 | []
| no_license | https://github.com/deepstone/YOHO_Automated_Test | 9566f620bf580e773dbd3c4ba44e22d602e51496 | 20fcabc846f18ae79f116bba92edf5a02720ae29 | refs/heads/master | 2016-08-06T18:59:04.329948 | 2016-01-20T02:46:40 | 2016-01-20T02:46:40 | 41,981,994 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
from Page import PageImp
from Automan import PublicImp
from time import sleep
from appium import webdriver
class Login_And_out():
    """UI helpers for the YOHO H5 app: log in (directly or from an Excel
    data sheet) and log out, driven through Appium page objects."""
    @classmethod
    def login(cls, account=None, password=None):
        """Log in starting from the personal-center page.

        With explicit account/password the form is filled directly;
        otherwise credentials are read from LoginUserData_h5.xlsx.
        Legacy navigation steps, kept for reference:
        if PageImp.Page_HomeGuide.Page_HomeGuide.GoBoys.IsExist():
            PageImp.Page_HomeGuide.Page_HomeGuide.GoBoys.Click()
        if PageImp.Page_Home.Page_Home.Float_Layer_Close.IsExist():
            PageImp.Page_Home.Page_Home.Float_Layer_Close.Click()
        PageImp.Page_Home.Page_Home.UserLogin.Click()
        # if PageImp.Page_Login.Page_Login.Login_del_image.IsExist():
        #     PageImp.Page_Login.Page_Login.Login_del_image.Click()
        #     sleep(2)
        """
        if PageImp.Page_PersonalCenter.Page_PersonalCenter.loginAndRegisterBtn.IsExist():
            PageImp.Page_PersonalCenter.Page_PersonalCenter.loginAndRegisterBtn.Click()
        if account is not None and password is not None:
            PageImp.Page_Login.Page_Login.UserName.Set(account)
            PageImp.Page_Login.Page_Login.PassWord.Set(password)
        else:
            xls = PublicImp.datadriver.ExcelSheet("LoginUserData_h5.xlsx", "LoginAccount")
            # NOTE(review): this loop fills the form once per spreadsheet row,
            # but the coordinate tap below runs only once after the loop, so
            # only the last row is actually submitted -- confirm intent.
            for i in range(1, xls.nrows()):
                PublicImp.log.step_section("Execute Excel Date: Line [%s]" % i)
                exc_account = xls.cell(i, "UserName")
                exc_password = xls.cell(i, "Password")
                PageImp.Page_Login.Page_Login.UserName.Set(exc_account)
                PageImp.Page_Login.Page_Login.PassWord.Set(exc_password)
        # PageImp.Page_Login.Page_Login.Login_loginbtn.Click()
        # Tap the login button by screen coordinates (single tap): Redmi handset
        # PublicImp.env.driver.execute_script("mobile: tap", {"touchCount": "1", "x": 355, "y": 634})
        # Tap by screen coordinates (single tap): Huawei Mate7
        # PublicImp.env.driver.execute_script("mobile: tap", {"touchCount": "1", "x": 513, "y": 916})
        # Coordinates below target the ZTE U5S
        PublicImp.env.driver.execute_script("mobile: tap", {"touchCount": "1", "x": 333, "y": 633})
        sleep(3)
    @classmethod
    def logout(cls):
        """Open the user-info page from the personal center and log out."""
        PageImp.Page_PersonalCenter.Page_PersonalCenter.GoToUserInfo.Click()
        PageImp.Page_PersonalCenter.Page_PersonalCenter.LogOut.Click()
        sleep(2)
| UTF-8 | Python | false | false | 2,297 | py | 48 | P_Login_out_h5.py | 42 | 0.63317 | 0.619386 | 0 | 57 | 38.45614 | 101 |
yuanchangwang/cheshi | 14,139,032,366,986 | dd566261ae11ab89cba19e86d170213858c23a76 | a7173a0b9a2294bf3e4f04bfcb92e6fe81842102 | /L05 内置函数、pickle、json、math、random、time、os、zipfile模块/课件/ceshi0512/dange/2.pickle.py | f4896979d5633978763666d3f7f17bb6e20d9f15 | []
| no_license | https://github.com/yuanchangwang/cheshi | e1c01760675fa97415539b2970d68556b1bebdac | 4e101f6b279057aa82d23c5dac61b0c7d1267c23 | refs/heads/master | 2020-09-13T03:35:08.250776 | 2019-11-19T08:57:11 | 2019-11-19T08:57:11 | 222,645,227 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # ### 序列化模块 pickle
'''
Serialization: turning data that cannot be stored directly into a storable
byte form.
Deserialization: restoring stored bytes back into the original data type.
PHP equivalents:
    serialize
    unserialize
'''
# import brings a module into scope; here, the pickle module
import pickle
# dumps: serialize an arbitrary object into a bytes string
dic = {"a":1,"b":2}
res = pickle.dumps(dic)
print(res)
# loads: deserialize bytes back into the original data
res = pickle.loads(res)
print(res,type(res))
# Can a function be serialized?
def func():
    print("我是一个函数")
res = pickle.dumps(func)
print(res)
print("<==>")
res = pickle.loads(res)
res()
# Can an iterator be serialized?
# NOTE(review): Iterator/Iterable moved to collections.abc; this import
# fails on Python 3.10+ -- confirm the target interpreter version.
from collections import Iterator,Iterable
it = iter(range(10))
print(isinstance(it,Iterator))
res = pickle.dumps(it)
print(res)
res = pickle.loads(res)
print(res)
for i in range(3):
    print(next(res))
# Every built-in data type can be serialized with pickle.
# dump: serialize an object and write it to a file-like object
dic = {"a":1,"b":2}
with open("0512_1.txt",mode="wb") as fp:
    # pickle.dump(obj, file): turn the data into a binary byte stream
    # and store it in the file
    pickle.dump(dic,fp)
# load: read a file-like object's contents and deserialize them back
with open("0512_1.txt",mode="rb") as fp:
    res = pickle.load(fp)
    print(res)
| UTF-8 | Python | false | false | 1,394 | py | 293 | 2.pickle.py | 186 | 0.694 | 0.677 | 0 | 53 | 17.245283 | 48 |
Jacksonleste/exercicios-python-curso-em-video | 4,526,895,563,548 | e358e6f4c6cdde114558f73f19dd6ceb388e1103 | 0966c0630bb3d9824e11c986a825d3bf32af9ab3 | /ex041.py | b32c7e05e0881566cc1dbfd0e58502eb52a65298 | []
| no_license | https://github.com/Jacksonleste/exercicios-python-curso-em-video | b5995216b5cc61dbbd6fdd2ebc87ec40673eb2ac | aad364e440e315df9cd1a1ca661c7d15d07704de | refs/heads/main | 2023-04-10T13:52:44.254157 | 2021-04-13T03:13:51 | 2021-04-13T03:13:51 | 328,409,343 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from datetime import datetime
# Read the birth year and derive the athlete's age from the current year.
ano_nascimento = int(input('ano de nascimento: '))
idade = datetime.now().year - ano_nascimento
print('o atleta que tem {} anos'.format(idade))

# Print the category message for the first matching age bracket.
if idade > 25:
    print('sua catégoria é MASTER')
elif idade > 19:
    print('catégoria: SÊNIOR')
elif idade > 14:
    print('catégoria: JUNIOR')
elif idade > 9:
    print('catégoria: INFANTIL')
else:
    print('catégoria: MIRIM')
| UTF-8 | Python | false | false | 412 | py | 111 | ex041.py | 110 | 0.661728 | 0.639506 | 0 | 15 | 26 | 47 |
HBinhCT/Q-project | 16,509,854,316,979 | bc325363382c057a11471e3142811ac5d63c8925 | d66818f4b951943553826a5f64413e90120e1fae | /hackerrank/Algorithms/Tara's Beautiful Permutations/solution.py | e304b52eadd17730bfbe3dea4fb35a2d043a6ea2 | [
"MIT"
]
| permissive | https://github.com/HBinhCT/Q-project | 0f80cd15c9945c43e2e17072416ddb6e4745e7fa | 19923cbaa3c83c670527899ece5c3ad31bcebe65 | refs/heads/master | 2023-08-30T08:59:16.006567 | 2023-08-29T15:30:21 | 2023-08-29T15:30:21 | 247,630,603 | 8 | 1 | MIT | false | 2020-07-22T01:20:23 | 2020-03-16T06:48:02 | 2020-07-21T10:55:44 | 2020-07-22T01:20:22 | 593 | 0 | 0 | 0 | Python | false | false | #!/bin/python3
import os
#
# Complete the beautifulPermutations function below.
#
def beautifulPermutations(arr):
    """Count 'beautiful' permutations of arr (no equal elements adjacent).

    Uses inclusion-exclusion over the duplicated values: each element occurs
    at most twice, so with n = len(arr) and k duplicate pairs the answer is
    sum_i (-1)^i * C(k, i) * (n - i)! / 2^(k - i), taken modulo 10^9 + 7.

    :param arr: list of integers, each value appearing at most twice
    :return: number of beautiful permutations modulo 10^9 + 7
    """
    from math import factorial

    MOD = 10 ** 9 + 7
    n = len(arr)
    dup_pairs = n - len(set(arr))  # number of values that occur twice
    total = 0
    binom = 1  # running C(dup_pairs, i), updated incrementally each pass
    for i in range(dup_pairs + 1):
        sign = -1 if i % 2 else 1
        # (n - i)! is always exactly divisible by 2^(dup_pairs - i) here,
        # so integer floor division is exact.
        term = binom * factorial(n - i) // 2 ** (dup_pairs - i)
        total = (total + sign * term) % MOD
        binom = binom * (dup_pairs - i) // (i + 1)
    return total % MOD
if __name__ == '__main__':
    # HackerRank harness: first line holds the number of test cases; each
    # case supplies an element count followed by the array itself.  Results
    # are written to the file named by the OUTPUT_PATH environment variable.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    t = int(input())
    for t_itr in range(t):
        arr_count = int(input())  # declared length of the next line (not checked)
        arr = list(map(int, input().rstrip().split()))
        result = beautifulPermutations(arr)
        fptr.write(str(result) + '\n')
    fptr.close()
| UTF-8 | Python | false | false | 834 | py | 3,248 | solution.py | 1,828 | 0.540767 | 0.515588 | 0 | 41 | 19.341463 | 88 |
Maruf-S/Competitve-programing | 16,209,206,607,354 | 429e8b5bb70d953d75a2210853feaa5592da674d | b20cc37e0b986a0b458f7f446d5025beee01ba7a | /1346-check-if-n-and-its-double-exist/1346-check-if-n-and-its-double-exist.py | 2cc44352ed4bc21b0047dd10c005a40069679226 | []
| no_license | https://github.com/Maruf-S/Competitve-programing | e5e405912a4c2d9a1fad35d66411964ecbc10d00 | 3134d9c1b7e987c6cec3c614512faab4114fe0bd | refs/heads/master | 2023-02-04T08:08:43.374565 | 2023-02-01T17:29:55 | 2023-02-01T17:29:55 | 225,399,606 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | class Solution:
def checkIfExist(self, arr: List[int]) -> bool:
count = Counter()
for i in arr:
if count[i * 2] > 0 or count[i / 2] > 0:
return True
count[i] += 1
return False | UTF-8 | Python | false | false | 243 | py | 335 | 1346-check-if-n-and-its-double-exist.py | 329 | 0.473251 | 0.452675 | 0 | 8 | 29.5 | 52 |
DmitryVGusev/algo_and_structures_python | 12,558,484,376,667 | ce74a100db31a37e60375e22824aa6e12ee03523 | ea51fbd934a412018fe112342ffe51a9de639cee | /Lesson_1/6.py | dc13cfcb9a1414ce6fff53d47e1d6ebb3a724160 | []
| no_license | https://github.com/DmitryVGusev/algo_and_structures_python | f6e2ebc76802646f663e5085a14bfb5b8af55753 | d94ae172dbf02bb6436d31716bfbb4f86d8f7627 | refs/heads/master | 2020-05-05T08:24:22.225547 | 2019-04-27T17:38:31 | 2019-04-27T17:38:31 | 179,862,838 | 0 | 0 | null | true | 2019-04-27T17:38:32 | 2019-04-06T17:09:20 | 2019-04-24T14:46:11 | 2019-04-27T17:38:32 | 59 | 0 | 0 | 0 | Python | false | false | # 6. Пользователь вводит номер буквы в алфавите. Определить, какая это буква.
def ident_letter_by_position():
"""Определяет букву по положению в алфавите"""
position = int(input("Введите номер буквы: "))
print(f"Буква '{chr(96 + position)}' находится под номером {position}")
if __name__ == '__main__':
ident_letter_by_position()
| UTF-8 | Python | false | false | 486 | py | 33 | 6.py | 33 | 0.671429 | 0.662857 | 0 | 11 | 30.818182 | 77 |
tue-robotics/action_server | 7,636,451,865,088 | 94e909029733633272afc9456ef37f7ec74a2c6d | ee33729c1b082dd41ea9621953672081268f9ecb | /action_server/src/action_server/actions/util/entities_from_description.py | 968f6385ee71fe33d9fff99f4b532f4d8134d1a9 | []
| no_license | https://github.com/tue-robotics/action_server | 3276cc65048893c3a87f8b772f2420fd64d8efde | 97c53226e6e2c46dfb3fe3ac0689e3bbfaa31452 | refs/heads/master | 2023-08-23T01:22:42.610153 | 2023-05-06T07:17:13 | 2023-05-06T07:17:13 | 26,604,363 | 0 | 0 | null | false | 2023-09-06T07:32:17 | 2014-11-13T19:38:36 | 2022-01-03T15:14:26 | 2023-09-06T07:32:16 | 623 | 0 | 0 | 23 | Python | false | false | from typing import List, Tuple
from pykdl_ros import VectorStamped
from robot_skills.robot import Robot
def length_sq(x, y):
return x * x + y * y
# ----------------------------------------------------------------------------------------------------
def entities_from_description(entity_descr: dict, robot: Robot) -> Tuple[List, str]:
"""
Query entities with various methods
:param entity_descr: A dict that contains an 'id' or 'type' field
:param robot: The robot object
:return: (entities, error_msg)
entities - list of entities that fulfill the description
(each element has type EntityInfo)
error_msg - If something goes wrong, this contains the message
"""
if not isinstance(entity_descr, dict):
return ([],
"entities_from_description: the specified entity_descr should be a dictionary! I received: %s" % str(
entity_descr))
if not isinstance(robot, Robot):
return [], "entities_from_description: the specified robot should be a Robot! I received: %s" % str(robot)
if "id" in entity_descr:
e = robot.ed.get_entity(uuid=entity_descr["id"])
if not e:
return [], "No entity with id '%s'" % entity_descr["id"]
entities = [e]
elif "type" in entity_descr:
entities = robot.ed.get_entities(etype=entity_descr["type"])
else:
entities = robot.ed.get_entities()
if "location" in entity_descr:
location_entity = robot.ed.get_entity(uuid=entity_descr["location"]["id"])
if location_entity:
if "area" in entity_descr and entity_descr["area"] in location_entity.volumes:
area = entity_descr["area"]
else:
area = "on_top_of"
entities = [e for e in entities if
location_entity.in_volume(volume_id=area, point=VectorStamped.from_framestamped(e.pose))]
if not entities:
return [], "No such entity"
robot_location = robot.base.get_location()
robot_pos = robot_location.frame.p
# Sort entities by distance
entities = sorted(entities, key=lambda entity: entity.distance_to_2d(robot_pos))
return entities, ""
| UTF-8 | Python | false | false | 2,229 | py | 43 | entities_from_description.py | 37 | 0.593988 | 0.59354 | 0 | 64 | 33.828125 | 117 |
sisul1204/xq9 | 16,767,552,329,479 | f8b3eda8bffa65691000afa03bff3b3704bbe526 | 53824319c44db2dd68749f124c636544612a146f | /page_object/driver/AndroidClient.py | 385b891f28a37af0fde1482c536db19f2a08ca4c | []
| no_license | https://github.com/sisul1204/xq9 | 43f79b091925f32ba1a47362c7a1b3e10e94c66f | 186a9b87ba50253e835ce408f56fc4cc511e8ebd | refs/heads/master | 2022-11-15T11:51:28.938353 | 2020-07-13T12:23:27 | 2020-07-13T12:23:27 | 279,068,100 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # * coding:utf-8 *
# Author:sisul
#创建时间:2020/7/12 7:41
from appium.webdriver import webdriver
from appium.webdriver.webdriver import WebDriver
from appium import webdriver
class AndroidClient:
driver:WebDriver
@classmethod
def installApp(cls) -> WebDriver:
caps = {}
caps['platformName'] = 'android'
caps['deviceName'] = 'oneplus'
caps['appPackage'] = 'com.xueqiu.android'
caps['appActivity'] = '.view.WelcomeActivityAlias'
#解决第一次启动的问题
caps['autoGrantPermissions'] ='true'
cls.driver = webdriver.Remote('http://localhost:4273/wd/hub', caps)
cls.driver.implicitly_wait(10)
return cls.driver
@classmethod
def restartApp(cls):
caps = {}
caps['platformName'] = 'android'
caps['deviceName'] = 'oneplus'
caps['appPackage'] = 'com.xueqiu.android'
caps['appActivity'] = '.view.WelcomeActivityAlias'
caps['unicodeKeyboard'] = 'True'
caps['resetKeyboard'] = 'True'
caps['noReset'] = 'True'
cls.driver = webdriver.Remote('http://localhost:4723/wd/hub', caps)
cls.driver.implicitly_wait(10)
return cls.driver
| UTF-8 | Python | false | false | 1,221 | py | 7 | AndroidClient.py | 7 | 0.624685 | 0.605374 | 0 | 39 | 29.538462 | 75 |
acgeist/wxgonk | 4,157,528,352,284 | d6e46631c12a8a0a9f5e13dcbfe6b22782259d26 | ff8f28e0eb4f4b2acb3610325e6483530cb92294 | /wxgonk.py | ca9d75f6e5467474cc6847e331ac1d7cc404375f | [
"MIT"
]
| permissive | https://github.com/acgeist/wxgonk | b37dcb2b599bca987c4dda4514bc2cde9bfc63b5 | 0c78a0273ada7d861853b8995bf636d5cb524796 | refs/heads/master | 2020-03-22T01:05:05.493095 | 2019-01-05T12:10:28 | 2019-01-05T12:10:28 | 139,282,109 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# wxgonk.py
# TODO:
# Eventually, index.html should be used
# to present the data (i.e. build the display table). For troubleshooting
# purposes, can also turn this into an AFI 11-202v3 tutorial.
# -Include an option for using USAF rules vice FAA rules. Under FAA rules,
# we can just classify everything as VFR, MVFR, IFR, LIFR.
# -keep a running list of countries with bad weather to make subsequent
# searches (on the same day) faster. Specifically, once we've queried
# a country and there aren't even any fields found, we shouldn't search
# that country again. However, it should still remain available in the
# list of countries.
import countries
import latlongcalcs
import mapurlmaker
import usingcgi
import wxurlmaker
from datetime import datetime
import logging
from random import choice
import re
import requests
import sys
import urllib.request
import webbrowser
from typing import List
# Prefer lxml's faster C-based etree when it is installed; otherwise fall
# back to the standard library, which offers the same API subset used here.
try:
    from lxml import etree
except ImportError:
    import xml.etree.ElementTree as etree
# TODO: This should perhaps be in a "main.py" file
# filemode='w' truncates .logs/test.log on every run; the HTML report code
# later re-reads this same file to build its page body.
logging.basicConfig( \
        level=logging.DEBUG, \
        filename = '.logs/test.log', \
        filemode='w', \
        format='\n%(asctime)s - %(filename)s: line %(lineno)s, %(funcName)s: %(message)s')
# Weather thresholds (visibility in statute miles, ceiling in feet AGL --
# units as used by the debug messages in the functions below).
FILING_MINS = {'vis': 1.5, 'ceiling': 500}   # minimums to file to a destination
ALT_REQ = {'vis': 3.0, 'ceiling': 2000}      # below either value an alternate is required
ALT_MINS = {'vis': 2.0, 'ceiling': 1000}     # minimums for a field to serve as alternate
NO_CEIL_VAL = 99999                          # sentinel from get_ceiling() when no ceiling exists
COUNTRY_DICT = countries.make_country_dict() # country-code lookup table
TEST_FIELDS = []                             # ICAO ids under test; filled from argv or gen_bad_fields()
USING_CGI = usingcgi.called_from_cgi()       # True when served through CGI (affects output format)
NUM_REQS = 0                                 # running count of ADDS requests (bumped in get_root)
class InvalidFunctionInput(Exception):
    """Raised when a function receives an argument that fails validation."""
class InvalidDataType(Exception):
    """Raised when data of an unexpected type is encountered."""
def get_root(url:str):
    '''Return the root element of the XML document fetched from *url*.

    Side effect: increments the module-level request counter NUM_REQS.

    :param url: ADDS text-data-server URL to fetch
    :return: parsed XML root element
    '''
    global NUM_REQS
    NUM_REQS += 1
    # BUGFIX: close the HTTP response deterministically instead of leaking
    # the socket until garbage collection.
    with urllib.request.urlopen(url) as response:
        return etree.fromstring(response.read())
def node_contains_field(node, field:str) -> bool:
    '''Return a truthy value when *node* contains a report for *field*.

    :param node: parsed ADDS response root element
    :param field: candidate 4-letter ICAO identifier (case-insensitive)
    :return: False for a malformed identifier; otherwise the list of
        matching raw_text elements -- an empty list (falsey) when the
        station is absent.
    '''
    if re.match(r'\b[a-zA-Z]{4}\b', field) is None:
        # BUGFIX: this previously returned the undefined name 'false',
        # raising NameError at runtime; Python's literal is 'False'.
        return False
    return node.findall('.//*[station_id="' + field.upper() + '"]/raw_text')
def can_file_metar(metar_node, field:str) -> bool:
    '''Return whether current conditions at *field* allow legal filing.

    :param metar_node: METAR response root element
    :param field: ICAO id to evaluate (existing callers pass DEST_ID)
    :return: True when visibility exceeds FILING_MINS['vis']
    '''
    # BUGFIX: *field* was accepted but ignored -- the lookup was hard-coded
    # to the module-level DEST_ID.  Use the parameter; all existing callers
    # pass DEST_ID, so their behavior is unchanged.
    vis_at_dest = float(metar_node.findall('.//*[station_id="' + field
        + '"]/visibility_statute_mi')[0].text)
    debug_str = 'In function "can_file_metar" the visibility at ' + field + ' is '
    debug_str += '{:.1f}'.format(vis_at_dest) + 'sm, which is '
    # Comparator now matches the strict '>' used in the return statement
    # (the old message logged '>=' while the code returned 'vis > mins').
    debug_str += '>' if vis_at_dest > FILING_MINS['vis'] else '<='
    debug_str += ' FILING_MINS["vis"] (' + '{:.1f}'.format(FILING_MINS['vis']) + 'sm)'
    logging.debug(debug_str)
    return vis_at_dest > FILING_MINS['vis']
def has_ceiling(node) -> bool:
    '''Return True when any layer in *node* is a BKN/OVC/OVX (ceiling) layer.

    :param node: iterable of sky_condition elements with a sky_cover attribute
    '''
    return any(layer.get('sky_cover') in ('BKN', 'OVC', 'OVX') for layer in node)
def get_ceiling(node) -> int:
    '''Return the lowest BKN/OVC/OVX cloud base in feet AGL.

    When no ceiling layer exists, the NO_CEIL_VAL sentinel (99999) is
    returned instead.

    :param node: iterable of sky_condition elements carrying sky_cover and
        cloud_base_ft_agl attributes
    '''
    if not has_ceiling(node):
        return NO_CEIL_VAL
    bases = [int(layer.get('cloud_base_ft_agl')) for layer in node
             if layer.get('sky_cover') in ('BKN', 'OVC', 'OVX')]
    return min(bases)
def get_vis(node) -> str:
    '''Return the raw visibility text (statute miles) from a METAR element.'''
    vis_element = node.find('visibility_statute_mi')
    return vis_element.text
def req_alt(node) -> bool:
    '''Return True when the destination's weather requires an alternate.

    An alternate is required when either the visibility or the ceiling at
    DEST_ID is below the corresponding ALT_REQ threshold.

    :param node: METAR response root element containing DEST_ID's report
    '''
    vis_at_dest = float(node.findall('.//*[station_id="' + DEST_ID
        + '"]/visibility_statute_mi')[0].text)
    # BUGFIX: the whole response root used to be passed to get_ceiling(),
    # whose layer scan then never saw a sky_condition element and always
    # returned NO_CEIL_VAL.  Pass the destination's sky_condition list, as
    # the other call sites do.
    ceil_at_dest = get_ceiling(node.findall('.//*[station_id="' + DEST_ID
        + '"]/sky_condition'))
    debug_str = '\nIn function "req_alt" the visibility at ' + DEST_ID + ' is '
    debug_str += '{:.1f}'.format(vis_at_dest) + 'sm, which is '
    debug_str += '>=' if vis_at_dest >= ALT_REQ['vis'] else '<'
    debug_str += ' ALT_REQ["vis"] (' + '{:.1f}'.format(ALT_REQ['vis']) + 'sm)'
    debug_str += '\nIn function "req_alt" the ceiling at ' + DEST_ID + ' is '
    debug_str += '{:.0f}'.format(ceil_at_dest) + 'ft agl, which is '
    debug_str += '>=' if ceil_at_dest >= ALT_REQ['ceiling'] else '<'
    debug_str += ' ALT_REQ["ceiling"] (' + '{:.0f}'.format(ALT_REQ['ceiling']) + 'ft)'
    logging.debug(debug_str)
    return vis_at_dest < ALT_REQ['vis'] or ceil_at_dest < ALT_REQ['ceiling']
def valid_alt(node, field:str) -> bool:
    '''Return whether *field*'s weather qualifies it as a legal alternate.

    :param node: METAR response root element
    :param field: ICAO id of the candidate alternate
    :return: True when both visibility and ceiling meet ALT_MINS
    '''
    # BUGFIX (three related defects):
    #   1. *field* was ignored -- the visibility lookup used DEST_ID, so the
    #      destination was tested instead of the candidate alternate.
    #   2. get_ceiling() received the whole response root instead of the
    #      field's sky_condition layers, so it always saw "no ceiling".
    #   3. The comparison was inverted: the function returned True when the
    #      weather was BELOW the alternate minimums, contradicting its
    #      docstring.
    vis_at_alt = float(node.findall('.//*[station_id="' + field
        + '"]/visibility_statute_mi')[0].text)
    ceil_at_alt = get_ceiling(node.findall('.//*[station_id="' + field
        + '"]/sky_condition'))
    return vis_at_alt >= ALT_MINS['vis'] and ceil_at_alt >= ALT_MINS['ceiling']
def get_raw_text(field:str, metar_or_taf:str) -> str:
    '''Return the raw METAR and/or TAF text for a 4-letter ICAO identifier.

    :param field: 4-letter ICAO identifier (case-insensitive)
    :param metar_or_taf: 'metar', 'taf', or 'both' (case-insensitive)
    :return: raw report text (TAFs get newlines inserted before each
        forecast group); an ADDS "not found" message when no result exists
    :raises InvalidFunctionInput: on non-string or malformed arguments
    '''
    ### INPUT VALIDATION ###
    # TODO: write a function to validate ICAO identifiers. Should figure out
    # how to pull a list of all valid identifiers from an official database
    # and do a simple lookup.
    if not isinstance(field, str) or not isinstance(metar_or_taf, str):
        err_str = 'One of the inputs to get_raw_text was not ' + 'a string.\n'
        err_str += 'field = ' + str(field) + ', type(field) = '
        err_str += str(type(field)) + '\nmetar_or_taf = ' + str(metar_or_taf)
        err_str += ', type(metar_or_taf) = ' + str(type(metar_or_taf))
        logging.warning(err_str)
        raise InvalidFunctionInput(err_str)
    if re.match(r'\b[a-zA-Z]{4}\b', field) == None:
        err_str = 'Invalid value for field in function get_raw_text: ' + field
        logging.warning(err_str)
        raise InvalidFunctionInput(err_str)
    if not re.match(r'(METAR|TAF|BOTH)', metar_or_taf.upper()):
        err_str = 'Invalid input at get_raw_text, second argument must be '
        err_str += '"metar", "taf", or "both" (case insensitive). Received '
        err_str += metar_or_taf
        logging.warning(err_str)
        raise InvalidFunctionInput(err_str)
    logging.debug('get_raw_text called with parameters: field = ' + field \
            + ', metar_or_taf = ' + metar_or_taf.upper())
    ### ACTUALLY DOING THE THING ###
    result_str = ''
    # The cached module-level roots only cover TEST_FIELDS; any other field
    # triggers a fresh ADDS request in the else branch below.
    if field in TEST_FIELDS:
        #TODO: this should check if there is actually a metar or taf for the given
        # field rather than just checking if the field is in TEST_FIELDS
        temp_root = metar_root if metar_or_taf.upper() == 'METAR' else \
                taf_root
        result_str = '' if not node_contains_field(temp_root, field) else \
                temp_root.findall('.//*[station_id="' + field.upper()
                        + '"]/raw_text')[0].text
    else:
        temp_url = wxurlmaker.make_adds_url('METAR', field.split()) if \
                metar_or_taf.upper() == 'METAR' else \
                wxurlmaker.make_adds_url('TAF', field.split())
        temp_root = get_root(temp_url)
        result_str = temp_root.findall('.//raw_text')[0].text if \
                int(temp_root.find('data').attrib['num_results']) > 0 else \
                metar_or_taf.upper() + ' for ' + field + ' not found.'
    # Add newlines to make raw text TAFs easier to read
    if metar_or_taf.upper() == 'TAF' or metar_or_taf.upper() == 'BOTH':
        result_str = re.sub(r'(TAF|FM|TEMPO|BECMG)', r'\n\1', result_str)
    # 'BOTH' recurses once for the METAR and prepends it to the TAF text.
    if metar_or_taf.upper() == 'BOTH':
        result_str = get_raw_text(field, 'METAR') + '\n' + result_str
    return result_str
def time_str_to_obj(input_time:str) -> datetime:
    """Parse an ADDS timestamp such as '2019-01-05T12:10:28Z' into a
    (naive) datetime."""
    adds_format = '%Y-%m-%dT%H:%M:%SZ'
    return datetime.strptime(input_time, adds_format)
def node_to_str(node, indent:int = 0):
    '''Render an XML subtree as an indented multi-line string.

    Each element contributes one tab-indented line ("tag" or "tag: text"),
    with children indented one level deeper.
    '''
    # TODO: include attributes
    label = node.tag if node.text is None else f'{node.tag}: {node.text}'
    lines = [indent * '\t' + label + '\n']
    for child in node:
        lines.append(node_to_str(child, indent + 1))
    return ''.join(lines)
def make_coord_list():
    '''Make a list of all field locations. Used to generate map URL.

    Reads the module-level field_root; each entry is a dict holding
    station_id, name, lat and lon (all strings, as parsed from the XML).
    The destination (DEST_ID) is forced to the front of the list because
    the map builder treats the first entry specially.
    '''
    field_list = []
    temp_node = {}
    for field in field_root.findall('.//Station'):
        temp_node = {
            'station_id': field.find('station_id').text,
            'name': field_root.findall('.//*.[station_id="'
                + field.find('station_id').text
                + '"]/site')[0].text,
            'lat': field.find('latitude').text,
            'lon': field.find('longitude').text}
        # Homestation/destination needs to be first item in the list.
        # This affects the Google Map that is created later on.
        if temp_node['station_id'] == DEST_ID:
            field_list.insert(0, temp_node)
        else:
            field_list.append(temp_node)
    return field_list
def gen_bad_fields(country:str = '00', num_results:int = 10) -> List[str]:
    '''Generate a list of ICAO ids where the visibility is currently bad.

    :param country: two-letter country code; when invalid (default '00'),
        random countries are tried until one yields enough bad weather
    :param num_results: maximum number of station ids to return
    :return: ids of TAF-capable stations whose visibility is below
        ALT_REQ['vis'] (network I/O: issues several ADDS requests)
    '''
    # TODO: develop some persistent data to keep track of which countries
    # rarely yield bad weather and stop choosing those as often. Perhaps
    # weight the countries based on how many fields are there? Could also
    # consider querying every country and removing from my countries data
    # file all countries that return 0 fields.
    is_valid_choice = False
    country_choices = list(COUNTRY_DICT.keys())
    num_countries_tried = 0 # For debugging/data gathering
    countries_tried_str = ''
    while not is_valid_choice:
        num_countries_tried += 1
        country_choice = choice(country_choices) if \
                not countries.is_valid_country(country) else \
                country
        countries_tried_str += country_choice + ' '
        logging.debug('Looking for bad weather in ' + country_choice + ' ('
                + countries.country_name_from_code(country_choice) + '), ')
        bad_field_url = wxurlmaker.make_adds_url('country', [], country_choice)
        bad_field_root = get_root(bad_field_url)
        bad_fields_list = []
        logging.debug(bad_field_root.find('data').attrib['num_results']
                + ' fields found.')
        # if not many results were returned, we
        # don't want to try this country again.
        if int(bad_field_root.find('data').attrib['num_results']) < 10:
            country_choices.remove(country_choice)
            continue
        for field in bad_field_root.findall('.//Station'):
            # Ensure field has TAF capability
            if field.findall('.//TAF'):
                bad_fields_list.append(field.find('station_id').text)
        rand_field_list = []
        # Based on trial/error, http requests start to break with >1000 fields
        for i in range(0, min(1000, len(bad_fields_list))):
            new_addition = choice(bad_fields_list)
            if not new_addition in rand_field_list:
                rand_field_list.append(new_addition)
        bad_metar_url = wxurlmaker.make_adds_url('METAR', \
                stationList = rand_field_list)
        bad_metar_root = get_root(bad_metar_url)
        bad_metars = bad_metar_root.findall('.//METAR')
        # Keep only stations with a purely alphabetic id, a reported
        # visibility, and visibility below the alternate-required threshold.
        bad_metars = list(filter(lambda metar:
            not re.search('\d+', metar.find('station_id').text) and
            metar.find('visibility_statute_mi') is not None and
            float(metar.find('visibility_statute_mi').text) < ALT_REQ['vis'],
            bad_metars))
        # TODO: filter bad metars by whether or not the site itself has
        # TAF capability. This is listed in the station xml file under
        # <Station><site_type><TAF/></site_type></Station>
        if len(bad_metars) > 2:
            is_valid_choice = True
        else:
            logging.debug('No fields in '
                    + countries.country_name_from_code(country_choice)
                    + ' currently have visibility < '
                    + str(ALT_REQ['vis']) + '. Picking another country.')
            country_choices.remove(country_choice)
    logging.debug('Tried ' + str(num_countries_tried - 1) \
            + ' countries unsuccessfully: ' + countries_tried_str[:-3])
    logging.debug(str(len(bad_metars)) + ' fields in '
            + countries.country_name_from_code(country_choice)
            + ' currently have visibility < ' + str(ALT_REQ['vis']))
    if len(bad_metars) > num_results:
        del bad_metars[num_results:]
    bad_field_ids = []
    for metar in bad_metars:
        bad_field_ids.append(metar.find('station_id').text)
    return bad_field_ids
def test():
    '''Log a verbose diagnostic dump of the fetched field/METAR/TAF data.

    Exercises most helpers (distances/headings, raw report retrieval,
    filing and alternate logic) against the module-level roots and writes
    everything to the debug log.
    '''
    home_lat = float(field_root.findall('.//*.[station_id="' + DEST_ID
        + '"]/latitude')[0].text)
    home_lon = float(field_root.findall('.//*.[station_id="' + DEST_ID
        + '"]/longitude')[0].text)
    logging.debug('Home station/destination = ' + DEST_ID + ' ('
        + field_root.findall('.//*.[station_id="' + DEST_ID
        + '"]/site')[0].text + '), located at lat/long: '
        + str(home_lat) + ', '+ str(home_lon))
    for root in roots:
        results = 'Received ' + root.find('data').attrib['num_results']
        results += ' ' + root.find('data_source').attrib['name'] + ': '
        for id in root.findall('.//station_id'):
            results += ' ' + id.text
        logging.debug(results)
    # Log each field's distance/bearing from home plus its raw reports.
    for field in field_root.findall('.//Station'):
        if field.find('station_id').text == DEST_ID:
            continue
        field_id = field.find('station_id').text
        logging.debug(field_id + ' (' + field_root.findall('.//*.[station_id="'
            + field_id + '"]/site')[0].text + ') is '
            + str(round(latlongcalcs.dist_between_coords(home_lat, home_lon,
            field.find('latitude').text, field.find('longitude').text)))
            + ' nautical miles from ' + DEST_ID + ' on a heading of '
            + '{:03d}'.format(round(latlongcalcs.hdg_between_coords(home_lat,
            home_lon, field.find('latitude').text,
            field.find('longitude').text)))
            # Use the HTML escape if called from cgi,
            # otherwise input unicode directly.
            + ('°' if USING_CGI else u'\N{DEGREE SIGN}') + ' true.')
        logging.debug('\nCurrent METAR/TAF at ' \
            + field_id + ': \n' + get_raw_text(field_id, 'both'))
    # https://docs.python.org/2/library.xml.etree.elementtree.html#elementtree-xpath
    metars = metar_root.findall('.//METAR')
    logging.debug('Can I legally file to ' + DEST_ID + '?')
    logging.debug(get_raw_text(DEST_ID, 'METAR'))
    logging.debug('can_file_metar: ' + str(can_file_metar(metar_root, DEST_ID)))
    logging.debug('has_ceiling: ' + str(has_ceiling(metar_root.findall(
        './/*[station_id="' + DEST_ID + '"]/sky_condition'))))
    logging.debug('ceiling: ' + str(get_ceiling(metar_root.findall(
        './/*[station_id="' + DEST_ID + '"]/sky_condition'))))
    logging.debug('visibility: ' + get_vis(metar_root.find(
        './/*[station_id="' + DEST_ID + '"]')))
    if can_file_metar(metar_root, DEST_ID):
        logging.debug('Do I require an alternate to file to ' + DEST_ID + '?')
        logging.debug('req_alt: ' + str(req_alt(metar_root)))
    tafs = taf_root.findall('.//TAF')
    for taf in tafs:
        forecast_list = taf.findall('.//forecast')
        taf_intro = taf.find('station_id').text + ': '
        taf_intro += str(len(forecast_list)) + ' lines.'
        logging.debug(taf_intro)
        for forecast in forecast_list:
            taf_intro += '\nFrom '
            taf_intro += time_str_to_obj(forecast.find('fcst_time_from').text).strftime("%d%H%z")
            taf_intro += 'z to '
            taf_intro += time_str_to_obj(forecast.find('fcst_time_to').text).strftime("%d%H%z")
            taf_intro += 'z the visibility is forecast to be '
            taf_intro += forecast.find('visibility_statute_mi').text + 'sm.'
            logging.debug(taf_intro)
    # Times follow format YYYY-mm-ddTHH:MM:SSZ
    metar_times_list = metar_root.findall('.//METAR/observation_time')
    example_time_string = choice(metar_times_list).text
    logging.debug('example_time_string = ' + example_time_string)
    #TODO: do time zone stuff
    date_obj = datetime.strptime(example_time_string, \
        '%Y-%m-%dT%H:%M:%SZ')
    logging.debug('date_obj = ' + str(date_obj))
# --- Module initialization: runs at import time.  Chooses TEST_FIELDS from
# --- argv (or at random), then fetches TAF/METAR/station XML and the map.
if len(sys.argv) > 1:
    """Process command-line arguments"""
    # TODO: put some kind of limitation on number of fields that can be searched.
    logging.debug('sys.argv = ["' + '", "'.join(sys.argv) + '"]\n')
    # If there's only one argument and it is a valid two-letter country
    # identifier, search that country for bad weather.
    if re.match(r'\b[a-zA-Z]{2}\b', sys.argv[1]) and \
            len(sys.argv) == 2 and countries.is_valid_country(sys.argv[1]):
        TEST_FIELDS = gen_bad_fields(sys.argv[1])
    else:
        for arg in sys.argv[1:]:
            # Reference https://aviation.stackexchange.com/a/14593 and FAA Order
            # JO 7350.9. We're only going to use/deal with 4-letter fields, as
            # two-letter, two-number ids likely reference private fields or
            # fields smaller than we'll be using for military flying.
            if re.match(r'\b[a-zA-Z]{4}\b', arg) == None:
                logging.warning('The command line argument "' + arg
                        + '" did not match '
                        + 'the pattern for a valid ICAO identifier.\n')
                break
            else:
                TEST_FIELDS.append(arg.upper())
        logging.debug('TEST_FIELDS set to ' + ', '.join(TEST_FIELDS) + '\n')
else:
    logging.debug('No command-line arguments detected. Picking random fields.\n')
    TEST_FIELDS = gen_bad_fields()
# First test field doubles as the home station/destination everywhere below.
DEST_ID = TEST_FIELDS[0]
logging.debug('set DEST_ID = TEST_FIELDS[0], which is ' + TEST_FIELDS[0] + '.\n')
logging.debug('making ADDS URLs...\n')
taf_url = wxurlmaker.make_adds_url('tafs', TEST_FIELDS)
metar_url = wxurlmaker.make_adds_url('metars', TEST_FIELDS)
field_url = wxurlmaker.make_adds_url('fields', TEST_FIELDS)
urls = [taf_url, metar_url, field_url]
# Fetch and cache the three XML documents used by the helpers above.
taf_root = get_root(taf_url)
metar_root = get_root(metar_url)
field_root = get_root(field_url)
roots = [taf_root, metar_root, field_root]
# Download the static map image for the chosen fields to images/map.jpg.
map_url = mapurlmaker.make_map_url(make_coord_list())
map_request = requests.get(map_url)
with open('images/map.jpg', 'wb') as map_img:
    map_img.write(map_request.content)
# reference https://stackoverflow.com/a/419185
if __name__ == '__main__':
    test()
# --- HTML report: note this section is NOT under the __main__ guard, so it
# --- also runs when the module is imported.
html_str = ''
if USING_CGI:
    html_str += 'Content-type: text/html; charset=UTF-8\n\n'
    html_str += '<!DOCTYPE html><html lang="en"><head>'
    html_str += '<meta charset="utf-8"><title>WxGonk Troubleshooting'
    html_str += '</title><link rel="stylesheet" type="text/css"'
    html_str += 'href="styles/wxgonk.css" /></head><body>'
html_str += '<h1>Most recent URLs:</h1>'
html_str += '<a href=' + metar_url + '>METAR XML</a></br>'
html_str += '<a href=' + taf_url + '>TAF XML</a></br>'
html_str += '<a href=' + field_url + '>FIELD XML</a></br>'
html_str += '<a href=' + wxurlmaker.make_metar_taf_url(TEST_FIELDS)
html_str += '>Normal TAFs & METARs</a></br>'
html_str += '<a href=images/map.jpg>Google Static Map</a></br></br>'
logging.debug(str(NUM_REQS) + ' requests made to the ADDS text data server.')
# Re-read the debug log written during this run and embed it in the page.
with open('.logs/test.log', newline='\n') as f:
    for line in f:
        html_str += '<p>' + line + '</p>'
if USING_CGI:
    print(html_str)
else:
    html_str += '</body></html>'
    with open('index.html', 'w') as url_file:
        url_file.write(html_str)
| UTF-8 | Python | false | false | 19,806 | py | 16 | wxgonk.py | 10 | 0.596688 | 0.59073 | 0 | 428 | 45.275701 | 97 |
leocadavalHome/autoRig3 | 14,688,788,163,608 | 6e077c1ac2ba8260ea3d7080ea4158fdbe10014f | f8772539cad84dedcd194c7d3c1aa4af95a39c2b | /modules/twistExtractor.py | 9f29151388c6f2a75c9f74edddd2b83412140b3d | []
| no_license | https://github.com/leocadavalHome/autoRig3 | 9327a2f0be759d29ec42a07a16eb254261f289a8 | 5d6a7503cbdf937fce5f803201f0f9456e603dcc | refs/heads/master | 2021-07-12T14:44:53.383353 | 2020-06-06T16:56:45 | 2020-06-06T16:56:45 | 146,034,252 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | import pymel.core as pm
import logging
logger = logging.getLogger('autoRig')
class twistExtractor:
"""
Cria uma estrutura para calcular o twist de um joint
Parametros:
twistJntIn: joint a ser calculado
"""
def __init__(self, twistJntIn, conn='parentConstraint', flipAxis=False, name='Extractor'):
self.name = name
self.extractor = None
self.axis = 'X' # hard coding X como eixo. Aparentemente so ele funciona
self.extractorGrp = None
# Error Handling
try:
twistJnt = pm.PyNode(twistJntIn)
except:
logger.debug("ERROR:The Node Doesn't Exist: %s" % twistJntIn)
return
try:
twistJnt.getParent()
except:
logger.debug("ERROR:The Node Has No Parent: %s" % twistJntIn)
return
try:
twistJnt.childAtIndex(0)
except:
logger.debug("ERROR:The Node Has No Child: %s" % twistJntIn)
return
if twistJnt.nodeType() != 'joint':
logger.debug("ERROR:The Node Is Not A Joint: %s " % twistJntIn)
return
if twistJnt.childAtIndex(0).nodeType() != 'joint':
logger.debug("ERROR:The Node Child Is Not A Joint: %s " % twistJnt.childAtIndex(0))
return
# cria grupo base e parenteia no pai do joint fonte do twist
extractorGrp = pm.group(empty=True, n=twistJntIn+'_extractor_grp')
matrix = pm.xform(twistJnt.getParent(), q=True, m=True, ws=True)
pm.xform(extractorGrp, m=matrix, ws=True)
if conn == 'parentConstraint':
pm.parentConstraint(twistJnt.getParent(), extractorGrp, mo=False)
elif conn == 'parent':
pm.parent(extractorGrp, twistJnt.getParent())
self.extractorGrp = extractorGrp
# pm.scaleConstraint (twistJnt.getParent(),extractorGrp, mo=True)
# duplica o joint fonte do twist e seu filho
extractorStart = pm.duplicate(twistJnt, po=True)[0]
pm.makeIdentity(extractorStart, a=True, r=True)
extractorEnd = pm.duplicate(twistJnt.childAtIndex(0), po=True)[0]
pm.parent(extractorEnd, extractorStart)
pm.parent(extractorStart, extractorGrp)
# cria o locator que calcula o twist. Cria OrientConstraint
extractorLoc = pm.spaceLocator(n=twistJntIn+'_locTwist')
pm.parent(extractorLoc, extractorStart, r=True)
ori = pm.orientConstraint(twistJnt, extractorStart, extractorLoc, mo=False)
ori.interpType.set(2)
# cria ik handle com polevector zerado e parenteia no joint fonte (noRoll)
extractorIkh = pm.ikHandle(sj=extractorStart, ee=extractorEnd, sol='ikRPsolver', n=twistJntIn+'_ikh' )[0]
extractorIkh.poleVector.set(0, 0, 0)
pm.parentConstraint(twistJnt, extractorIkh, mo=True)
pm.parent(extractorIkh, extractorGrp)
# multiplica por 2 o valor de rot do locator
pm.addAttr(extractorLoc, ln='extractTwist', at='double', k=1)
multi = pm.createNode('multDoubleLinear', n=self.name + 'Multi' )
if flipAxis:
multi.input2.set(-2)
else:
multi.input2.set(2)
extractorLoc.attr('rotate' + self.axis) >> multi.input1
multi.output >> extractorLoc.extractTwist
self.extractor = extractorLoc | UTF-8 | Python | false | false | 3,375 | py | 88 | twistExtractor.py | 81 | 0.623111 | 0.617778 | 0 | 88 | 37.363636 | 113 |
anikamath1/DjangoPracticeApps | 14,766,097,566,148 | b7ee098fabb9340cc4aef5d1d17e2a05a2652a30 | 20ee2d726f7941aa8bb5687fabbe4e20569e5465 | /home/models.py | 421a362917c039966309e607d4ad35b7d1e5e7c9 | []
| no_license | https://github.com/anikamath1/DjangoPracticeApps | a4007dd41d314b82ebf7d2ebbd6c1b72e635c3f2 | 0e2700fcf4eb71c6fe87011e441798e421646b0e | refs/heads/master | 2020-09-24T06:43:39.479131 | 2019-12-03T18:44:04 | 2019-12-03T18:44:04 | 225,691,315 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Post(models.Model):
post=models.CharField(max_length=20)
user=models.ForeignKey(User,on_delete=models.CASCADE)
created=models.DateTimeField(auto_now_add=True)
updated=models.DateTimeField(auto_now=True)
class Friend(models.Model):
users=models.ManyToManyField(User)
current_user=models.ForeignKey(User,related_name='owner',on_delete=models.CASCADE)
@classmethod
def make_friend(cls,current_user,new_friend):
friend, created=cls.objects.get_or_create(
current_user=current_user
)
friend.users.add(new_friend)
@classmethod
def remove_friend(cls, current_user, new_friend):
friend, created = cls.objects.get_or_create(
current_user=current_user
)
friend.users.remove(new_friend)
class Chat(models.Model):
from_user=models.ForeignKey(User,related_name='from_user',on_delete=models.CASCADE)
to_user=models.ForeignKey(User,related_name='to_user',on_delete=models.CASCADE)
message=models.CharField(max_length=200)
| UTF-8 | Python | false | false | 1,144 | py | 25 | models.py | 15 | 0.713287 | 0.708916 | 0 | 34 | 32.647059 | 87 |
KaltakhchyanD/socrat | 15,272,903,733,561 | 68825967e42c0a6158001cbd6b91ee8dba0e06ad | a5e6bc604193f4e84706ea54fd4775e08b34e676 | /create_db.py | 23a42c894f7a1f4605945b46c534f9888249f5ee | []
| no_license | https://github.com/KaltakhchyanD/socrat | 8787ed6d8716c88bb213f4969d983f2d2e390810 | a59cebcb4cfae1c331190c2bffdc163641b574a1 | refs/heads/master | 2022-11-11T12:44:37.484761 | 2020-06-07T14:49:00 | 2020-06-07T14:49:00 | 248,279,809 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import subprocess
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy_utils import database_exists, create_database
#from myapp import create_app, db
import myapp
try:
myapp.db.create_all(app = myapp.create_app())
except :#sqlalchemy.exc.OperationalError:
print("That fucking error again!")
create_database(myapp.Config.SQLALCHEMY_DATABASE_URI)
#subprocess.call(['createdb', f'{myapp.Config.db_name}'])
finally:
myapp.db.create_all(app = myapp.create_app())
| UTF-8 | Python | false | false | 495 | py | 30 | create_db.py | 22 | 0.735354 | 0.735354 | 0 | 17 | 27.941176 | 64 |
papertux/AWS_Scripts | 16,200,616,685,334 | 8488f4587513d00052a9334aa2f9ba5eedadc594 | 299fc2b5aab1e6c4ff6f158a8aa0438f79e3603c | /instance-scripts/terminate_instance.py | 3a6487b2b827be162789351a36ba60d32cb0a5af | []
| no_license | https://github.com/papertux/AWS_Scripts | 329aff0d1546784506474a687b9fafef79982630 | bf6415dea90e9b35b50aee3b1480d4f4c7ebdc90 | refs/heads/master | 2023-02-17T12:13:46.717180 | 2022-06-14T18:53:29 | 2022-06-14T18:53:29 | 168,874,013 | 1 | 1 | null | false | 2023-02-08T02:40:36 | 2019-02-02T20:22:42 | 2021-12-31T01:05:23 | 2023-02-08T02:40:35 | 1,444 | 1 | 1 | 5 | Python | false | false | from collections import defaultdict
import boto3
import time
region = 'us-east-1'
ec2 = boto3.resource('ec2', region_name=region)
ec2_filter = [{'Name': 'instance-state-name', 'Values': ['running']}]
ec2.instances.filter(Filters=ec2_filter).terminate()
instance_status = ec2.instances.filter(Filters=[{
'Name': 'instance-state-name',
'Values': ['running', 'stopping']}])
time.sleep(5)
ec2info = defaultdict()
for instance in instance_status:
ec2info[instance.id] = {
'Type': instance.instance_type,
'ID': instance.id,
'State': instance.state['Name'],
'Private IP': instance.private_ip_address,
}
attributes = ['Type', 'ID', 'State', 'Private IP']
for instance_id, instance in ec2info.items():
for key in attributes:
print("{0}: {1}".format(key, instance[key]))
print("-------------------------")
| UTF-8 | Python | false | false | 872 | py | 424 | terminate_instance.py | 209 | 0.626147 | 0.608945 | 0 | 32 | 26.25 | 69 |
rodrigocode4/estudo-python | 11,690,901,007,086 | 7471ce3b52cc619d5e818d235c8dee5632ac79bd | 4407e6eff1a32b3df8632cba1c1143922e3b0139 | /funcoes/unpacking_nomeado.py | fba17e045c05cbe97853e6a1653cd7a41d525cc6 | []
| no_license | https://github.com/rodrigocode4/estudo-python | 3cc69bba6261a5821398dc9010b5ab2748c5a94c | 01128e5e1a1d39d2835df51dfe8bbaf4a270aa51 | refs/heads/master | 2022-12-11T01:32:20.920048 | 2020-05-08T00:03:05 | 2020-05-08T00:03:05 | 209,614,844 | 1 | 0 | null | false | 2022-12-08T06:48:15 | 2019-09-19T17:47:07 | 2020-05-08T00:03:20 | 2022-12-08T06:48:15 | 322 | 0 | 0 | 2 | Python | false | false | # **kwargs
def resultado_f1(primeiro, segundo, terceiro):
print(f' 1º {primeiro}')
print(f' 2º {segundo}')
print(f' 3º {terceiro}')
if __name__ == '__main__':
podium = {
'segundo' : 'Rubinho Barriquelo',
'terceiro' : 'F. Massa',
'primeiro' : 'Airton Senna'
}
resultado_f1(**podium) | UTF-8 | Python | false | false | 335 | py | 56 | unpacking_nomeado.py | 54 | 0.542169 | 0.527108 | 0 | 15 | 21.2 | 46 |
1f3lse/p4yl0ad | 9,878,424,820,863 | c5b878be263fd2f9df49a2c658d361fa202b6aba | 0d9639a34a01056255cce41f7833defa72b70217 | /metasploit.py | b6d9baaf0316b9fd14a2d82a147645872f2de6b5 | []
| no_license | https://github.com/1f3lse/p4yl0ad | 60a4fad513e145430db92bd234130a935b81a06c | ab79132c71f0a8daea3f47d05de9860aeb9ae92f | refs/heads/master | 2023-03-17T04:38:59.906286 | 2020-05-24T13:19:33 | 2020-05-24T13:19:33 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python
#-*- coding:utf8-*-
import os,sys,time
import socket
blanco = '\u001b[30m'
rojo = '\u001b[31m'
verde = '\u001b[32m'
amarillo = '\u001b[33m'
azul = '\u001b[34m'
magenta = '\u001b[35m'
cyan = '\u001b[36m'
blanco = '\u001b[37m'
def entrada1():
os.system("clear")
print (rojo+"Tu direccion ip es: ")
direccion()
host = input(blanco+"Direccion IP/host: ")
port = input("Puerto alojado(8080): ")
nombre = input("Nombre del archivo: ")
print (rojo+"Usaremos los siguientes párametros [msfvenom -p android/meterpreter/reverse_tcp LHOST="+ str(host) +" LPORT="+ str(port) +" R > "+ str(nombre) +".apk] ")
os.system("msfvenom -p android/meterpreter/reverse_tcp LHOST="+ str(host) +" LPORT="+ str(port) +" R > "+ str(nombre) +".apk")
os.system("msfconsole")
def direccion():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
print(cyan+s.getsockname()[0])
def entrada2():
os.system("clear")
print (verde+"Tu direccion ip es: ")
direccion()
host = input(blanco+"Direccion IP/host: ")
port = input("Puerto alojado(8080): ")
nombre = input("Nombre del archivo: ")
print (amarillo +"Usaremos los siguientes párametros [msfvenom -p windows/meterpreter/reverse_tcp LHOST="+ host +" LPORT="+ port +" R > "+ nombre +".exe] ")
os.system("msfvenom -p windows/meterpreter/reverse_tcp LHOST="+ host +" LPORT="+ port +" R > "+ nombre +".exe")
os.system("msfconsole")
def entrada3():
os.system("clear")
print (rojo+"""
___________$b__Vb.
___________’$b__V$b.
____________$$b__V$$b.
____________’$$b._V$$$$oooooooo._________..
_____________’$$P*_V$$$$$”"**$$$b.____.o$$P
______________”_.oooZ$$$$b..o$$$$$$$$$$$$C
______________.$$$$$$$$$$$$$$$$$$$$$$$$$$$b.
______________$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
________.o$$$o$$$$$$$$P”"*$$$$$$$$$P”"”*$$$P
_______.$$$**$$$$P”q$C____”$$$b________.$$P
_______$$P___”$$$b__”$_._.$$$$$b.______*”
_______$$______$$$._____”***$$$$$$$b._A.
_______V$b___._Z$$b.__._______”*$$$$$b$$:
________V$$.__”*$$$b.__b._________”$$$$$
_________”$$b_____”*$.__*b._________”$$$b
___________”$$b._____”L__”$$o.________”*”_____.ooo..
_____________”*$$o.________”*$$o.__________.o$$$$$
_________________”*$$b._______”$$b._______.$$$$$*”
____________________”*$$o.______”$$$o.____$$$$$’
_______________________”$$o_______”$$$b.__”$$$$__
_________________________”$b.______”$$$$b._”$$$$$
________________________._”$$_______”$$$$b__”$$$$
_________________________L.”$.______.$$$$$.__
""")
print (amarillo+"solo para Android 7 o superior, teniendo en cuenta un espacion de 500mb minimo")
ing = str(input(verde+"Empezar: y/n "+blanco))
if ing == "y" or ing == "Y":
os.system("clear")
print (rojo+"[!]Instalando unstable-repo"+verde)
time.sleep(0.3)
os.system("pkg install unstable-repo")
time.sleep(0.2)
os.system("clear")
print (rojo+"[!]Instalando METASPLOIT"+verde)
time.sleep(0.3)
os.system("pkg install metasploit")
time.sleep(0.2)
print("[!]Saliendo")
time.sleep(0.6)
exit()
elif ing == "n" or ing == "N":
os.system("clear")
print (verde+"[!]Regresando..")
time.sleep(0.2)
os.system("clear")
print (rojo+"[!]Regresando..")
time.sleep(0.2)
os.system("clear")
print (amarillo+"[!]Regresando..")
time.sleep(0.2)
os.system("clear")
print (rojo+"[!]Regresando..")
menu()
else:
os.system("clear")
print("Entrada Incorrecta")
time.sleep(0.3)
entrada3()
def entrada4():
print(rojo+"Actualizando metasploit")
os.system("sudo apt install metasploit-framework")
print(verde+"Sucefull update!")
time.sleep(0.5)
menu()
def entrada5():
os.system("google-chrome https://facebook.com/d4rksit3")
#os.system("google-chrome https://facebook.com/d4rksit3")
def menu():
time.sleep(1.2)
os.system("clear")
print(rojo+str(" _ _ _ _ "))
print(str(rojo+" | | | | (_) | "))
print(str(verde+" _ __ ___ ___| |_ __ _ ___ _ __ | | ___ _| |_ "))
print(str(amarillo+"| '_ ` _ \ / _ \ __/ _` / __| '_ \| |/ _ \| | __|"))
print(str(rojo+"| | | | | | __/ || (_| \__ \ |_) | | (_) | | |_ "))
print(str(verde+"|_| |_| |_|\___|\__\__,_|___/ .__/|_|\___/|_|\__|"))
print(str(verde+" | | D4rksit3 "))
print(str(blanco+" |_| "+blanco))
time.sleep(0.2)
print ("[1]- Payload Android")
print ("[2]- Payload Pc (Windows) ")
print ("[3]- Instalar en termux")
print ("[4]- Actualizar Metasploit (Solo Debian)")
print ("[5]- Autor")
print ("[6]- Salir"+verde)
inp = input("DarkGhost-$ "+blanco)
if inp == "1":
entrada1()
elif inp == "2":
entrada2()
elif inp == "3":
entrada3()
elif inp == "4":
entrada4()
elif inp == "5":
entrada5()
elif inp == "6":
os.system("clear")
print ("[!]Saliendo")
time.sleep(0.5)
os.system("clear")
exit()
else:
os.system("clear")
time.sleep(0.2)
print (rojo+"[!] Ingresaste un dato incorrecto")
time.sleep(2)
os.system("clear")
print ("[!]Restaurando")
time.sleep(2)
menu()
menu() | UTF-8 | Python | false | false | 5,162 | py | 3 | metasploit.py | 1 | 0.478912 | 0.455459 | 0 | 198 | 24.631313 | 167 |
valentinelsra/tuto | 14,620,068,702,171 | 7c3ea08ddc238f04e0b01648bc42a3c80fa79b4d | 1eeddcfc302763a54cf9e12c5d032abb09a2ba79 | /Tutorat1/exercice2.py | 54b496a993d28cb5eef50bf0b3fbe7a9c8843823 | []
| no_license | https://github.com/valentinelsra/tuto | 8c53d9f0a6f1484b9a9ebb8e75dfe25673e6a31c | efea16369c25587cd7dfc189b1efa8116078ead5 | refs/heads/master | 2020-08-07T11:32:11.583842 | 2019-10-07T17:26:32 | 2019-10-07T17:26:32 | 213,433,421 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #grille=[]
#for i in range (10):
#grille.append('o')
#print(grille)
#print(grille)
#print(grille[0])
grille = ["O"]
for i in range (10):
print grille[0][0]
| UTF-8 | Python | false | false | 172 | py | 3 | exercice2.py | 2 | 0.575581 | 0.534884 | 0 | 13 | 12.230769 | 23 |
fnadalt/mundo | 19,645,180,430,063 | 1111b7e1aef2fec6a1ae2180b811c9a9b55bde4e | d23176a4c1abe01c8125e7481141c76df0f810ac | /02/persona.py | 5b90d1a90a07f52a910a3e6962e2758caf1747d4 | []
| no_license | https://github.com/fnadalt/mundo | 5e56e823fa8aeec570558295d473ebe3d27cebe8 | 83c2fb24d47e67f2b268aeb41aa81df0673f8258 | refs/heads/master | 2020-12-02T12:46:40.587469 | 2018-07-15T19:44:48 | 2018-07-15T19:44:48 | 96,592,910 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
from direct.showbase.DirectObject import DirectObject
from direct.actor.Actor import Actor
from panda3d.core import *
from proveedores import Topografia
# log
import logging
log = logging.getLogger(__name__)
class Persona(DirectObject):
def __init__(self, contexto, _input):
# referencias
self.contexto = contexto
self.input = _input
self.topografia = None
# componentes
self.actor = None
def iniciar(self):
log.info("iniciar")
b = self.contexto.base
# referencias
self.topografia = self.contexto.obtener_proveedor(Topografia.Nombre)
# actor
self.actor = Actor("modelos/hombre/male.egg")
self.actor.reparentTo(b.render)
# tasks
self.contexto.base.taskMgr.add(self._update, "Persona_update")
#
return True
def terminar(self):
log.info("terminar")
# tasks
self.contexto.base.taskMgr.remove("Persona_update")
# referencias
self.input = None
self.topografia = None
# actor
if self.actor:
self.actor.delete()
self.actor = None
def _update(self, task):
pos2d = Vec2(self.actor.getX(), self.actor.getY())
self.actor.setZ(self.topografia.obtener_altitud(pos2d))
return task.cont
| UTF-8 | Python | false | false | 1,368 | py | 100 | persona.py | 70 | 0.608918 | 0.605263 | 0 | 52 | 25.307692 | 76 |
yaoey/speedclone | 1,133,871,395,088 | 24ab5246385db4443fdbd4c622a688b55434000b | 0655b0229e0069453c22f59150cb25fa7dec1e83 | /main.py | f253adf8ebeccdde08c927fbd32fb805b1cf997e | []
| no_license | https://github.com/yaoey/speedclone | e10ae45eac619feb2b4b90500227eb97900332be | f12a1a8a36c18e0c5c68518d86ed560f589a6b3b | refs/heads/master | 2022-07-30T13:53:32.339398 | 2020-05-20T15:42:09 | 2020-05-20T15:42:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import importlib
from speedclone.args import parse_args
from speedclone.manager import TransferManager
TRANSFERS_BASE_IMPORT_PATH = "speedclone.transfers."
BARS_BASE_IMPORT_PATH = "speedclone.bar."
def handle_rest(s):
a = s.split(":/")
return a.pop(0), ":/".join(a)
def main():
args, rest, config, transfers, bars = parse_args()
f, t = rest
f_name, f_path = handle_rest(f)
t_name, t_path = handle_rest(t)
f_conf = config.get(f_name)
t_conf = config.get(t_name)
f_trans = transfers.get(f_conf.get("transfer"))
t_trans = transfers.get(t_conf.get("transfer"))
from_transfer = getattr(
importlib.import_module(TRANSFERS_BASE_IMPORT_PATH + f_trans.get("mod")),
f_trans.get("cls"),
).get_transfer(f_conf, f_path)
to_transfer = getattr(
importlib.import_module(TRANSFERS_BASE_IMPORT_PATH + t_trans.get("mod")),
t_trans.get("cls"),
).get_transfer(t_conf, t_path)
bar = bars.get(args.bar)
bar_manager = getattr(
importlib.import_module(BARS_BASE_IMPORT_PATH + bar.get("mod")), bar.get("cls"),
).get_bar_manager()
transfer_manager = TransferManager(
download_manager=from_transfer,
upload_manager=to_transfer,
bar_manager=bar_manager,
sleep_time=args.sleep_time,
)
transfer_manager.run(max_workers=args.workers)
if __name__ == "__main__":
main()
| UTF-8 | Python | false | false | 1,407 | py | 7 | main.py | 6 | 0.632552 | 0.631841 | 0 | 53 | 25.54717 | 88 |
newxjy/bdd-driveratt | 9,818,295,257,182 | 9a1f7d132db52512925bc48b38b293a37c00efd1 | 73e069fd71eb48f7cab4cc43cb6b909cb0e31a58 | /eye_tracking/analysis/code/CALIBRATION.py | 5eea36949800aab3ccd9d708c99973cc62e9bb9d | []
| no_license | https://github.com/newxjy/bdd-driveratt | 7678a48de6bfbabdc1c291f87125759d0f25620d | 22d3acbee3c12bf5decb30eef495064d2a544584 | refs/heads/master | 2020-12-09T00:41:13.872279 | 2019-11-15T02:59:30 | 2019-11-15T02:59:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 13 13:11:40 2018
@author: behinger
"""
import functions.add_path
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import functions.et_preprocess as preprocess
import functions.pl_surface as pl_surface
from functions import et_import
from shared_modules import accuracy_visualizer
from functions.pl_recalib import pl_recalibV2
import logging
def find_closest_gridstart(elcaliberror, gridstart):
# because we often recalibrate multiple times, we have to look for the most recent one. The most recent one of each block is the one, that is closest to the grid start trigger
out = pd.DataFrame()
for t in gridstart:
minix = np.argmin(np.abs(t - elcaliberror.msg_time))
out = pd.concat([out, elcaliberror.iloc[minix]], axis=1)
return (out.T)
def el_accuracy(subject):
samp, evt, elnotes = et_import.raw_el_data(subject, datapath='/net/store/nbp/projects/etcomp/')
ix = elnotes["trialid "].str.find("!CAL VALIDATION HV13 ")
elcaliberror = elnotes[ix == 0]
elcaliberror = elcaliberror.join(elcaliberror["trialid "].str.extract("ERROR ([\d.]*) avg. ([\d.]*)"))
elcaliberror = elcaliberror.rename(columns={0: "avg", 1: "max", "trialid_time": "msg_time"})
elcaliberror = elcaliberror.assign(subject=subject, eyetracker='el')
elcaliberror.loc[:, 'msg_time'] = elcaliberror.loc[:, 'msg_time'] / 1000
elcaliberror = elcaliberror.drop(axis=1, labels=["py_trial_marker", "trialid "])
elcaliberror = elcaliberror.reset_index()
# get trialstart times
gridstart = elnotes.loc[elnotes["trialid "].str.find("Instruction for LARGEGG start") == 0, 'trialid_time'] / 1000
elcaliberror = find_closest_gridstart(elcaliberror, gridstart)
return (elcaliberror)
def pl_accuracy(subject):
logger = logging.getLogger(__name__)
logger.info('loading subject %s' % (subject))
pldata = et_import.raw_pl_data(subject=subject) # basically just importing all of the raw data
# stored from now on on pldata
# get calibration and accuracy data
# this next line finds the pupil data where the message was sent of starting calibration.
# this is why it's important to send the correct messages, now we can trim that data easily.
data = [n for n in pldata['notifications'] if
n['subject'] in ['calibration.calibration_data', 'accuracy_test.data']]
notes = [n['subject'] for n in data] # not sure why we need now to trim by subject?
# where are accuracy tests?
ix_acc = np.where(np.asarray(notes) == 'accuracy_test.data')[0]
# there is an accuracy test after every calibration, so if we find the timestamp when the accuracy test was done,
# we just need then to subtract one and we'll find the calibration.
# give me the calibrations immediately before
ix_cal = ix_acc - 1
# if calib test was started multiple times, remove them
ix_cal = ix_cal[np.diff(np.append(-1, ix_acc)) != 1]
ix_acc = ix_acc[np.diff(np.append(ix_acc, -1)) != 1]
logger.info("found %i calibrations" % (ix_cal.shape))
fake_gpool = pl_surface.fake_gpool_surface(folder='/net/store/nbp/projects/etcomp/%s/raw' % (subject))
class fake_accuracy(accuracy_visualizer.Accuracy_Visualizer):
def __init__(self):
# self.outlier_threshold = 5
self.succession_threshold = np.cos(np.deg2rad(.5))
self._outlier_threshold = 5
combined = [(data[c], data[a]) for c, a in
zip(ix_cal, ix_acc)] # extract the data for accuracy test and calibration
accu = [] # stores accuracy of the calibration
prec = [] # no idea of what this does
time = [] # gets timestamp of the gaze position
# combined = combined[6:9]
tmp = fake_accuracy()
for cal, acc in combined:
# here comes the actual calibration!:
gaze_pos = pl_recalibV2(cal['pupil_list'], cal['ref_list'], acc['pupil_list'], calibration_mode='2d',
eyeID=None)
results = tmp.calc_acc_prec_errlines(gaze_pos, acc['ref_list'], fake_gpool.capture.intrinsics)
accu.append(results[0].result)
prec.append(results[1].result)
time.append(cal['pupil_list'][0]['timestamp'])
plcaliberror = pd.DataFrame({"avg": accu, "msg_time": time, 'subject': subject, 'eyetracker': 'pl'})
# get trial start notifications
# but some subject do not have accuracy messages (no idea why!!, pupil bug?)
if plcaliberror.shape[0] > 0:
gridstart = [n['recent_frame_timestamp'] for n in pldata['notifications'] if
'label' in n.keys() and type(n['label'] == str) and len(n['label']) > 0 and n['label'].find(
"Instruction for LARGEGG start") == 0]
plcaliberror = find_closest_gridstart(plcaliberror, gridstart)
return (plcaliberror)
| UTF-8 | Python | false | false | 4,912 | py | 20 | CALIBRATION.py | 11 | 0.663884 | 0.653298 | 0 | 119 | 40.277311 | 179 |
longhao54/leetcode | 412,316,894,138 | 1839486d04c43d77c217214050c4567b4f6758b6 | f11600b9a256bf6a2b584d127faddc27a0f0b474 | /normal/1209.py | 1a1d434fe16ac98ba1e2549caf76849307bff7b5 | []
| no_license | https://github.com/longhao54/leetcode | 9c1f0ce4ca505ec33640dd9b334bae906acd2db5 | d156c6a13c89727f80ed6244cae40574395ecf34 | refs/heads/master | 2022-10-24T07:40:47.242861 | 2022-10-20T08:50:52 | 2022-10-20T08:50:52 | 196,952,603 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # 贴边通过
class Solution:
def removeDuplicates(self, s: str, k: int) -> str:
ans = []
lenth = 0
strs = "abcdefghijklmnopqrstuvwxyz"
for i in strs:
t = i*k
while t in s:
s = s.replace(t,"")
for i in s:
ans.append(i)
lenth += 1
while lenth >= k:
c = True
if len(set(ans[-1:-k-1:-1])) == 1:
ans = ans[0:-k]
lenth -= k
else:
break
return "".join(ans)
# 利用栈
class Solution:
def removeDuplicates(self, s: str, k: int) -> str:
ans = ""
count = []
strs = "abcdefghijklmnopqrstuvwxyz"
for i in strs:
t = i*k
while t in s:
s = s.replace(t,"")
last = ""
lenth = 0
for i in s:
ans += i
lenth += 1
if i != last:
count.append(1)
else:
count[-1] += 1
if count[-1] == k:
count.pop()
ans = ans[0:lenth-k]
lenth -= k
last = ans[-1] if ans else ""
return ans
| UTF-8 | Python | false | false | 1,274 | py | 548 | 1209.py | 547 | 0.35873 | 0.346825 | 0 | 48 | 25.0625 | 54 |
HungMingWu/CppCloud | 9,921,374,498,864 | 0ea5d4adc10e1ed718328213ad6d487a25ff0ce0 | 5eeed7d553e82439561685ff03d207345813c1e9 | /python_sdk/cppcloud/tcpcli.py | cb7e13059372f6e4147b7dddbc05c61574b53835 | [
"Apache-2.0"
]
| permissive | https://github.com/HungMingWu/CppCloud | 1ab9594c6dd43daf34d6d05d1df7b4555cb7a5de | e3ae799d3b137351311a1901fed516942b2e2bc8 | refs/heads/master | 2023-01-28T17:50:36.277101 | 2023-01-14T08:59:31 | 2023-01-15T07:38:16 | 174,890,286 | 0 | 0 | null | true | 2019-03-10T22:44:55 | 2019-03-10T22:44:54 | 2019-03-01T01:25:07 | 2019-02-26T06:53:55 | 2,247 | 0 | 0 | 0 | null | false | null | #! /usr/bin/pytdon
# -*- coding:utf-8 -*-
'''
与cppcloud_serv通信模块,封tcp报文收发
用法比较简单,可参见文件末的__main__处.
'''
import os
import sys
import time
import socket
import struct
import json
from .const import CMD_WHOAMI_REQ,CMD_WHOAMI_RSP,CMD_GETCLI_REQ
#from cliconfig import config as cfg
g_version = 1
g_headlen = 10
def Connect(host, port):
return TcpCliBase.Connect(host, port)
def Recv(clisock, tomap):
return TcpCliBase.Recv(clisock, tomap)
def Send(clisock, cmdid, seqid, body):
return TcpCliBase.Send(clisock, cmdid, seqid, body)
def TellExit():
TcpCliBase.TellExit()
class TcpCliBase(object):
exit_flag = 0
def __init__(self, svraddr):
self.svraddr = svraddr
self.cliIp = ''
self.cli = None
self.svrid = 0 # 这个是服务端为本连接分配置的ID, 由whoami请求获得
self.clitype = 200 # 默认普通py应用
self.step = 0
self.svrname = "unknow"
self.desc = ""
self.tag = ""
self.aliasname = ""
# 统计信息
self.send_bytes = 0
self.recv_bytes = 0
self.send_pkgn = 0
self.recv_pkgn = 0
@staticmethod
def Connect(host, port):
ret = None
try:
port = int(port)
cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
cli.connect((host, port))
ret = cli
except socket.error as e:
print(('connect to %s:%d fail: %s'%(host, port, e)))
cli.close()
return ret
# return 大于0正常; 0失败
def checkConn(self):
if self.step <= 0:
clisock = TcpCliBase.Connect(self.svraddr[0], self.svraddr[1])
if not clisock: return 0
self.cli = clisock
print(('connected tcp to ' + str(self.svraddr) ))
self.cliIp = self.cli.getsockname()[0]
self.step = 1
self.tell_whoami()
return self.step
@staticmethod
def TellExit():
TcpCliBase.exit_flag = 1
# 发送包,可对外提供接口
@staticmethod
def Send(clisock, cmdid, seqid, body):
if isinstance(body, dict) or isinstance(body, list):
body = json.dumps(body)
bodylen = len(body)
head = struct.pack("!bbIHH", g_version,
g_headlen, bodylen, cmdid, seqid)
head += body.encode('utf-8')
try:
# print('sndMsg bodylen=',bodylen)
retsnd = clisock.send(head)
return retsnd
except socket.error as e:
print(('except happen ', e))
return 0
# return 大于0成功,0失败
def sndMsg(self, cmdid, seqid, body):
if self.checkConn() <= 0:
return 0
retsnd = self.Send(self.cli, cmdid, seqid, body)
if retsnd <= 0:
self.cli.close()
self.cli = None
else:
self.send_pkgn += 1
self.send_bytes += retsnd
return retsnd
@staticmethod
def Recv(clisock, tomap):
recvBytes = 0;
try:
# 接收头部
headstr = b''
while len(headstr) < g_headlen: # 在慢网环境
recvmsg = clisock.recv(g_headlen - len(headstr))
if TcpCliBase.exit_flag:
return -1,0, 0,0,'programe exit'
headstr += recvmsg
# 当收到空时,可能tcp连接已断开
if len(recvmsg) == 0:
return -10,0, 0,0,'recv 0 peerclose'
else:
recvBytes += len(recvmsg)
ver, headlen, bodylen, cmdid, seqid = struct.unpack("!bbIHH", headstr)
if ver != g_version or headlen != g_headlen or bodylen > 1024*1024*5:
print(("Recv Large Package| ver=%d headlen=%d bodylen=%d cmdid=0x%X"%(
ver, headlen, bodylen, cmdid)))
body = b''
while len(body) < bodylen:
body += clisock.recv(bodylen)
if TcpCliBase.exit_flag:
return -1,0, 0,0,'programe exit'
recvBytes += len(body)
body = body.decode('utf-8')
except socket.error as e:
print(('except happen ', e))
return -11,0, 0, 0, ('sockerr %s' % e)
if (tomap):
body = json.loads(body)
return 0,recvBytes, cmdid,seqid,body
# 失败第1个参数是0;
def rcvMsg(self, tomap=True):
if self.step <= 0:
self.checkConn()
return 0, 0, ''
result,rbytes, cmdid,seqid,body = self.Recv(self.cli, tomap)
if 0 == result:
self.recv_pkgn += 1
self.recv_bytes += rbytes
else:
self.close()
cmdid = 0
return cmdid,seqid,body
def close(self):
if self.cli:
self.cli.close()
self.cli = None
self.step = 0 # closed
def shutdownWrite(self):
if self.cli: self.cli.shutdown(socket.SHUT_WR)
# return 大于0正常
def tell_whoami(self):
ret = self.sndMsg(*self.whoami_str())
if ret > 0:
ret, seqid, rsp = self.rcvMsg()
if ret == CMD_WHOAMI_RSP:
self.svrid = rsp["svrid"]
print(('svrid setto %d'%self.svrid))
return ret
return -1
def whoami_str(self, seqid=1):
hhh, ppp = self.cli.getsockname()
shellstr = ' '.join(sys.argv)
shellstr = shellstr.replace('\\', '/')
rqbody = {
"localsock": hhh+":"+str(ppp),
"svrid": self.svrid,
"pid": os.getpid(),
"svrname": self.svrname,
"desc": self.desc,
"clitype": self.clitype,
"tag": self.tag,
"aliasname": self.aliasname,
"begin_time": int(time.time()),
"shell": shellstr
}
return CMD_WHOAMI_REQ, seqid, rqbody
if __name__ == '__main__':
scomm_sevr_addr = ("192.168.228.44", 4800)
cliobj = TcpCliBase(scomm_sevr_addr, 20)
sndret = cliobj.sndMsg(CMD_GETCLI_REQ, 1, {})
print(("sndret=%d" % sndret))
rspcmd, seqid, rspmsg = cliobj.rcvMsg(False)
print(("response cmd=0x%X" % rspcmd))
print(rspmsg)
cliobj.close()
print("main exit 0")
| UTF-8 | Python | false | false | 6,683 | py | 193 | tcpcli.py | 154 | 0.496983 | 0.481974 | 0 | 221 | 27.226244 | 86 |
sanchitkalra/Classes | 12,876,311,978,186 | 9f224313ce224d949cdadd09050dffba60d78720 | 9dc081c0568b8098d729e1ad070b6720f34f7a6b | /arrays_and_lists/assignment_intersection.py | 17bf66c7d60e835938e65c7247ac81817aa0f635 | []
| no_license | https://github.com/sanchitkalra/Classes | a8a2b59365ea8d1b8d215fd95258e735ba49a240 | a9a88a117ae034dadc791d0cc1f2c370cb27b144 | refs/heads/master | 2023-05-26T08:19:38.396196 | 2021-06-13T14:48:34 | 2021-06-13T14:48:34 | 296,529,382 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import sys
# alternative solution:
# def intersections(arr1, n, arr2, m) :
# for i in range(n):
# for j in range(m):
# if arr1[i]==arr2[j]:
# print(arr1[i],end=' ')
# arr2[j]=sys.maxsize
# break
def intersections(arr1, n, arr2, m) :
intersection = []
for k in arr1:
for j in arr2:
if k == j:
arr2.remove(j)
intersection.append(j)
break
x = ""
for k in intersection:
x += str(k) + " "
print(x)
return x
#Taking Input Using Fast I/O
def takeInput() :
n = int(sys.stdin.readline().strip())
if n == 0:
return list(), 0
arr = list(map(int, sys.stdin.readline().strip().split(" ")))
return arr, n
#main
t = int(sys.stdin.readline().strip())
while t > 0 :
arr1, n = takeInput()
arr2, m = takeInput()
intersections(arr1, n, arr2, m)
print()
t -= 1
#---------------------------------------------------
# logical testing
#---------------------------------------------------
# arr = [
# [6, 9, 8, 5],
# [9, 2, 4, 1, 8]
# ]
# # arr = [
# # [2, 6, 1, 2],
# # [1, 2, 3, 4, 2]
# # ]
# temp1 = []
# temp2 = []
# intersection = []
# for k in arr[0]:
# for j in arr[1]:
# if k == j:
# arr[1].remove(j)
# intersection.append(j)
# print(intersection)
#-------------------------------------------------------------------------
# everything below this line are experiments, some do not work as intended
#-------------------------------------------------------------------------
# for k in range(len(arr[1])):
# for j in arr[0]:
# try:
# if arr[1][k] == j:
# print("curr itr: k = {} and j = {}".format(k, j))
# print("arr before operations")
# print(arr[1])
# print(arr[0])
# intersection.append(j)
# arr[1].remove(j)
# arr[0].remove(j)
# print("arr after operations")
# print(arr[1])
# print(arr[0])
# print("-----------------------")
# print("intersection is")
# print(intersection)
# print("")
# except:
# print("index error occured")
# if len(arr[0]) <= len(arr[1]:
# for i in range(len(arr[0])):
# for k in arr[1]:
# if k == arr[0][i]:
# arr[0].remove(k)
# arr[1].remove(k)
# print(arr[0])
# print(arr[1])
# intersection.append(k)
# else:
# for i in range(len(arr[1])):
# for k in arr[0]:
# if k == arr[1][i]:
# intersection.append(k)
# for i in range(len(arr[0])):
# for k in arr[1]:
# if k == arr[0][i]:
# intersection.append(k)
# print(intersection) | UTF-8 | Python | false | false | 2,959 | py | 127 | assignment_intersection.py | 126 | 0.395066 | 0.372761 | 0 | 120 | 23.666667 | 74 |
VitorEmanuelDev/INF032D | 11,811,160,084,585 | 2e268cd48ec2c4b9dd4c75e66adb4b3110ec1d9f | 364eb8459e315dd50f91ce449856dc952ea1e81e | /Lista002/02_media_ponderada.py | 73848ea71a5d11f0da1d30787d8d5a992898a619 | []
| no_license | https://github.com/VitorEmanuelDev/INF032D | 98cf1f3038f544dd46ed0c36ff9016a3c49a7e1b | 976bdc9f7e742b1f4e31ebf0d40848b9cbc54844 | refs/heads/main | 2023-02-05T17:15:24.807520 | 2020-12-22T15:21:21 | 2020-12-22T15:21:21 | 316,109,533 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #2. Entrar com quatro números e imprimir a media ponderada, sabendo-se que os pesos são respectivamente 1,2,3,4.
print("Informe as quatro notas:")
nota_01 = float(input())
nota_02 = float(input())
nota_03 = float(input())
nota_04 = float(input())
media_ponderada = (nota_01 + (nota_02 * 2) + (nota_03 * 3) + (nota_04 * 4)) / 10
print("Média ponderada:", media_ponderada) | UTF-8 | Python | false | false | 376 | py | 61 | 02_media_ponderada.py | 60 | 0.678284 | 0.608579 | 0 | 11 | 33 | 112 |
thuvaragak/RRT | 6,631,429,506,454 | 629817b21528f044fc1c4b04676e9e6fa93dca9e | 1121cfb68c86f4d1f67805d4ddf0c969ce77017e | /obstacles.py | e433b366681201fd2494eecf483d49ab3ba31c32 | []
| no_license | https://github.com/thuvaragak/RRT | 3f7911f456b43db1dd1cb6b739643287050a2353 | 67f8078a090bc860557d6581c00883b976d967a3 | refs/heads/main | 2023-08-23T09:48:48.206687 | 2021-10-19T09:36:08 | 2021-10-19T09:36:08 | 418,855,962 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
import cv2
# Create a black image
img = np.zeros((500,500,3), np.uint8)
img.fill(255)
#Diagonal Blue line
#img = cv2.line(img,(0,0),(511,511),(255,0,0),5)
#Cicle
img = cv2.circle(img,(150,200),10,(0,0,255),-1)
#Ellipse
#cv2.ellipse(image, centerpoint, axes, angle, startAngle, endAngle, color,thickness)
#img = cv2.ellipse(img,(400,450),(50,25),0,0,180,(0,255,0),-1)
#Polyline1
pts = np.array([[200,100],[270,160],[230,140],[270,50]], np.int32)
pts = pts.reshape((-1,1,2))
#img = cv2.polylines(img,[pts],True,(0,0,0),)
cv2.rectangle(img, (60, 80), (90, 120), (255,0,0), 2)
cv2.rectangle(img, (100, 300), (150, 350), (255,0,0), 2)
cv2.rectangle(img, (300, 400), (350, 450), (255,0,0), 2)
cv2.rectangle(img, (150, 100), (180, 150), (255,0,0), 2)
cv2.imwrite('img.png', img)
'''
cv2.imshow('img',img)
cv2.waitKey(0)
cv2.destroyAllWindows()
''' | UTF-8 | Python | false | false | 874 | py | 4 | obstacles.py | 3 | 0.629291 | 0.427918 | 0 | 39 | 21.435897 | 86 |
micro1964/streamnow | 8,392,366,126,759 | 93ee22f2e96e7551f7840e3d3d8bd759fbb956fd | bd226f79c8ca112fefbbd0e9525bdc7f9167cf71 | /usr/share/apport/testsuite/test_python_crashes.py | 51b6f291aaa3026843d05a5aae96632db3de801c | []
| no_license | https://github.com/micro1964/streamnow | e53f1a1b33ad0386978e6c17c878140e9e945d93 | ba2a4f1a5293d91b0279339c3722aecbdc08ca4c | refs/heads/master | 2022-11-30T17:34:48.175628 | 2019-03-30T22:05:48 | 2019-03-30T22:05:48 | 178,615,166 | 0 | 2 | null | false | 2022-11-25T07:46:15 | 2019-03-30T21:52:51 | 2019-03-30T22:13:09 | 2019-03-30T22:11:10 | 522,107 | 0 | 1 | 1 | C | false | false | # Test apport_python_hook.py
#
# Copyright (c) 2006 - 2011 Canonical Ltd.
# Authors: Robert Collins <robert@ubuntu.com>
# Martin Pitt <martin.pitt@ubuntu.com>
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
# the full text of the license.
import unittest, tempfile, subprocess, os, stat, shutil, atexit
import dbus
temp_report_dir = tempfile.mkdtemp()
os.environ['APPORT_REPORT_DIR'] = temp_report_dir
atexit.register(shutil.rmtree, temp_report_dir)
import apport.fileutils
import apport.report
class T(unittest.TestCase):
def tearDown(self):
for f in apport.fileutils.get_all_reports():
os.unlink(f)
def _test_crash(self, extracode='', scriptname=None):
'''Create a test crash.'''
# put the script into /var/tmp, since that isn't ignored in the
# hook
if scriptname:
script = scriptname
fd = os.open(scriptname, os.O_CREAT | os.O_WRONLY)
else:
(fd, script) = tempfile.mkstemp(dir='/var/tmp')
self.addCleanup(os.unlink, script)
os.write(fd, ('''#!/usr/bin/env %s
import apport_python_hook
apport_python_hook.install()
def func(x):
raise Exception(b'This should happen. \\xe2\\x99\\xa5'.decode('UTF-8'))
%s
func(42)
''' % (os.getenv('PYTHON', 'python3'), extracode)).encode())
os.close(fd)
os.chmod(script, 0o755)
env = os.environ.copy()
env['PYTHONPATH'] = '.:/my/bogus/path'
p = subprocess.Popen([script, 'testarg1', 'testarg2'],
stderr=subprocess.PIPE, env=env)
err = p.communicate()[1].decode()
self.assertEqual(p.returncode, 1,
'crashing test python program exits with failure code')
if not extracode:
self.assertTrue('This should happen.' in err, err)
self.assertFalse('IOError' in err, err)
return script
def test_general(self):
'''general operation of the Python crash hook.'''
script = self._test_crash()
# did we get a report?
reports = apport.fileutils.get_new_reports()
pr = None
self.assertEqual(len(reports), 1, 'crashed Python program produced a report')
self.assertEqual(stat.S_IMODE(os.stat(reports[0]).st_mode),
0o640, 'report has correct permissions')
pr = apport.report.Report()
with open(reports[0], 'rb') as f:
pr.load(f)
# check report contents
expected_keys = ['InterpreterPath', 'PythonArgs', 'Traceback',
'ProblemType', 'ProcEnviron', 'ProcStatus',
'ProcCmdline', 'Date', 'ExecutablePath', 'ProcMaps',
'UserGroups']
self.assertTrue(set(expected_keys).issubset(set(pr.keys())),
'report has necessary fields')
self.assertTrue('bin/python' in pr['InterpreterPath'])
self.assertEqual(pr['ExecutablePath'], script)
self.assertEqual(pr['ExecutableTimestamp'],
str(int(os.stat(script).st_mtime)))
self.assertEqual(pr['PythonArgs'], "['%s', 'testarg1', 'testarg2']" % script)
self.assertTrue(pr['Traceback'].startswith('Traceback'))
self.assertTrue("func\n raise Exception(b'This should happen." in
pr['Traceback'], pr['Traceback'])
def test_existing(self):
'''Python crash hook overwrites seen existing files.'''
script = self._test_crash()
# did we get a report?
reports = apport.fileutils.get_new_reports()
self.assertEqual(len(reports), 1, 'crashed Python program produced a report')
self.assertEqual(stat.S_IMODE(os.stat(reports[0]).st_mode),
0o640, 'report has correct permissions')
# touch report -> "seen" case
apport.fileutils.mark_report_seen(reports[0])
reports = apport.fileutils.get_new_reports()
self.assertEqual(len(reports), 0)
script = self._test_crash(scriptname=script)
reports = apport.fileutils.get_new_reports()
self.assertEqual(len(reports), 1)
# "unseen" case
script = self._test_crash(scriptname=script)
reports = apport.fileutils.get_new_reports()
self.assertEqual(len(reports), 1)
def test_symlink(self):
'''Python crash of a symlinked program resolves to target'''
script = self._test_crash()
# load report for this
reports = apport.fileutils.get_new_reports()
self.assertEqual(len(reports), 1, 'crashed Python program produced a report')
pr1 = apport.Report()
with open(reports[0], 'rb') as f:
pr1.load(f)
for f in apport.fileutils.get_all_reports():
os.unlink(f)
script_link = os.path.join(os.path.dirname(script), 'script-link')
os.symlink(os.path.basename(script), script_link)
self.addCleanup(os.unlink, script_link)
# run script through symlink name
p = subprocess.Popen([script_link], stderr=subprocess.PIPE)
err = p.communicate()[1].decode()
self.assertEqual(p.returncode, 1,
'crashing test python program exits with failure code')
self.assertTrue('This should happen.' in err, err)
# get report for symlinked crash
reports = apport.fileutils.get_new_reports()
self.assertEqual(len(reports), 1, 'crashed Python program produced a report')
pr2 = apport.Report()
with open(reports[0], 'rb') as f:
pr2.load(f)
# check report contents
self.assertTrue('bin/python' in pr2['InterpreterPath'])
self.assertEqual(pr1['ExecutablePath'], script)
self.assertEqual(pr2['ExecutablePath'], script)
self.assertEqual(pr1.crash_signature(), pr2.crash_signature())
def test_no_argv(self):
'''with zapped sys.argv.'''
self._test_crash('import sys\nsys.argv = None')
# did we get a report?
reports = apport.fileutils.get_new_reports()
pr = None
self.assertEqual(len(reports), 1, 'crashed Python program produced a report')
self.assertEqual(stat.S_IMODE(os.stat(reports[0]).st_mode),
0o640, 'report has correct permissions')
pr = apport.report.Report()
with open(reports[0], 'rb') as f:
pr.load(f)
# check report contents
expected_keys = ['InterpreterPath', 'Traceback', 'ProblemType',
'ProcEnviron', 'ProcStatus', 'ProcCmdline', 'Date',
'ExecutablePath', 'ProcMaps', 'UserGroups']
self.assertTrue(set(expected_keys).issubset(set(pr.keys())),
'report has necessary fields')
self.assertTrue('bin/python' in pr['InterpreterPath'])
# we have no actual executable, so we should fall back to the
# interpreter
self.assertEqual(pr['ExecutablePath'], pr['InterpreterPath'])
if 'ExecutableTimestamp' in pr:
self.assertEqual(pr['ExecutableTimestamp'],
str(int(os.stat(pr['ExecutablePath']).st_mtime)))
self.assertTrue(pr['Traceback'].startswith('Traceback'))
def test_python_env(self):
'''Python environmental variables appear in report'''
self._test_crash()
# did we get a report?
reports = apport.fileutils.get_new_reports()
pr = None
self.assertEqual(len(reports), 1, 'crashed Python program produced a report')
pr = apport.report.Report()
with open(reports[0], 'rb') as f:
pr.load(f)
# check report contents
self.assertTrue('PYTHONPATH' in pr['ProcEnviron'],
'report contains PYTHONPATH')
self.assertTrue('/my/bogus/path' in pr['ProcEnviron'],
pr['ProcEnviron'])
def _assert_no_reports(self):
'''Assert that there are no crash reports.'''
reports = apport.fileutils.get_new_reports()
self.assertEqual(len(reports), 0,
'no crash reports present (cwd: %s)' % os.getcwd())
def test_interactive(self):
'''interactive Python sessions never generate a report.'''
orig_cwd = os.getcwd()
try:
for d in ('/tmp', '/usr/local', '/usr'):
os.chdir(d)
p = subprocess.Popen(['python'], stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = p.communicate(b'raise ValueError')
out = out.decode()
err = err.decode()
assert p.returncode != 0
assert out == ''
assert 'ValueError' in err
self._assert_no_reports()
finally:
os.chdir(orig_cwd)
def test_ignoring(self):
'''the Python crash hook respects the ignore list.'''
# put the script into /var/crash, since that isn't ignored in the
# hook
(fd, script) = tempfile.mkstemp(dir=apport.fileutils.report_dir)
orig_home = os.getenv('HOME')
if orig_home is not None:
del os.environ['HOME']
ifpath = os.path.expanduser(apport.report._ignore_file)
orig_ignore_file = None
if orig_home is not None:
os.environ['HOME'] = orig_home
try:
os.write(fd, ('''#!/usr/bin/env %s
import apport_python_hook
apport_python_hook.install()
def func(x):
raise Exception('This should happen.')
func(42)
''' % os.getenv('PYTHON', 'python3')).encode('ascii'))
os.close(fd)
os.chmod(script, 0o755)
# move aside current ignore file
if os.path.exists(ifpath):
orig_ignore_file = ifpath + '.apporttest'
os.rename(ifpath, orig_ignore_file)
# ignore
r = apport.report.Report()
r['ExecutablePath'] = script
r.mark_ignore()
r = None
p = subprocess.Popen([script, 'testarg1', 'testarg2'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
err = p.communicate()[1].decode()
self.assertEqual(p.returncode, 1,
'crashing test python program exits with failure code')
self.assertTrue('Exception: This should happen.' in err, err)
finally:
os.unlink(script)
# clean up our ignore file
if os.path.exists(ifpath):
os.unlink(ifpath)
if orig_ignore_file:
os.rename(orig_ignore_file, ifpath)
# did we get a report?
reports = apport.fileutils.get_new_reports()
self.assertEqual(len(reports), 0)
def test_no_flooding(self):
'''limit successive reports'''
count = 0
limit = 5
try:
while count < limit:
self._test_crash(scriptname='/var/tmp/pytestcrash')
reports = apport.fileutils.get_new_reports()
if not reports:
break
self.assertEqual(len(reports), 1, 'crashed Python program produced one report')
apport.fileutils.mark_report_seen(reports[0])
count += 1
finally:
os.unlink('/var/tmp/pytestcrash')
self.assertGreater(count, 1)
self.assertLess(count, limit)
def test_dbus_service_unknown_invalid(self):
'''DBus.Error.ServiceUnknown with an invalid name'''
self._test_crash(extracode='''import dbus
obj = dbus.SessionBus().get_object('com.example.NotExisting', '/Foo')
''')
pr = self._load_report()
self.assertTrue(pr['Traceback'].startswith('Traceback'), pr['Traceback'])
self.assertTrue('org.freedesktop.DBus.Error.ServiceUnknown' in pr['Traceback'], pr['Traceback'])
self.assertEqual(pr['DbusErrorAnalysis'], 'no service file providing com.example.NotExisting')
def test_dbus_service_unknown_wrongbus_notrunning(self):
'''DBus.Error.ServiceUnknown with a valid name on a different bus (not running)'''
subprocess.call(['killall', 'gvfsd-metadata'])
self._test_crash(extracode='''import dbus
obj = dbus.SystemBus().get_object('org.gtk.vfs.Metadata', '/org/gtk/vfs/metadata')
''')
pr = self._load_report()
self.assertTrue('org.freedesktop.DBus.Error.ServiceUnknown' in pr['Traceback'], pr['Traceback'])
self.assertTrue(pr['DbusErrorAnalysis'].startswith('provided by /usr/share/dbus-1/services/gvfs-metadata.service'),
pr['DbusErrorAnalysis'])
self.assertTrue('gvfsd-metadata is not running' in pr['DbusErrorAnalysis'], pr['DbusErrorAnalysis'])
def test_dbus_service_unknown_wrongbus_running(self):
'''DBus.Error.ServiceUnknown with a valid name on a different bus (running)'''
self._test_crash(extracode='''import dbus
# let the service be activated, to ensure it is running
obj = dbus.SessionBus().get_object('org.gtk.vfs.Metadata', '/org/gtk/vfs/metadata')
assert obj
obj = dbus.SystemBus().get_object('org.gtk.vfs.Metadata', '/org/gtk/vfs/metadata')
''')
pr = self._load_report()
self.assertTrue('org.freedesktop.DBus.Error.ServiceUnknown' in pr['Traceback'], pr['Traceback'])
self.assertTrue(pr['DbusErrorAnalysis'].startswith('provided by /usr/share/dbus-1/services/gvfs-metadata.service'),
pr['DbusErrorAnalysis'])
self.assertTrue('gvfsd-metadata is running' in pr['DbusErrorAnalysis'], pr['DbusErrorAnalysis'])
def test_dbus_service_timeout_running(self):
'''DBus.Error.NoReply with a running service'''
# ensure the service is running
metadata_obj = dbus.SessionBus().get_object('org.gtk.vfs.Metadata', '/org/gtk/vfs/metadata')
self.assertNotEqual(metadata_obj, None)
# timeout of zero will always fail with NoReply
try:
subprocess.call(['killall', '-STOP', 'gvfsd-metadata'])
self._test_crash(extracode='''import dbus
obj = dbus.SessionBus().get_object('org.gtk.vfs.Metadata', '/org/gtk/vfs/metadata')
assert obj
i = dbus.Interface(obj, 'org.freedesktop.DBus.Peer')
i.Ping(timeout=1)
''')
finally:
subprocess.call(['killall', '-CONT', 'gvfsd-metadata'])
# check report contents
reports = apport.fileutils.get_new_reports()
self.assertEqual(len(reports), 0, 'NoReply is an useless exception and should not create a report')
# This is disabled for now as we cannot get the bus name from the NoReply exception
#pr = self._load_report()
#self.assertTrue('org.freedesktop.DBus.Error.NoReply' in pr['Traceback'], pr['Traceback'])
#self.assertTrue(pr['DbusErrorAnalysis'].startswith('provided by /usr/share/dbus-1/services/gvfs-metadata.service'),
# pr['DbusErrorAnalysis'])
#self.assertTrue('gvfsd-metadata is running' in pr['DbusErrorAnalysis'], pr['DbusErrorAnalysis'])
# This is disabled for now as we cannot get the bus name from the NoReply exception
# def test_dbus_service_timeout_notrunning(self):
# '''DBus.Error.NoReply with a crashing method'''
#
# # run our own mock service with a crashing method
# subprocess.call(['killall', 'gvfsd-metadata'])
# service = subprocess.Popen([os.getenv('PYTHON', 'python3')],
# stdin=subprocess.PIPE,
# universal_newlines=True)
# service.stdin.write('''import os
#import dbus, dbus.service, dbus.mainloop.glib
#from gi.repository import GLib
#
#class MockMetadata(dbus.service.Object):
# @dbus.service.method('com.ubuntu.Test', in_signature='', out_signature='i')
# def Crash(self):
# os.kill(os.getpid(), 5)
#
#dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
#dbus_name = dbus.service.BusName('org.gtk.vfs.Metadata', dbus.SessionBus())
#svr = MockMetadata(bus_name=dbus_name, object_path='/org/gtk/vfs/metadata')
#GLib.MainLoop().run()
#''')
# service.stdin.close()
# self.addCleanup(service.terminate)
# time.sleep(0.5)
#
# self._test_crash(extracode='''import dbus
#obj = dbus.SessionBus().get_object('org.gtk.vfs.Metadata', '/org/gtk/vfs/metadata')
#assert obj
#dbus.Interface(obj, 'com.ubuntu.Test').Crash()
#''')
#
# pr = self._load_report()
# self.assertTrue('org.freedesktop.DBus.Error.NoReply' in pr['Traceback'], pr['Traceback'])
# self.assertTrue(pr['DbusErrorAnalysis'].startswith('provided by /usr/share/dbus-1/services/gvfs-metadata.service'),
# pr['DbusErrorAnalysis'])
# self.assertTrue('gvfsd-metadata is not running' in pr['DbusErrorAnalysis'], pr['DbusErrorAnalysis'])
def test_dbus_service_other_error(self):
'''Other DBusExceptions get an unwrapped original exception'''
self._test_crash(extracode='''import dbus
obj = dbus.SessionBus().get_object('org.gtk.vfs.Daemon', '/org/gtk/vfs/Daemon')
dbus.Interface(obj, 'org.gtk.vfs.Daemon').Nonexisting(1)
''')
pr = self._load_report()
self.assertTrue(pr['Traceback'].startswith('Traceback'), pr['Traceback'])
self.assertIn('org.freedesktop.DBus.Error.UnknownMethod', pr['Traceback'])
self.assertNotIn('DbusErrorAnalysis', pr)
# we expect it to unwrap the actual exception from the DBusException
self.assertIn('dbus.exceptions.DBusException(org.freedesktop.DBus.Error.UnknownMethod):',
pr.crash_signature())
def test_generic_os_error(self):
'''OSError with errno and no known subclass'''
self._test_crash(extracode='''def g():
raise OSError(99, 'something bad')
g()''')
pr = self._load_report()
# we expect it to append errno
exe = pr['ExecutablePath']
self.assertEqual(pr.crash_signature(),
'%s:OSError(99):%s@11:g' % (exe, exe))
def test_generic_os_error_no_errno(self):
'''OSError without errno and no known subclass'''
self._test_crash(extracode='''def g():
raise OSError('something bad')
g()''')
pr = self._load_report()
# we expect it to not stumble over the missing errno
exe = pr['ExecutablePath']
self.assertEqual(pr.crash_signature(),
'%s:OSError:%s@11:g' % (exe, exe))
def test_subclassed_os_error(self):
'''OSError with known subclass'''
self._test_crash(extracode='''def g():
raise OSError(2, 'no such file /notexisting')
g()''')
pr = self._load_report()
# we expect it to not append errno, as it's already encoded in the subclass
exe = pr['ExecutablePath']
self.assertEqual(pr.crash_signature(),
'%s:FileNotFoundError:%s@11:g' % (exe, exe))
def _load_report(self):
'''Ensure that there is exactly one crash report and load it'''
reports = apport.fileutils.get_new_reports()
self.assertEqual(len(reports), 1, 'crashed Python program produced a report')
pr = apport.Report()
with open(reports[0], 'rb') as f:
pr.load(f)
return pr
unittest.main()
| UTF-8 | Python | false | false | 19,631 | py | 267 | test_python_crashes.py | 86 | 0.605012 | 0.599307 | 0.000153 | 492 | 38.900407 | 124 |
rezzycavalheiro/SpaceApps-CodeRed | 9,036,611,193,136 | 5d5ec2b1bc8d7d636ec7d687e76cf63d14a8fd9e | 558b3e8e8cbcb39c8b5f1096381487410257de54 | /teste.py | 0858c2d95bb0407676d8513527c71c7cc84c93c7 | []
| no_license | https://github.com/rezzycavalheiro/SpaceApps-CodeRed | 70d53d0e6571e10c8d114486c43546a6054d56d5 | 958242be155b2dbb446dd96ec97e161af903b2ec | refs/heads/master | 2022-02-22T08:02:52.418915 | 2019-10-22T23:40:58 | 2019-10-22T23:40:58 | 216,293,011 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import requests
month = '01'
name_month = 'janeiro'
for day in range(1,31):
if (day >= 1 and day < 10):
day = '0' + str(day)
url = 'http://viirsfire.geog.umd.edu/web_data/GLOBAL/NOAA/2018' + month + str(day) + '_NOAA.txt'
r = requests.get(url, allow_redirects=True)
open(name_month + str(day) + '.csv', 'wb').write(r.content) | UTF-8 | Python | false | false | 351 | py | 9 | teste.py | 4 | 0.603989 | 0.566952 | 0 | 11 | 31 | 100 |
yuzhouxianzhi/Dynamic-Congestion-Prediction | 8,177,617,742,052 | 6f53d43cf59ff350db26ece9c34c589823543b88 | 1e766f9f33f0f450638c13e4fb580b120dbb4912 | /src/main/python/model_test/run_scenarios.py | fb92f47b08a43137bc671f461afa547f55a2f74b | []
| no_license | https://github.com/yuzhouxianzhi/Dynamic-Congestion-Prediction | 9eb9ee5208c11b5b3f948d42ba86d8361c967ad4 | 65765c19cabc6eb5800abbae6d00abb74cdeb49d | refs/heads/master | 2022-03-29T15:10:07.227083 | 2020-02-01T16:37:39 | 2020-02-01T19:39:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from generate_demand import generate_initial_demand
from generate_io_curves import get_links_output_curve, get_congestion_links_input_curve_after_merging,\
get_congestion_links_input_curve_from_demand, get_freeway_links_input_curve_after_diverging, \
get_congestion_links_input_curve_after_toll, get_total_toll_collected
from route_choice import check_route_choice
from queue_spillover import check_queue_spillover
from calculate_link_demand_and_congestion import get_link_congestion, \
get_link_demand, get_congestion_marginal_impact, get_cumulative_congestion_value
from plot_curves import plot_io_curves, plot_demand_congestion, plot_fundamental_diagrams
import numpy as np
import os
MINS_PER_DAY = 1440
np.random.seed(0)
class Parameters():
# Initialize with dictionary or default parameters (Scenario 1)
def __init__(self,dict=None):
if not dict:
dict = {}
self.min_intervals = dict.get('min_intervals',5)
self.num_bins = MINS_PER_DAY / self.min_intervals
self.num_zones = dict.get('num_zones',4)
self.demand_start_times = dict.get('demand_start_times',[0, 480, 960])
self.demand_end_times = dict.get('demand_end_times',[240, 720, 1200])
self.demand_slopes = dict.get('demand_slopes',[0.1,0.1,0.1])
self.congestion_links_capacity = dict.get('congestion_links_capacity',[12,12,12,5])
self.threshold_output_for_congestion = dict.get('threshold_output_for_congestion',[1,1,1,1])
self.threshold_beta_for_congestion_impact = dict.get('threshold_output_for_congestion_impact',[0.01,0.01,0.01,0.01])
self.congestion_links_fftt = dict.get('congestion_links_fftt',[20,20,20,20])
self.congestion_links_jam_density = dict.get('congestion_links_jam_density',[100,100,100,100])
self.congestion_links_length = dict.get('congestion_links_length',[5,5,5,5])
self.freeway_links_capacity = dict.get('freeway_links_capacity',[20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,20, 20, 20, 20])
self.freeway_links_fftt = dict.get('freeway_links_fftt',[100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100])
self.freeway_links_jam_density = dict.get('freeway_links_jam_density',[100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100])
self.freeway_links_length = dict.get('freeway_links_length',[25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25])
self.congestion_nn_smoothening_number = dict.get('congestion_nn_smoothening_number',[10,10,10,10])
self.congestion_marginal_impact_nn_smoothening_number = dict.get('congestion_marginal_impact_nn_smoothening_number',[10,10,10,10])
self.check_route_choice = dict.get('check_route_choice',False)
self.plot_congestion_io_curves = dict.get('plot_congestion_io_curves',True)
self.plot_demand_congestion_curves = dict.get('plot_demand_congestion_curves',True)
self.plot_route_choice_io_curves = dict.get('plot_route_choice_io_curves',False)
self.check_queue_spillover = dict.get('check_queue_spillover',False)
self.file_directory = dict.get('file_directory','./scenario_1')
self.get_curves_data = dict.get('get_curves_data',False)
self.incident_prob = dict.get('incident_prob',0)
self.incident_time = dict.get('incident_time',int(np.random.rand()*MINS_PER_DAY))
self.plot_demand_congestion_marginal_impact_curves = dict.get('plot_demand_congestion_marginal_impact_curves',True)
self.value_of_time_early = dict.get('value_of_time_early',[0.5,0.5,0.5])
self.value_of_time_late = dict.get('value_of_time_late',[0.5,0.5,0.5])
self.implement_toll = dict.get('implement_toll',False)
self.toll_curves = dict.get('toll_curves',[lambda x: 0,lambda x: 0,lambda x: 0])
self.plot_cum_input_curves_toll = dict.get('plot_cum_input_curves_toll',False)
self.demand_nn_smoothening_number = dict.get('demand_nn_smoothening_number',[8,8,8])
def run(parameters):
plot_fundamental_diagrams(parameters=parameters)
od_demand_funcs = generate_initial_demand(num_zones=parameters.num_zones, start_times=parameters.demand_start_times,
end_times=parameters.demand_end_times, slopes=parameters.demand_slopes)
congestion_links_input_curve_from_zone = get_congestion_links_input_curve_from_demand(num_zones=parameters.num_zones,
od_demand_funcs=od_demand_funcs,
min_intervals=parameters.min_intervals)
if parameters.implement_toll:
congestion_links_input_curve_from_zone = \
get_congestion_links_input_curve_after_toll(congestion_links_input_curve_from_demand=congestion_links_input_curve_from_zone,
toll_curves=parameters.toll_curves, min_intervals=parameters.min_intervals,
value_of_time_early=parameters.value_of_time_early,
value_of_time_late=parameters.value_of_time_late, num_zones=parameters.num_zones,
plot_cum_input_curves_toll=parameters.plot_cum_input_curves_toll,
file_directory=parameters.file_directory)
congestion_links_output_curve_from_zone = get_links_output_curve(links_input_curve=
congestion_links_input_curve_from_zone,
links_capacity=parameters.congestion_links_capacity,
links_fftt=parameters.congestion_links_fftt,
min_intervals=parameters.min_intervals,
num_bins=parameters.num_bins)
freeway_links_input_curve = get_freeway_links_input_curve_after_diverging(congestion_links_output_curve=
congestion_links_output_curve_from_zone,
num_zones=parameters.num_zones,
min_intervals=parameters.min_intervals,
num_bins=parameters.num_bins)
freeway_links_output_curve = get_links_output_curve(links_input_curve=freeway_links_input_curve,
links_capacity=parameters.freeway_links_capacity,
links_fftt=parameters.freeway_links_fftt,
min_intervals=parameters.min_intervals,
num_bins=parameters.num_bins)
if parameters.check_route_choice:
best_route_input_curve = freeway_links_input_curve[parameters.num_zones - 1]
best_route_output_curve = freeway_links_output_curve[parameters.num_zones - 1]
alternate_route = [(freeway_links_input_curve[parameters.num_zones - 2],
freeway_links_output_curve[parameters.num_zones - 2]),
(freeway_links_input_curve[parameters.num_zones * (parameters.num_zones - 1) - 1],
freeway_links_output_curve[parameters.num_zones * (parameters.num_zones - 1) - 1])]
best_route_fftt = parameters.freeway_links_fftt[parameters.num_zones - 1]
best_route_bottleneck_capacity = parameters.freeway_links_capacity[parameters.num_zones - 1]
alternative_route_fftts = [parameters.freeway_links_fftt[parameters.num_zones - 2],
parameters.freeway_links_fftt[parameters.num_zones * (parameters.num_zones - 1) - 1]]
incident_prob = parameters.incident_prob
incident_occurance = np.random.rand() < incident_prob
incident_bin = parameters.incident_time / parameters.min_intervals if incident_occurance else MINS_PER_DAY / parameters.min_intervals
check_route_choice(best_route_input_curve=best_route_input_curve, best_route_output_curve=best_route_output_curve,
alternate_route=alternate_route,best_route_fftt=best_route_fftt,
best_route_bottleneck_capacity=best_route_bottleneck_capacity,
alternate_route_fftts=alternative_route_fftts,
min_intervals=parameters.min_intervals, num_bins=parameters.num_bins, incident_bin=incident_bin)
if parameters.plot_route_choice_io_curves:
io_series = [
(freeway_links_input_curve.as_matrix()[:, parameters.num_zones - 1],
freeway_links_output_curve.as_matrix()[:, parameters.num_zones - 1]),
(freeway_links_input_curve.as_matrix()[:, parameters.num_zones - 2],
freeway_links_output_curve.as_matrix()[:, parameters.num_zones - 2]),
(freeway_links_input_curve.as_matrix()[:, parameters.num_zones * (parameters.num_zones - 1) - 1],
freeway_links_output_curve.as_matrix()[:, parameters.num_zones * (parameters.num_zones - 1) - 1])
]
plot_io_curves(io_series=io_series, filepath=parameters.file_directory + '/sample_plots/io_curve_route_choice_links.png',
min_intervals=parameters.min_intervals)
congestion_links_input_curve_to_zone = get_congestion_links_input_curve_after_merging(freeway_links_output_curve=
freeway_links_output_curve,
num_zones=parameters.num_zones,
min_intervals=parameters.min_intervals)
congestion_links_output_curve_to_zone = get_links_output_curve(links_input_curve=congestion_links_input_curve_to_zone,
links_capacity=parameters.congestion_links_capacity,
links_fftt=parameters.congestion_links_fftt,
min_intervals=parameters.min_intervals,
num_bins=parameters.num_bins)
if parameters.check_queue_spillover:
check_queue_spillover(links_input_curve=congestion_links_input_curve_to_zone,
links_output_curve=congestion_links_output_curve_to_zone,
fftts=parameters.congestion_links_fftt,
links_capacity=parameters.congestion_links_capacity,
links_jam_density=parameters.congestion_links_jam_density,
links_length=parameters.congestion_links_length,
min_intervals=parameters.min_intervals,
num_bins=parameters.num_bins)
for i in range(parameters.num_bins):
freeway_links_output_curve[parameters.num_zones - 1].iloc[i] = min(freeway_links_output_curve[parameters.num_zones - 1].iloc[i],
congestion_links_input_curve_to_zone[parameters.num_zones - 1].iloc[i])
spillover_freeway_link_indices = [(parameters.num_zones * (parameters.num_zones - 1) + k) for k in range(1, parameters.num_zones)]
spillover_freeway_link_indices = spillover_freeway_link_indices + [parameters.num_zones - 1]
spillover_freeway_links_input_curves = freeway_links_input_curve[spillover_freeway_link_indices].copy()
spillover_freeway_links_output_curves = freeway_links_output_curve[spillover_freeway_link_indices].copy()
check_queue_spillover(links_input_curve=spillover_freeway_links_input_curves,
links_output_curve=spillover_freeway_links_output_curves,
fftts=parameters.freeway_links_fftt[:parameters.num_zones],
links_capacity= parameters.freeway_links_capacity[:parameters.num_zones],
links_jam_density=parameters.freeway_links_jam_density[:parameters.num_zones],
links_length=parameters.freeway_links_length[:parameters.num_zones],
min_intervals=parameters.min_intervals, num_bins=parameters.num_bins)
for i in range(parameters.num_bins):
freeway_links_input_curve[parameters.num_zones - 1].iloc[i] = \
spillover_freeway_links_input_curves[parameters.num_zones - 1].iloc[i]
congestion_links_output_curve_from_zone[0].iloc[i] = \
spillover_freeway_links_input_curves[parameters.num_zones - 1].iloc[i]
congestion_spillover = get_link_congestion(link_input_curve=congestion_links_input_curve_from_zone[0],
link_output_curve=congestion_links_output_curve_from_zone[0],
threshold_output_for_congestion=parameters.threshold_output_for_congestion[0],
congestion_nn_smoothening_number=parameters.congestion_nn_smoothening_number[0],
min_intervals=parameters.min_intervals, num_bins=parameters.num_bins)
congestion_values = get_link_congestion(link_input_curve=congestion_links_input_curve_to_zone[parameters.num_zones - 1],
link_output_curve=congestion_links_output_curve_to_zone[parameters.num_zones - 1],
threshold_output_for_congestion=parameters.threshold_output_for_congestion[parameters.num_zones - 1],
congestion_nn_smoothening_number=parameters.congestion_nn_smoothening_number[parameters.num_zones - 1],
min_intervals=parameters.min_intervals, num_bins=parameters.num_bins)
link_demands = [get_link_demand(link_input_curve=congestion_links_input_curve_from_zone[i],
num_bins=parameters.num_bins, implement_tolls=parameters.implement_toll,
demand_nn_smoothening_number=parameters.demand_nn_smoothening_number[i])
for i in range(parameters.num_zones - 1)]
if parameters.implement_toll:
total_toll_collected = get_total_toll_collected(link_demands,parameters.toll_curves,
parameters.num_zones, parameters.min_intervals)
congestion_marginal_impacts = get_congestion_marginal_impact(link_input_curve=congestion_links_input_curve_to_zone[parameters.num_zones - 1],
link_output_curve=congestion_links_output_curve_to_zone[parameters.num_zones - 1],
congestion_values=congestion_values,
marginal_impact_nn_smoothening_number=parameters.
congestion_marginal_impact_nn_smoothening_number[parameters.num_zones - 1],
min_intervals=parameters.min_intervals, num_bins=parameters.num_bins,
threshold_beta_for_congestion_impact=
parameters.threshold_beta_for_congestion_impact[parameters.num_zones - 1])
cum_congestion = get_cumulative_congestion_value(congestion_values,parameters.min_intervals,parameters.num_bins - 1)
io_series = [
(congestion_links_input_curve_to_zone.as_matrix()[:, parameters.num_zones - 1],
congestion_links_output_curve_to_zone.as_matrix()[:, parameters.num_zones - 1])]
if not os.path.exists(parameters.file_directory):
os.makedirs(parameters.file_directory)
if parameters.plot_congestion_io_curves:
plot_io_curves(io_series=io_series, filepath=parameters.file_directory+'/sample_plots/io_curve_congestion_zone_link.png',
min_intervals=parameters.min_intervals)
if parameters.plot_demand_congestion_curves:
plot_demand_congestion(demands=link_demands, congestion=congestion_values,
filepath=parameters.file_directory+'/sample_plots/demand_congestion_plot.png',
num_bins=parameters.num_bins, min_intervals=parameters.min_intervals)
if parameters.check_queue_spillover:
plot_demand_congestion(demands=link_demands, congestion=congestion_values,
filepath=parameters.file_directory+'/sample_plots/spillover_congestion_plot.png',
congestion_spillover=congestion_spillover,
num_bins=parameters.num_bins, min_intervals=parameters.min_intervals)
if parameters.plot_demand_congestion_marginal_impact_curves:
plot_demand_congestion(demands=[link_demand[:-1] for link_demand in link_demands], congestion=congestion_marginal_impacts,
filepath=parameters.file_directory + '/sample_plots/demand_marginal_impact_plot.png',
num_bins=parameters.num_bins - 1, min_intervals=parameters.min_intervals)
if parameters.get_curves_data:
dict_return = {'link_demands':link_demands, 'congestion_values':congestion_values,
'congestion_marginal_impact_values': congestion_marginal_impacts,
'congestion_links_input_curve_from_zone':congestion_links_input_curve_from_zone,
'congestion_links_output_curve_from_zone':congestion_links_output_curve_from_zone,
'freeway_links_input_curve':freeway_links_input_curve,
'freeway_links_output_curve':freeway_links_output_curve,
'congestion_links_input_curve_to_zone':congestion_links_input_curve_to_zone,
'congestion_links_output_curve_to_zone': congestion_links_output_curve_to_zone,
'cum_congestion': cum_congestion}
if parameters.check_queue_spillover:
dict_return['congestion_spillover'] = congestion_spillover
if parameters.implement_toll:
dict_return['total_toll_collected'] = total_toll_collected
return dict_return
| UTF-8 | Python | false | false | 18,951 | py | 31 | run_scenarios.py | 21 | 0.595219 | 0.578175 | 0 | 221 | 84.751131 | 147 |
bely66/data_analysis | 14,766,097,572,093 | 42d4937296f14935f5306bf4d1f5ee1e6cfbcbbc | 276e4b49342a4232cf26870a2fc52a83f3f97dae | /panfun.py | a295aa45a97a75b949d0a0e637fa7323037a9f5e | []
| no_license | https://github.com/bely66/data_analysis | ce51b4c2dae20f874620ce5cf379a23443ecc004 | f6943d42c6177febac8775b868895fc5630404e5 | refs/heads/master | 2020-05-05T00:18:43.035021 | 2019-04-08T13:09:27 | 2019-04-08T13:09:27 | 179,570,576 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Thu Apr 4 22:44:25 2019
@author: USER
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
def return_series(d):
series = pd.Series(d)
return series
def return_series_rng (d ,x , y):
series = pd.Series(d)
series = series [series<y]
series = series [series>x]
return series
def find_word (d,s):
series = pd.Series(d)
return series[s]
| UTF-8 | Python | false | false | 434 | py | 4 | panfun.py | 3 | 0.631336 | 0.603687 | 0 | 23 | 17.869565 | 35 |
shirblc/FSND-fyyur | 12,592,844,147,146 | 1d084cdac53d9cdecf92b7a26c29e64a57cb658b | 37f58e2076dbc053637e66e47d88ed15bc56d170 | /app.py | da24999f612da141205cb57928ec2005582ee920 | []
| no_license | https://github.com/shirblc/FSND-fyyur | 1b3a33aaba04316fa1903fffde30951afcbf7d80 | d49bf00b0c68e2e75f8f14424b0a17316648b23a | refs/heads/master | 2021-04-02T04:11:50.022826 | 2020-03-21T16:36:48 | 2020-03-21T16:36:48 | 248,242,501 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #----------------------------------------------------------------------------#
# Imports
#----------------------------------------------------------------------------#
import json
import dateutil.parser
import babel
from flask import Flask, render_template, request, Response, flash, redirect, url_for
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
import logging
from logging import Formatter, FileHandler
from flask_wtf import Form
from forms import *
from flask_migrate import Migrate
from datetime import datetime
#----------------------------------------------------------------------------#
# App Config.
#----------------------------------------------------------------------------#
app = Flask(__name__)
moment = Moment(app)  # Flask-Moment: client-side datetime rendering in templates
app.config.from_object('config')  # load settings (presumably DB URI etc.) from config.py — confirm
db = SQLAlchemy(app)
migrate = Migrate(app, db)  # enables `flask db ...` migration commands
#----------------------------------------------------------------------------#
# Models.
#----------------------------------------------------------------------------#
class Venue(db.Model):
    """A performance venue; one venue hosts many shows."""
    __tablename__ = 'venues'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String, nullable=False)
    city = db.Column(db.String(120), nullable=False)
    state = db.Column(db.String(120), nullable=False)
    address = db.Column(db.String(120), nullable=False)
    phone = db.Column(db.String(120))
    image_link = db.Column(db.String(500))
    facebook_link = db.Column(db.String(120))
    # Comma-separated list of genre names (the views split on ',').
    genres = db.Column(db.String(240), nullable=False)
    website = db.Column(db.String(120))
    seeking_talent = db.Column(db.Boolean)
    seeking_description = db.Column(db.String(240))
    # One-to-many: Show rows reference this venue via shows.venue_id.
    shows = db.relationship('Show', backref='show_venue')
class Artist(db.Model):
    """A performing artist; one artist plays many shows."""
    __tablename__ = 'artists'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String, nullable=False)
    city = db.Column(db.String(120), nullable=False)
    state = db.Column(db.String(120), nullable=False)
    phone = db.Column(db.String(120))
    # Comma-separated list of genre names (the views split on ',').
    genres = db.Column(db.String(120), nullable=False)
    image_link = db.Column(db.String(500))
    facebook_link = db.Column(db.String(120))
    website = db.Column(db.String(120))
    seeking_venue = db.Column(db.Boolean)
    seeking_description = db.Column(db.String(240))
    # One-to-many: Show rows reference this artist via shows.artist_id.
    shows = db.relationship('Show', backref='show_artist')
class Show(db.Model):
    """A booking of an artist at a venue at a specific start time."""
    __tablename__ = 'shows'
    id = db.Column(db.Integer, primary_key=True)
    venue_id = db.Column(db.Integer, db.ForeignKey('venues.id'), nullable=False)
    artist_id = db.Column(db.Integer, db.ForeignKey('artists.id'), nullable=False)
    start_time = db.Column(db.DateTime, nullable=False)
#----------------------------------------------------------------------------#
# Filters.
#----------------------------------------------------------------------------#
def format_datetime(value, format='medium'):
    """Jinja 'datetime' filter: parse an ISO date string and format it for display."""
    parsed = dateutil.parser.parse(value)
    # Named presets; any other value is handed straight to babel as a pattern.
    presets = {
        'full': "EEEE MMMM, d, y 'at' h:mma",
        'medium': "EE MM, dd, y h:mma",
    }
    return babel.dates.format_datetime(parsed, presets.get(format, format))


app.jinja_env.filters['datetime'] = format_datetime
#----------------------------------------------------------------------------#
# Controllers.
#----------------------------------------------------------------------------#
@app.route('/')
def index():
    """Render the landing page."""
    homepage = 'pages/home.html'
    return render_template(homepage)
# Venues
# ----------------------------------------------------------------
@app.route('/venues')
def venues():
    """List all venues grouped by (city, state).

    Removed an unused ``current_datetime`` local left over from earlier code.
    NOTE(review): each venue's ``num_upcoming_shows`` is actually the venue's
    *total* show count (no start_time filter) -- confirm whether the template
    expects only future shows.
    """
    data = []
    # One row per distinct (city, state) plus the number of venues in it.
    city_count = db.session.query(Venue.city, Venue.state, db.func.count(Venue.id)).group_by(Venue.city, Venue.state).all()
    # Venue id/name with its show count (outer join keeps show-less venues).
    venue_show_data = db.session.query(Venue.id, Venue.name, db.func.count(Show.id)).outerjoin(Show).group_by(Venue.id, Venue.name)

    for city in city_count:
        data.append({
            'city': city[0],
            'state': city[1],
            'venues': []
        })
        city_venue_data = venue_show_data.filter(Venue.city == city[0]).filter(Venue.state == city[1]).all()
        num_venues = city[2]

        # Attach every venue that belongs to this city/state group.
        for i in range(num_venues):
            data[len(data) - 1]['venues'].append({
                'id': city_venue_data[i][0],
                'name': city_venue_data[i][1],
                'num_upcoming_shows': city_venue_data[i][2]
            })

    return render_template('pages/venues.html', areas=data)
@app.route('/venues/search', methods=['POST'])
def search_venues():
    """Case-insensitive substring search over venue names."""
    # Gets the search term from the text field and searches in the database
    search_term = request.form.get('search_term', '')
    # Run the query once; the count is simply len() of the fetched rows
    # (the original issued the identical query a second time just to count).
    search_results = db.session.query(Venue.id, Venue.name).filter(Venue.name.ilike('%' + search_term + '%')).all()
    num_search_results = len(search_results)
    return render_template('pages/search_venues.html', results=search_results, search_term=search_term, num_search_results=num_search_results)
@app.route('/venues/<int:venue_id>')
def show_venue(venue_id):
    """Venue detail page: venue fields plus its past and upcoming shows."""
    current_datetime = datetime.now()

    venue_data = db.session.query(Venue).get(venue_id)
    # genres is persisted as a comma-separated string; the template wants a list.
    venue_data.genres = str(venue_data.genres).split(',')

    past_shows = db.session.query(Show.venue_id, Show.start_time, Show.artist_id,
        Artist.name.label('artist_name'), Artist.image_link.label('artist_image_link')).join(Artist).filter(Show.venue_id == venue_id).filter(Show.start_time < current_datetime).all()
    future_shows = db.session.query(Show.venue_id, Show.start_time, Show.artist_id,
        Artist.name.label('artist_name'), Artist.image_link.label('artist_image_link')).join(Artist).filter(Show.venue_id == venue_id).filter(Show.start_time > current_datetime).all()

    # Counts derived from the rows already fetched instead of re-querying
    # (assumes every show's artist_id resolves, which the FK guarantees).
    past_shows_count = len(past_shows)
    future_shows_count = len(future_shows)

    return render_template('pages/show_venue.html', venue=venue_data, past_shows=past_shows,
        future_shows=future_shows, num_past_shows=past_shows_count, num_future_shows=future_shows_count)
# Create Venue
# ----------------------------------------------------------------
@app.route('/venues/create', methods=['GET'])
def create_venue_form():
    """Show the blank new-venue form."""
    return render_template('forms/new_venue.html', form=VenueForm())
@app.route('/venues/create', methods=['POST'])
def create_venue_submission():
    """Create a new venue from the submitted form data and flash the outcome."""
    #Venue details as entered in the submitted form
    venue_name = request.form.get('name')
    venue_city = request.form.get('city')
    venue_state = request.form.get('state')
    venue_address = request.form.get('address')
    venue_phone = request.form.get('phone')
    # Multi-select genres are stored as one comma-separated string.
    venue_genres = ','.join(request.form.getlist('genres'))
    venue_fb_link = request.form.get('facebook_link')
    venue_website = request.form.get('website')
    venue_image = request.form.get('image_link')
    # The checkbox posts 'y' when ticked.
    seeking_talent = True if request.form.get('seeking_talent') == 'y' else False
    seeking_description = request.form.get('seeking_description')
    data = {}
    error = False
    #Try to add the data to the database
    try:
        #New venue object
        venue = Venue(name=venue_name, city=venue_city, state=venue_state,
            address=venue_address, phone=venue_phone, genres=venue_genres,
            facebook_link=venue_fb_link, website=venue_website, image_link=venue_image,
            seeking_talent=seeking_talent, seeking_description=seeking_description)
        db.session.add(venue)
        db.session.commit()
        # Capture the name while the session is still open (used after close).
        data['name'] = venue.name
    #If there's an error, rollback the session
    except:
        db.session.rollback()
        error = True
    #Close the connection either way
    finally:
        db.session.close()
    #If an error occurred, flash an error message
    if error:
        flash('An error occurred and the venue was not listed. Please try again.')
    #If there was no error, alert the user the venue was listed
    if not error:
        # on successful db insert, flash success
        flash('Venue ' + data['name'] + ' was successfully listed!')
    return render_template('pages/home.html')
@app.route('/venues/<venue_id>', methods=['DELETE'])
def delete_venue(venue_id):
    """Delete a venue and all of its shows, then redirect based on outcome.

    BUGFIX: the error path previously built a URL path ('/venues/<id>') and
    passed it to url_for(), which expects an *endpoint name* and would raise
    werkzeug's BuildError instead of redirecting.
    """
    error = False
    # Try to delete the venue and all associated events from the database
    try:
        venue = db.session.query(Venue).get(venue_id)
        # Shows reference the venue via a FK, so remove them first.
        db.session.query(Show).filter(Show.venue_id == venue_id).delete()
        db.session.delete(venue)
        db.session.commit()
    # If there's an error, rollback the session
    except:
        db.session.rollback()
        error = True
    # Close the connection either way
    finally:
        db.session.close()

    # If an error occurred, flash and return to the venue page
    if error:
        flash('Failed to delete the venue. Please try again.')
        return redirect(url_for('show_venue', venue_id=venue_id))

    # Otherwise confirm the deletion and go home
    flash('Venue successfully deleted!')
    return redirect(url_for('index'))
# Artists
# ----------------------------------------------------------------
@app.route('/artists')
def artists():
    """List every artist (id and name only)."""
    artist_rows = db.session.query(Artist.id, Artist.name).all()
    return render_template('pages/artists.html', artists=artist_rows)
@app.route('/artists/search', methods=['POST'])
def search_artists():
    """Case-insensitive substring search over artist names."""
    # Gets the search term from the text field and searches the database
    search_term = request.form.get('search_term', '')
    # Run the query once; the count is simply len() of the fetched rows
    # (the original issued the identical query a second time just to count).
    search_results = db.session.query(Artist.name, Artist.id).filter(Artist.name.ilike('%' + search_term + '%')).all()
    num_search_results = len(search_results)
    return render_template('pages/search_artists.html', results=search_results, search_term=search_term, num_search_results=num_search_results)
@app.route('/artists/<int:artist_id>')
def show_artist(artist_id):
    """Artist detail page: artist fields plus past and upcoming shows."""
    current_datetime = datetime.now()

    artist_data = db.session.query(Artist).get(artist_id)
    # genres is persisted as a comma-separated string; the template wants a list.
    artist_data.genres = str(artist_data.genres).split(',')

    past_shows = db.session.query(Show.venue_id, Show.start_time, Venue.name.label('venue_name'),
        Venue.image_link.label('venue_image_link')).select_from(Show).join(Venue).filter(Show.artist_id == artist_id).filter(Show.start_time < current_datetime).all()
    future_shows = db.session.query(Show.venue_id, Show.start_time, Venue.name.label('venue_name'),
        Venue.image_link.label('venue_image_link')).select_from(Show).join(Venue).filter(Show.artist_id == artist_id).filter(Show.start_time > current_datetime).all()

    # Counts derived from the rows already fetched instead of two extra queries
    # (assumes every show's venue_id resolves, which the FK guarantees).
    num_past_shows = len(past_shows)
    num_future_shows = len(future_shows)

    return render_template('pages/show_artist.html', artist=artist_data, past_shows=past_shows,
        future_shows=future_shows, num_past=num_past_shows, num_future=num_future_shows)
# Update
# ----------------------------------------------------------------
@app.route('/artists/<int:artist_id>/edit', methods=['GET'])
def edit_artist(artist_id):
    """Pre-fill the artist edit form from the stored record.

    BUGFIX: genres used to be recovered by str()-mangling a *second* query
    result (replacing "[(", ")]" and quotes), which also left a trailing
    empty entry in the list. Splitting the already-loaded comma-separated
    column is equivalent and avoids both the extra query and the stray ''.
    """
    artist = db.session.query(Artist).get(artist_id)
    form = ArtistForm(obj=artist)
    form.genres.data = str(artist.genres).split(',')
    return render_template('forms/edit_artist.html', form=form, artist=artist)
@app.route('/artists/<int:artist_id>/edit', methods=['POST'])
def edit_artist_submission(artist_id):
    """Apply the edit-artist form to the stored artist record."""
    error = False
    artist_name = ''
    # Try to update the selected artist's details
    try:
        # Get the artist's details
        artist = db.session.query(Artist).get(artist_id)
        # Change the details according to the form details
        artist.name = request.form.get('name')
        artist.city = request.form.get('city')
        artist.state = request.form.get('state')
        artist.phone = request.form.get('phone')
        # Multi-select genres are stored as one comma-separated string.
        artist.genres = ",".join(request.form.getlist('genres'))
        artist.image_link = request.form.get('image_link')
        artist.facebook_link = request.form.get('facebook_link')
        artist.website = request.form.get('website')
        # The checkbox posts 'y' when ticked.
        artist.seeking_venue = True if request.form.get('seeking_venue') == 'y' else False
        artist.seeking_description = request.form.get('seeking_description')
        # Attempt to commit to the database
        db.session.commit()
        # Capture the name while the session is still open (used after close).
        artist_name = artist.name
    #If there's an error, rollback the session
    except:
        db.session.rollback()
        error = True
    #Close the connection either way
    finally:
        db.session.close()
    #If an error occurred, flash an error message
    if error:
        flash('Edit failed due to an error. Please try again.')
    #If there was no error, alert the user the artist was updated
    if not error:
        flash('Updated ' + artist_name + ' successfully!')
    return redirect(url_for('show_artist', artist_id=artist_id))
@app.route('/venues/<int:venue_id>/edit', methods=['GET'])
def edit_venue(venue_id):
    """Pre-fill the venue edit form from the stored record.

    BUGFIX: genres used to be recovered by str()-mangling a *second* query
    result (replacing "[(", ")]" and quotes), which also left a trailing
    empty entry in the list. Splitting the already-loaded comma-separated
    column is equivalent and avoids both the extra query and the stray ''.
    """
    venue = db.session.query(Venue).get(venue_id)
    form = VenueForm(obj=venue)
    form.genres.data = str(venue.genres).split(',')
    return render_template('forms/edit_venue.html', form=form, venue=venue)
@app.route('/venues/<int:venue_id>/edit', methods=['POST'])
def edit_venue_submission(venue_id):
    """Apply the edit-venue form to the stored venue record."""
    error = False
    venue_name = ''
    # Try to update the selected venue's details
    try:
        # Get the venue's details
        venue = db.session.query(Venue).get(venue_id)
        # Update the details based on the form submission
        venue.name = request.form.get('name')
        venue.city = request.form.get('city')
        venue.state = request.form.get('state')
        venue.address = request.form.get('address')
        venue.phone = request.form.get('phone')
        venue.image_link = request.form.get('image_link')
        venue.facebook_link = request.form.get('facebook_link')
        # Multi-select genres are stored as one comma-separated string.
        venue.genres = ','.join(request.form.getlist('genres'))
        venue.website = request.form.get('website')
        # The checkbox posts 'y' when ticked.
        venue.seeking_talent = True if request.form.get('seeking_talent') == 'y' else False
        venue.seeking_description = request.form.get('seeking_description')
        # Try to commit to the database
        db.session.commit()
        # Capture the name while the session is still open (used after close).
        venue_name = venue.name
    #If there's an error, rollback the session
    except:
        db.session.rollback()
        error = True
    #Close the connection either way
    finally:
        db.session.close()
    #If an error occurred, flash an error message
    if error:
        flash('Update failed due to an error. Please try again.')
    #If there was no error, alert the user the venue was updated
    if not error:
        flash('Updated ' + venue_name + ' successfully!')
    return redirect(url_for('show_venue', venue_id=venue_id))
# Create Artist
# ----------------------------------------------------------------
@app.route('/artists/create', methods=['GET'])
def create_artist_form():
    """Show the blank new-artist form."""
    return render_template('forms/new_artist.html', form=ArtistForm())
@app.route('/artists/create', methods=['POST'])
def create_artist_submission():
    """Create a new artist from the submitted form data and flash the outcome.

    BUGFIX: the error flash said "venue" even though this handler creates
    artists.
    """
    #Artist details as entered in the form
    artist_name = request.form.get('name')
    artist_city = request.form.get('city')
    artist_state = request.form.get('state')
    artist_phone = request.form.get('phone')
    # Multi-select genres are stored as one comma-separated string.
    artist_genres = ','.join(request.form.getlist('genres'))
    artist_fb_link = request.form.get('facebook_link')
    artist_image = request.form.get('image_link')
    artist_website = request.form.get('website')
    # The checkbox posts 'y' when ticked.
    seeking_venue = True if request.form.get('seeking_venue') == 'y' else False
    seeking_description = request.form.get('seeking_description')
    data = {}
    error = False
    #Try to add the data to the database
    try:
        artist = Artist(name=artist_name, city=artist_city, state=artist_state,
            phone=artist_phone, genres=artist_genres, facebook_link=artist_fb_link,
            image_link=artist_image, website=artist_website, seeking_venue=seeking_venue,
            seeking_description=seeking_description)
        db.session.add(artist)
        db.session.commit()
        # Capture the name while the session is still open (used after close).
        data['name'] = artist.name
    #If there's an error, rollback the session
    except:
        db.session.rollback()
        error = True
    #Close the connection either way
    finally:
        db.session.close()
    #If an error occurred, flash an error message
    if error:
        flash('An error occurred and the artist was not listed. Please try again.')
    #If there was no error, alert the user the artist was listed
    if not error:
        # on successful db insert, flash success
        flash('Artist ' + data['name'] + ' was successfully listed!')
    return render_template('pages/home.html')
# Shows
# ----------------------------------------------------------------
@app.route('/shows')
def shows():
    """List every show with its artist and venue information."""
    # displays list of shows at /shows.
    # NOTE(review): Artist.name and Venue.name are selected without labels, so
    # the two columns collide on the attribute 'name' -- the template
    # presumably reads the row positionally; confirm before relying on row.name.
    data = db.session.query(Show.artist_id, Show.venue_id, Show.start_time,
        Artist.name, Artist.image_link , Venue.name).join(Artist).join(Venue).all()
    return render_template('pages/shows.html', shows=data)
@app.route('/shows/search', methods=['POST'])
def search_shows():
    """Search shows whose artist OR venue name contains the term."""
    # Gets the search term from the text field and searches the database
    search_term = request.form.get('search_term', '')
    search_results = db.session.query(Show.artist_id, Show.venue_id, Show.start_time,
        Artist.name, Artist.image_link, Venue.name).join(Artist).join(Venue).filter(Artist.name.ilike('%' + search_term + '%') | Venue.name.ilike('%' + search_term + '%')).all()
    # Count derived from the rows already fetched (the original re-ran the
    # whole query just to count it); the leftover debug print() is gone too.
    num_search_results = len(search_results)
    return render_template('pages/search_shows.html', shows=search_results, search_term=search_term, num_search_results=num_search_results)
@app.route('/shows/create')
def create_shows():
    """Render the blank new-show form."""
    return render_template('forms/new_show.html', form=ShowForm())
@app.route('/shows/create', methods=['POST'])
def create_show_submission():
    """Create a new show linking an artist to a venue at a start time."""
    # called to create new shows in the db, upon submitting new show listing form
    show_venue = request.form.get('venue_id')
    show_artist = request.form.get('artist_id')
    # NOTE(review): assumes the form posts 'YYYY-MM-DD HH:MM:SS'; any other
    # format raises ValueError *before* the try block -- confirm the form widget.
    show_time = datetime.strptime(request.form.get('start_time'), '%Y-%m-%d %H:%M:%S')
    error = False
    #Try to add the data to the database
    try:
        show = Show(venue_id=show_venue, artist_id=show_artist,
            start_time=show_time)
        db.session.add(show)
        db.session.commit()
    #If there's an error, rollback the session
    except Exception as e:
        db.session.rollback()
        error = True
        print(e)
    #Close the connection either way
    finally:
        db.session.close()
    #If an error occurred, flash an error message
    if error:
        flash('An error occurred and the show was not listed. Please try again.')
    #If there was no error, alert the user the show was listed
    if not error:
        # on successful db insert, flash success
        flash('Show was successfully listed!')
    return render_template('pages/home.html')
@app.errorhandler(404)
def not_found_error(error):
    """Render the custom 404 page with the proper status code."""
    body = render_template('errors/404.html')
    return body, 404
@app.errorhandler(500)
def server_error(error):
    """Render the custom 500 page with the proper status code."""
    body = render_template('errors/500.html')
    return body, 500
# Outside debug mode, mirror INFO-and-above log records to error.log.
if not app.debug:
    file_handler = FileHandler('error.log')
    file_handler.setFormatter(
        Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')
    )
    app.logger.setLevel(logging.INFO)
    file_handler.setLevel(logging.INFO)
    app.logger.addHandler(file_handler)
    app.logger.info('errors')
#----------------------------------------------------------------------------#
# Launch.
#----------------------------------------------------------------------------#
# Default port:
if __name__ == '__main__':
    # Run Flask's built-in development server on the default port (5000).
    app.run()
# Or specify port manually:
'''
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
'''
| UTF-8 | Python | false | false | 20,906 | py | 2 | app.py | 2 | 0.642591 | 0.638477 | 0 | 521 | 39.126679 | 177 |
kcarnold/suggestion | 4,681,514,376,088 | 92e3620f92f3ca3fdbe5014bdabca2b599b18bad | 99ef323acf05ae8c76f8ade8d2fe03d455c00302 | /annotation_ui/make_persuasive_data.py | 2310fd0c820366516db2fffa8b5dad3bd5474ba9 | [
"LicenseRef-scancode-warranty-disclaimer",
"MIT"
]
| permissive | https://github.com/kcarnold/suggestion | b166fd3d986eace138a9d6c877ee4f71d94a796a | 27650cbf724b77361f8f4e609774ecb9bcf9e3c9 | refs/heads/master | 2021-01-13T15:24:43.195938 | 2018-12-21T23:02:31 | 2018-12-21T23:02:31 | 76,397,078 | 1 | 0 | null | false | 2017-02-21T16:19:02 | 2016-12-13T20:48:02 | 2017-02-06T00:54:41 | 2017-02-21T16:19:02 | 133 | 1 | 0 | 0 | Python | null | null | #!/usr/bin/env python
import json
import pandas as pd
import toolz
def _write_batched_tasks(todo_path, out_csv, batch_size=6):
    """Group annotation todos into fixed-size batches and write one
    JSON-encoded batch per CSV row (column 'task').

    A short final batch is padded with items recycled from the first batch
    so that every row carries exactly *batch_size* tasks. Extracted because
    the original script duplicated this whole pipeline verbatim for the
    'persuasive' and 'avoid' datasets.
    """
    todos = json.load(open(todo_path))
    print("Total:", len(todos))
    batches = list(toolz.partition_all(batch_size, todos))
    # Show the distribution of batch sizes (only the last batch may be short).
    print(pd.Series([len(batch) for batch in batches]).value_counts())
    if len(batches[-1]) != batch_size:
        print("Tacking on extra to the last batch.")
        batches[-1] = (batches[-1] + batches[0])[:batch_size]
    assert len(batches[-1]) == batch_size
    pd.DataFrame(dict(task=[json.dumps(batch) for batch in batches])).to_csv(out_csv, index=False)


_write_batched_tasks('../gruntwork/persuade_0_persuasive_anno_todo.json', 'persuasive-task.csv')
_write_batched_tasks('../gruntwork/persuade_0_avoid_anno_todo.json', 'avoid-task.csv')
| UTF-8 | Python | false | false | 1,117 | py | 139 | make_persuasive_data.py | 112 | 0.703671 | 0.691137 | 0 | 26 | 41.961538 | 108 |
rezabanitaba/ACIT2515_ASSIGNMENT4 | 10,290,741,651,625 | 187976775fd00001dfd8f1f9da9d862c7f9ab01d | 756a498df559c170e0cfcc34c7f5938441a3ae04 | /drink.py | be0400719156e98b05f164a692f077be5d13cddf | []
| no_license | https://github.com/rezabanitaba/ACIT2515_ASSIGNMENT4 | 23d03ba4e9f224c7bad008ddd8f609f8c6a1a977 | a4f57d3f54622354f6118d54ccbf9c881b3245ea | refs/heads/master | 2022-12-22T02:15:27.253339 | 2019-12-06T19:54:08 | 2019-12-06T19:54:08 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from abstract_menu_item import AbstractMenuItem
from sqlalchemy import Column, String, Integer, Float, DateTime
class Drink(AbstractMenuItem):
    """Menu item of type drink (manufacturer, size, fizzy/hot flags)."""
    MENU_ITEM_TYPE = "drink"
    manufacturer = Column(String(100))
    size = Column(String(20))
    is_fizzy = Column(Integer)
    is_hot = Column(Integer)

    def __init__(self, menu_item_name, menu_item_no, date_added, price, calories, manufacturer, size, is_fizzy, is_hot):
        """Initialize a drink; the shared fields go to AbstractMenuItem."""
        super().__init__( menu_item_name, menu_item_no, date_added, price, calories, Drink.MENU_ITEM_TYPE)
        self.manufacturer = manufacturer
        self.size = size
        self.is_fizzy = is_fizzy
        self.is_hot = is_hot

    def to_dict(self):
        """Returns a dictionary representation of a menu item of type drink."""
        item_dict = {
            "menu_item_name": self.menu_item_name,
            "menu_item_no": self.menu_item_no,
            "date_added": self.date_added.strftime("%Y-%m-%d"),
            "price": self.price,
            "calories": self.calories,
            "manufacturer": self.manufacturer,
            "size": self.size,
            "is_fizzy": self.is_fizzy,
            "is_hot": self.is_hot,
            "type": self.get_type()
        }
        return item_dict

    def menu_item_description(self):
        """Return a human-readable description of the drink.

        BUGFIX: this method referenced underscored attributes
        (self._is_fizzy, self._menu_item_name, ...) that are never assigned
        anywhere, so every call raised AttributeError. The real attributes
        have no underscore prefix.
        NOTE(review): the `is True` / `is False` identity checks only match
        real booleans; rows loaded from the DB store these flags as Integer,
        in which case no branch matches and None is returned -- confirm intent.
        """
        if self.is_fizzy is True:
            return "%s is a fizzy drink item with menu index %s added on %s with the price of %s, containing %s calories made by %s and is %s ml " % (self.menu_item_name, self.menu_item_no, self.date_added, self.price, self.calories, self.manufacturer, self.size)
        elif self.is_fizzy is False:
            return "%s is a non-fizzy drink item with menu index %s added on %s with the price of %s, containing %s calories made by %s and is %s ml " % (self.menu_item_name, self.menu_item_no, self.date_added, self.price, self.calories, self.manufacturer, self.size)
        elif self.is_hot is True:
            return "%s is a hot drink item with menu index %s added on %s with the price of %s, containing %s calories made by %s and is %s ml " % (self.menu_item_name, self.menu_item_no, self.date_added, self.price, self.calories, self.manufacturer, self.size)
        elif self.is_hot is False:
            return "%s is a cold drink item with menu index %s added on %s with the price of %s, containing %s calories made by %s and is %s ml " % (self.menu_item_name, self.menu_item_no, self.date_added, self.price, self.calories, self.manufacturer, self.size)

    def get_type(self):
        """ returns menu item type """
        return Drink.MENU_ITEM_TYPE

    def get_manufacturer(self):
        """ returns the drink's manufacturer """
        return self.manufacturer

    def get_size(self):
        """ returns the drink's size """
        return self.size
| UTF-8 | Python | false | false | 2,815 | py | 12 | drink.py | 12 | 0.609591 | 0.607815 | 0 | 57 | 47.280702 | 274 |
MarshallPramukh/TableauWorkshop | 13,357,348,327,154 | 89162dcbcb26216d40cbb74feda967722c2cfd70 | 40e49e9be32a1a8424440d038dc3270b06739885 | /GUI/SixthProj.py | 79282c82d61d361fcf3035975d6777a298e40fef | []
| no_license | https://github.com/MarshallPramukh/TableauWorkshop | 70244bef4b9aaefb925917af67f00779d68e5653 | bf96db755e175c317ea4e091b8e6e91c74b8d99f | refs/heads/master | 2020-04-29T07:23:03.806643 | 2019-03-16T09:36:34 | 2019-03-16T09:36:34 | 175,951,073 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from tkinter import *
class myBut:
    """Two-button demo frame: 'Print' writes a message, 'Quit' exits the loop."""

    def __init__(self, master):
        container = Frame(master)
        container.pack()
        self.printBut = Button(container, text="Print", command=self.printMsg)
        self.printBut.pack(side=LEFT)
        self.quitBut = Button(container, text="Quit", command=container.quit)
        self.quitBut.pack(side=LEFT)

    def printMsg(self):
        """Callback for the Print button."""
        print("This thing works !!!")
# Build the root window, attach the demo buttons, and start the event loop.
root=Tk()
b=myBut(root)
root.mainloop()
| UTF-8 | Python | false | false | 437 | py | 12 | SixthProj.py | 11 | 0.629291 | 0.629291 | 0 | 18 | 23.277778 | 72 |
mpatlasov/antlir | 7,413,113,565,428 | 77df95b36cc0f1cdb3f20fd29dd5ec01ff3d1f16 | 166a84dd531cfcee1b0cb9321605661316faf435 | /antlir/bzl/tests/shapes/test_shape_bzl.py | 201d1203baff7d3a136ec7964e0fd8f2d85e8b50 | [
"MIT"
]
| permissive | https://github.com/mpatlasov/antlir | 5e1848a32ddc168f43b8aa0575e50c8a9ba36fc0 | 3bdd4731c60e5ee7397f1d2eb3822171c03540a6 | refs/heads/main | 2023-01-03T05:17:09.442210 | 2020-10-28T00:28:51 | 2020-10-28T00:30:28 | 307,865,454 | 0 | 0 | null | true | 2020-10-28T00:43:33 | 2020-10-28T00:43:33 | 2020-10-28T00:30:40 | 2020-10-28T00:30:38 | 15,394 | 0 | 0 | 0 | null | false | false | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from .shape_bzl import Fail, _check_type, _codegen_shape, shape, struct
class TestShapeBzl(unittest.TestCase):
    """Tests for the shape.bzl Starlark helpers: type checks, shape.new, codegen."""
    def setUp(self):
        # Never truncate assertion diffs -- the codegen comparison is long.
        self.maxDiff = None
    def test_check_type(self):
        """_check_type returns None on a match and an error message otherwise."""
        def check_type(x, t):
            res = _check_type(x, t)
            assert res is None, res
        # (value, type) pairs that must type-check.
        for x, t in (
            (2, int),
            (False, bool),
            ("hello", str),
            ("hello", shape.field(str)),
            ("hello", shape.field(str, optional=True)),
            (None, shape.field(str, optional=True)),
            ({"a": "b"}, shape.dict(str, str)),
            ("/hello/world", shape.path()),
        ):
            with self.subTest(x=x, t=t):
                check_type(x, t)
        # (value, type) pairs that must be rejected.
        for x, t in (
            (2, bool),
            ("hello", int),
            (True, shape.field(str)),
            ("hello", shape.field(int, optional=True)),
            ({"a": 1}, shape.dict(str, str)),
            ({1: "b"}, shape.dict(str, str)),
            ("nope", shape.dict(str, str)),
            ("nope", shape.list(str)),
            ("nope", shape.tuple(str)),
            (1, shape.path()),
        ):
            with self.subTest(x=x, t=t):
                with self.assertRaises(AssertionError):
                    check_type(x, t)
    def test_shape_with_defaults(self):
        """A field default is used when omitted and overridden when supplied."""
        t = shape.shape(answer=shape.field(int, default=42))
        self.assertEqual(shape.new(t), struct(answer=42))
        self.assertEqual(shape.new(t, answer=3), struct(answer=3))
    def test_simple_shape(self):
        """shape.new rejects wrongly-typed values and wraps valid ones in a struct."""
        t = shape.shape(answer=int)
        for answer in ("hello", True, {"a": "b"}):
            with self.subTest(answer=answer):
                with self.assertRaises(Fail):
                    shape.new(t, answer=answer)
        self.assertEqual(shape.new(t, answer=42), struct(answer=42))
    def test_nested_simple_shape(self):
        """Type checking also applies one level down, inside a nested shape."""
        t = shape.shape(nested=shape.shape(answer=int))
        for answer in ("hello", True, {"a": "b"}):
            with self.subTest(answer=answer):
                with self.assertRaises(Fail):
                    shape.new(t, nested=shape.new(t.nested, answer=answer))
        self.assertEqual(
            shape.new(t, nested=shape.new(t.nested, answer=42)),
            struct(nested=struct(answer=42)),
        )
    def test_simple_list(self):
        """Every element of a shape.list field must match the element type."""
        t = shape.shape(lst=shape.list(int))
        self.assertEqual(shape.new(t, lst=[1, 2, 3]), struct(lst=[1, 2, 3]))
        with self.assertRaises(Fail):
            shape.new(t, lst=[1, 2, "3"])
    def test_simple_dict(self):
        """Both keys and values of a shape.dict field are type-checked."""
        t = shape.shape(dct=shape.dict(str, int))
        self.assertEqual(
            shape.new(t, dct={"a": 1, "b": 2}), struct(dct={"a": 1, "b": 2})
        )
        with self.assertRaises(Fail):
            shape.new(t, dct={"a": "b"})
    def test_simple_tuple(self):
        """shape.tuple checks each position against its declared type."""
        t = shape.shape(tup=shape.tuple(str, int))
        self.assertEqual(
            shape.new(t, tup=("hello", 1)), struct(tup=("hello", 1))
        )
        with self.assertRaises(Fail):
            shape.new(t, tup=("hello", "2"))
    def test_nested_list(self):
        """A list of shapes round-trips through shape.new."""
        t = shape.shape(lst=shape.list(shape.shape(answer=int)))
        self.assertEqual(
            shape.new(t, lst=[shape.new(t.lst.item_type, answer=42)]),
            struct(lst=[struct(answer=42)]),
        )
    def test_nested_dict(self):
        """A dict whose values are shapes round-trips through shape.new."""
        t = shape.shape(dct=shape.dict(str, shape.shape(answer=int)))
        self.assertEqual(
            shape.new(t, dct={"a": shape.new(t.dct.item_type[1], answer=42)}),
            struct(dct={"a": struct(answer=42)}),
        )
    def test_nested_collection_with_shape(self):
        """Shapes nest through mixed collections (dict of list of shape)."""
        bottom = shape.shape(answer=int)
        t = shape.shape(dct=shape.dict(str, shape.list(bottom)))
        self.assertEqual(
            shape.new(t, dct={"a": [shape.new(bottom, answer=42)]}),
            struct(dct={"a": [struct(answer=42)]}),
        )
    def test_codegen(self):
        # the generated code is tested in test_shape.py, but this is our
        # opportunity to test it as text
        # NOTE: the mangled inner-class names below are content hashes emitted
        # by the codegen; they must match byte-for-byte.
        nested = shape.shape(inner=bool)
        t = shape.shape(
            hello=str,
            world=shape.field(str, optional=True),
            answer=shape.field(int, default=42),
            file=shape.path(),
            nested=shape.field(nested, default=shape.new(nested, inner=True)),
            dct=shape.dict(str, str),
            lst=shape.list(int),
            tup=shape.tuple(bool, int, str),
            nested_lst=shape.list(shape.shape(inner_lst=bool)),
            nested_dct=shape.dict(str, shape.shape(inner_dct=bool)),
            dct_of_lst_of_shape=shape.dict(
                str, shape.list(shape.shape(answer=int))
            ),
        )
        code = "\n".join(_codegen_shape(t, "shape"))
        self.assertEqual(
            code,
            """class shape(Shape):
    __GENERATED_SHAPE__ = True
    hello: str
    world: Optional[str]
    answer: int = 42
    file: Path
    class _2UNYP6wnsQdfqkEJEKDmwaEjpoGm8_8tlX3BIHNt_sQ(Shape):
        __GENERATED_SHAPE__ = True
        inner: bool
    nested: _2UNYP6wnsQdfqkEJEKDmwaEjpoGm8_8tlX3BIHNt_sQ = _2UNYP6wnsQdfqkEJEKDmwaEjpoGm8_8tlX3BIHNt_sQ(**{'inner': True})
    dct: Mapping[str, str]
    lst: Tuple[int, ...]
    tup: Tuple[bool, int, str]
    class _NRjZd_W5gdohVquSVb4iz3YwOUh_dtUKmLgIHb4h_m0(Shape):
        __GENERATED_SHAPE__ = True
        inner_lst: bool
    nested_lst: Tuple[_NRjZd_W5gdohVquSVb4iz3YwOUh_dtUKmLgIHb4h_m0, ...]
    class _ZOuD9rKDIF_qItVd5ib0hWFXRe4UKS1dPdfwP_rEGl0(Shape):
        __GENERATED_SHAPE__ = True
        inner_dct: bool
    nested_dct: Mapping[str, _ZOuD9rKDIF_qItVd5ib0hWFXRe4UKS1dPdfwP_rEGl0]
    class __wWKYeDaABhdYr5uCMdTzSclY0GG2FUB0OvzGPn42OE(Shape):
        __GENERATED_SHAPE__ = True
        answer: int
    dct_of_lst_of_shape: Mapping[str, Tuple[__wWKYeDaABhdYr5uCMdTzSclY0GG2FUB0OvzGPn42OE, ...]]""",
        )
| UTF-8 | Python | false | false | 6,070 | py | 32 | test_shape_bzl.py | 12 | 0.563097 | 0.546293 | 0 | 165 | 35.787879 | 120 |
weirdname404/async-console-game | 17,463,337,028,076 | cf0f3d5371bbf9d9f8ee23a50e0cebf089c50f5a | 5a778ea59c529b22719339c826585281e02bfa8e | /utils/curses_tools.py | 7b6fe7900a3f3fb66126910cf2545ce136178b80 | []
| no_license | https://github.com/weirdname404/async-console-game | f1bf981b77b834fdd3ea27d263e946e20ea1efb7 | 4e0325040cabf6681823b6b93a24757d375b3172 | refs/heads/master | 2023-01-15T20:38:31.967069 | 2020-11-17T20:23:28 | 2020-11-17T20:23:28 | 266,588,302 | 1 | 0 | null | false | 2020-11-17T20:23:30 | 2020-05-24T17:12:50 | 2020-10-21T16:42:22 | 2020-11-17T20:23:29 | 52 | 1 | 0 | 0 | Python | false | false | import random
import config
from core.types import Coordinate, Size
from typing import Iterator
SPACE_KEY_CODE = 32
LEFT_KEY_CODE = 260
RIGHT_KEY_CODE = 261
UP_KEY_CODE = 259
DOWN_KEY_CODE = 258
def read_controls(canvas):
"""Read keys pressed and returns tuple witl controls state."""
rows_direction = columns_direction = 0
space_pressed = False
while True:
pressed_key_code = canvas.getch()
if pressed_key_code == -1:
# https://docs.python.org/3/library/curses.html#curses.window.getch
break
if pressed_key_code == UP_KEY_CODE:
rows_direction = -1
if pressed_key_code == DOWN_KEY_CODE:
rows_direction = 1
if pressed_key_code == RIGHT_KEY_CODE:
columns_direction = 1
if pressed_key_code == LEFT_KEY_CODE:
columns_direction = -1
if pressed_key_code == SPACE_KEY_CODE:
space_pressed = True
break
return rows_direction, columns_direction, space_pressed
def draw_frame(canvas,
               start_x: int,
               start_y: int,
               text: str,
               negative: bool = False):
    """Render multiline *text* on *canvas* starting at (start_x, start_y).

    When ``negative`` is True the same cells are overwritten with spaces,
    which erases a frame that was drawn earlier.  Cells outside the canvas
    and space characters are skipped.
    """
    height, width = canvas.getmaxyx()

    for row, line in enumerate(text.splitlines(), round(start_y)):
        if row < 0:
            continue
        if row >= height:
            break

        for column, symbol in enumerate(line, round(start_x)):
            if column < 0:
                continue
            if column >= width:
                break
            if symbol == ' ':
                continue

            # Writing the bottom-right cell makes curses raise an error:
            # https://docs.python.org/3/library/curses.html#curses.window.addch
            if row == height - 1 and column == width - 1:
                continue

            canvas.addch(row, column, symbol if not negative else ' ')
def clean_draw(canvas, prev_xy, xy, prev_frame, frame, draw=True):
    """Erase *prev_frame* at *prev_xy*, then optionally draw *frame* at *xy*."""
    prev_x, prev_y = prev_xy
    new_x, new_y = xy

    if prev_frame:
        draw_frame(canvas, prev_x, prev_y, prev_frame, negative=True)
    if draw:
        draw_frame(canvas, new_x, new_y, frame)
def get_frame_size(text: str) -> Size:
    """Return the size of a multiline fragment as (width, height).

    Width is the length of the longest line; height is the line count.
    """
    lines = text.splitlines()
    widest = max(len(line) for line in lines)
    return widest, len(lines)
def get_random_coordinate(max_x: int,
                          max_y: int,
                          density: float = None) -> Iterator[Coordinate]:
    """Yield distinct random coordinates inside a (max_x, max_y) area.

    Roughly ``max_x * max_y * density`` points are attempted; duplicates
    are skipped (not re-rolled), so fewer points may be produced.  Points
    stay one cell away from every border.
    """
    if density is None:
        density = config.STAR_DENSITY

    seen = set()
    for _ in range(int(max_x * max_y * density)):
        coordinate = random.randint(1, max_x - 2), random.randint(1, max_y - 2)
        if coordinate not in seen:
            seen.add(coordinate)
            yield coordinate
| UTF-8 | Python | false | false | 3,136 | py | 18 | curses_tools.py | 17 | 0.563538 | 0.551405 | 0 | 120 | 25.1 | 79 |
lfparis/forge-python-wrapper | 18,811,956,791,151 | 17280df702ee62a9d502be7227fa2b400f9b413b | 6111b5292672efae5b7c8940f8c4721fc74028ff | /forge/extra/decorators.py | fbd2a1bcf6ae9a6c8c548bc6a5b0bfc96c06e360 | [
"MIT"
]
| permissive | https://github.com/lfparis/forge-python-wrapper | 22d43ce37e7a1fc0a2141a9d71e1d119cab4d680 | 63ee20a1c1b23676201a23f4fca31c1b30e183b1 | refs/heads/master | 2023-09-04T23:04:33.654977 | 2021-11-23T15:22:36 | 2021-11-23T15:22:36 | 240,263,540 | 14 | 7 | MIT | false | 2021-11-23T15:22:37 | 2020-02-13T13:12:30 | 2021-11-23T15:18:22 | 2021-11-23T15:22:36 | 53,044 | 13 | 4 | 2 | Python | false | false | # -*- coding: utf-8 -*-
"""Validation Decorators"""
from __future__ import absolute_import
from datetime import datetime
from functools import wraps
def _async_validate_token(func):
"""DM & HQ"""
@wraps(func)
async def inner(self, *args, **kwargs):
now = datetime.now()
timedelta = int((now - self.app.auth.timestamp).total_seconds()) + 1
if timedelta >= int(self.app.auth.expires_in):
self.app.auth.timestamp = now
self.app.auth.refresh()
self.app._session.headers = self.app.auth.header
return await func(self, *args, **kwargs)
return inner
| UTF-8 | Python | false | false | 634 | py | 31 | decorators.py | 25 | 0.615142 | 0.611987 | 0 | 24 | 25.416667 | 76 |
wdwoodee/pyTest | 15,642,270,936,849 | f61df487edd3f04854bd4701842274c5b26907eb | ca8d6b001fb3a2550bba689a97e96194df644abb | /test/make file.py | 1fc6f911861cf6d02c6c5aa102353ce49bf9fc21 | []
| no_license | https://github.com/wdwoodee/pyTest | 3681d60154daeaa96d38a233052b1cda3ec6829e | a7229a9ca97d6346e387ddae5a5b03477df1c0ef | refs/heads/master | 2016-04-13T01:46:57.604493 | 2016-03-03T02:46:17 | 2016-03-03T02:46:17 | 45,385,382 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os
# Build a nested directory chain c:/1/2/.../10, one directory per level.
path='c:/'
for i in range(1,11):
    # Each pass descends one level, so every mkdir happens inside the
    # directory created by the previous iteration.
    path=os.path.join(path,str(i))
    os.mkdir(path)  # raises FileExistsError/OSError if the level already exists
    # create the next nested folder inside this one on the following pass
| UTF-8 | Python | false | false | 119 | py | 192 | make file.py | 124 | 0.630252 | 0.605042 | 0 | 8 | 13.875 | 34 |
OFRIN/PyTorch_Tutorials | 14,139,032,349,566 | 9d6b394d603456f4e31377c9fa9efc3f964b2045 | 5f5abb30d86254a81f55c7837c3ea3fef84a21c9 | /2. DeepLearning Tutorials/2.1. Dataset/2.1.2. DataAugmentation/main.py | 7df9cae664f4aaac84137f08c34dd62beae910e5 | []
| no_license | https://github.com/OFRIN/PyTorch_Tutorials | 01f38342801a9617c3a544c68bcca36b2cd0dffd | e050a416ce408203c546042f032ce554baa72ce2 | refs/heads/master | 2023-01-09T01:27:44.141900 | 2020-11-19T09:34:44 | 2020-11-19T09:34:44 | 296,813,833 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os
import cv2
import glob
# from opencv_transforms import transforms
import numpy as np
from PIL import Image
import torchvision
from torchvision import datasets, models, transforms
# Augmentation pipelines.  torchvision transforms operate on PIL images,
# so each pipeline converts ndarray -> PIL image -> transform -> ndarray.
random_crop_fn = transforms.Compose([
    Image.fromarray,
    transforms.RandomResizedCrop(224),
    np.asarray
])
random_hflip_fn = transforms.Compose([
    Image.fromarray,
    transforms.RandomHorizontalFlip(),
    np.asarray
])
# Dataset layout assumed: <root_dir>/<class_name>/<image files>.
root_dir = '../../Toy_Dataset/'
for class_name in os.listdir(root_dir):
    for image_path in glob.glob(root_dir + class_name + '/*'):
        image = cv2.imread(image_path)  # BGR ndarray; None if the file is unreadable
        crop_image = random_crop_fn(image)
        hflip_image = random_hflip_fn(image)
        # Show original vs. augmented side windows; block until a key press.
        cv2.imshow('original', image)
        cv2.imshow('crop', crop_image)
        cv2.imshow('flip', hflip_image)
        cv2.waitKey(0)
| UTF-8 | Python | false | false | 850 | py | 61 | main.py | 50 | 0.662353 | 0.650588 | 0 | 38 | 21.368421 | 62 |
aag147/rcnn | 8,607,114,483,659 | 383ec2d1f17cf69e91c76b67265a0f9829cebcf8 | eb1e491378eb5bb68461a38b38a999e4c4495dbe | /detection/scripts/save_hoi_input_recall.py | 239e96188b5b8d4aeb2988fa5fe67d1cda5582da | []
| no_license | https://github.com/aag147/rcnn | 78b5664124d8821574d825b1f27cddc1f40532cb | 8f98b33b11e9c5dd8e1bc1923510c15a8de52354 | refs/heads/master | 2020-03-08T16:25:39.313800 | 2018-08-05T18:34:41 | 2018-08-05T18:34:41 | 128,239,222 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Thu May 17 13:48:23 2018
@author: aag14
"""
import sys
sys.path.append('../../../')
sys.path.append('../../')
sys.path.append('../../shared/')
sys.path.append('../models/')
sys.path.append('../filters/')
sys.path.append('../data/')
import extract_data
from rpn_generators import DataGenerator
import filters_helper as helper,\
filters_rpn,\
filters_hoi
import hoi_test
import numpy as np
import utils
import cv2 as cv
if True:
    # Load data
    data = extract_data.object_data(False)
    cfg = data.cfg
    obj_mapping = data.class_mapping
    hoi_mapping = data.hoi_labels
    # Create batch generators
#    genTrain = DataGenerator(imagesMeta = data.trainGTMeta, cfg=cfg, data_type='train', do_meta=True)
    genTest = DataGenerator(imagesMeta = data.valGTMeta, cfg=cfg, data_type='test', do_meta=True)
    generator = genTest
    print(cfg.my_input_path+'testnew/')
    sys.stdout.flush()
#if True:
    # Per-HOI-class ground-truth and true-positive counters for recall.
    nb_total = np.zeros(cfg.nb_hoi_classes)
    nb_tp = np.zeros(cfg.nb_hoi_classes)
    nb_empty = 0
    nb_total_preds = 0
    for j, (imageID, imageMeta) in enumerate(generator.imagesMeta.items()):
#        imageID = 'HICO_train2015_00028567'
#        imageMeta = generator.imagesMeta[imageID]
        if (j+1) % 100 == 0:
            utils.update_progress_new((j+1), generator.nb_images, imageID)
        img = cv.imread(generator.images_path + imageMeta['imageName'])
        X, imageDims = filters_rpn.prepareInputs(imageMeta, generator.images_path, cfg)
        objs = imageMeta['objects']
        gt_rels = imageMeta['rels']
        # Ground-truth boxes scaled to feature-map coordinates.
        gtbboxes = helper._transformGTBBox(objs, obj_mapping, None, scale=imageDims['scale'], rpn_stride=cfg.rpn_stride, dosplit=False)
        checks = np.zeros(len(gt_rels))
        if np.max(gtbboxes[:,2]) > 2+imageDims['output_shape'][1] or np.max(gtbboxes[:,3]) > 2+imageDims['output_shape'][0]:
            print('bad bbs', imageID, np.max(gtbboxes[:,2]), np.max(gtbboxes[:,3]), imageDims['output_shape'])
        imageInputs = utils.load_obj(cfg.my_input_path + 'testnew/' + imageID)
        if imageInputs is None:
            idxs = []
            nb_empty += 1
        else:
            # Keep only proposals marked with validation flag 3.
            # NOTE(review): boxes were stored scaled by 1000 — confirm.
            idxs = np.where(np.array(imageInputs['val_map'])==3)[0]
            hbboxes = np.array(imageInputs['hbboxes'])[idxs,:] / 1000.0
            obboxes = np.array(imageInputs['o_bboxes'])[idxs,:] / 1000.0
            labels = np.array(imageInputs['hoi_labels'])[idxs]
        nb_preds = len(idxs)
        for i in range(nb_preds):
            # Convert (x, y, w, h) to (x1, y1, x2, y2) for IoU computation.
            hbbox = np.copy(hbboxes[i,:4])
            hbbox[2] += hbbox[0]
            hbbox[3] += hbbox[1]
            obbox = np.copy(obboxes[i,:4])
            obbox[2] += obbox[0]
            obbox[3] += obbox[1]
            label = labels[i]
            nb_total_preds += 1
            gth_ols = helper._computeIoUs(hbbox, gtbboxes)
            gto_ols = helper._computeIoUs(obbox, gtbboxes)
#            print(gth_ols)
#            print(gto_ols)
            # A GT relation counts as recalled when both the human and the
            # object box overlap their GT boxes at IoU >= 0.5 and the
            # predicted labels include the GT interaction class.
            for gtidx, rel in enumerate(gt_rels):
                if checks[gtidx]:
                    continue
                if gth_ols[rel[0]] >= 0.5 and gto_ols[rel[1]] >= 0.5 and rel[2] in label:
                    checks[gtidx] = 1
                    nb_tp[rel[2]] += 1
#        print(checks, label, imageID)
        for rel in gt_rels:
            nb_total[rel[2]] += 1
        continue
        # NOTE(review): everything below this `continue` is unreachable
        # debug/visualisation code kept for manual inspection.
        import draw
        img = np.copy(X[0])
        img += cfg.PIXEL_MEANS
        img = img.astype(np.uint8)
        draw.drawGTBoxes(img, imageMeta, imageDims)
        Y_tmp = filters_hoi.loadData(imageInputs, imageDims, cfg)
        hbboxes, obboxes, target_labels, val_map = Y_tmp
#        hbboxes, obboxes, target_labels, val_map = filters_hoi.reduceTargets(Y_tmp, cfg)
        patterns = filters_hoi.createInteractionPatterns(hbboxes, obboxes, cfg)
        draw.drawPositiveHoI(img, hbboxes[0], obboxes[0], None, target_labels[0], imageMeta, imageDims, cfg, obj_mapping)
        hcrops, ocrops = filters_hoi.convertBB2Crop(X, hbboxes, obboxes, imageDims)
#        draw.drawPositiveCropHoI(hbboxes[0], obboxes[0], hcrops, ocrops, patterns[0], target_labels[0], imageMeta, imageDims, cfg, obj_mapping)
        break
        if j == 5:
            break
    # Per-class recall; classes with no true positive contribute 0.
    res = np.array([nb_tp[i] / nb_total[i] if nb_tp[i]>0 else 0 for i in range(cfg.nb_hoi_classes)])
    # "Rare" classes have fewer than 10 ground-truth instances.
    rare_idxs = np.where(nb_total<10)[0]
    unrare_idxs = np.where(nb_total>=10)[0]
    print('all', np.mean(res))
    print('rare', np.mean(res[rare_idxs]))
    print('unrare', np.mean(res[unrare_idxs]))
    print('nulls', nb_empty)
CoutinhoThiago/Linguagens-Forais-e-Automatos | 4,355,096,841,253 | d87b80c9bd40408492cfb529ea5f19ddf22b0695 | 303403b9b6511a907bf43af0c9eee3bf228a45e6 | /Sistemas Multimida/Atividades/Hello World! (Jude 2.0)/Hello World! (Jude 2.0).py | 388710b4c7547b9ff9546520728c26d0fe679519 | []
| no_license | https://github.com/CoutinhoThiago/Linguagens-Forais-e-Automatos | c0a05603eb188e4566f3a9791ad579a2b66e30e3 | b24a475edcbfcb4af31f53d763c82434ae0bc824 | refs/heads/main | 2023-08-15T01:51:24.302363 | 2023-07-11T21:33:30 | 2023-07-11T21:33:30 | 415,649,185 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | print("Hello World!")
print("Hello Jude 2.0!") | UTF-8 | Python | false | false | 46 | py | 241 | Hello World! (Jude 2.0).py | 189 | 0.673913 | 0.630435 | 0 | 2 | 22.5 | 24 |
ruthraprabhu89/Hiku-tester | 5,162,550,717,450 | 6e9ae94839a7efc0d8ccbeef85970a63ab84916e | c379fedab65fde80d1032c13145248b10ce29040 | /hiker.py | 8c8f244551d45fb1e516079ced2c1df8c0971d3b | []
| no_license | https://github.com/ruthraprabhu89/Hiku-tester | 3b6030632f75a21aa28d9072f30bd369470a52e2 | 87cda8141f14be79a2abcfe9f0de491348d98463 | refs/heads/master | 2022-10-07T00:07:08.300839 | 2020-05-31T23:24:57 | 2020-05-31T23:24:57 | 268,381,602 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import re
class Hiker:
    """Validator that checks whether a '/'-separated line is a 5-7-5 haiku."""

    def haiku(self,line):
        """Validate *line* as a haiku, print and return its syllable counts.

        The three haiku lines are separated by '/'.  Returns a tuple
        ``(syl_line1, syl_line2, syl_line3, 'Yes'|'No')`` and raises
        ValueError when the input breaks a formatting rule.
        """
        # NOTE(review): limit is 202 although the message says 200 —
        # presumably to allow for the two '/' separators; confirm.
        if(len(line)>202):
            raise ValueError('Haiku cannot be longer than 200 characters.invalid input line')
        if(any(x.isupper()for x in line)):
            raise ValueError('Haiku can only contain lower case letters. input contains Upper Case letters: invalid input line')
        if(any(x.isdigit()for x in line)):
            raise ValueError('Numbers are not allowed in a Haiku: invalid input line')
        string_check= re.compile('[@_!#$%^&*()<>?\|}{~:]')
        if string_check.search(line) is not None:
            raise ValueError('No special characters: invalid input line')
        lines = line.split('/')
        if not len(lines) == 3:
            raise ValueError('Haiku must only contain three lines. No of lines found:{}'.format(len(lines)))
        # Approximate syllable counting: each maximal run of consecutive
        # vowels (with 'y' treated as a vowel) counts as one syllable.
        syl = [0,0,0]
        for i in range(len(lines)):
            words = lines[i].split()
            for j in range(len(words)):
                is_vow = False
                cur_word = words[j]
                for k in range(len(cur_word)):
                    letter = cur_word[k]
                    if letter in ('a','e','i','o','u','y'):
                        if not is_vow == True:
                            syl[i] += 1
                            is_vow = True
                    else:
                        is_vow = False
            # A line with no vowels at all falls back to one syllable per word.
            if (syl[i] == 0):
                syl[i]=len(words)
        if (syl[0] == 5 and syl[1] == 7 and syl[2] == 5):
            result = 'Yes'
        else:
            result = 'No'
        print("{}/{}/{},{}".format(syl[0],syl[1],syl[2],result))
        return syl[0],syl[1],syl[2],result
if __name__ == "__main__":
    filepath = 'test.txt'
    try:
        # Fix: the original opened the file twice (leaking one handle) and
        # called `haiku(cnt, line)` as a free function with the wrong
        # arguments, which raised NameError.  Use a Hiker instance instead.
        with open(filepath) as fp:
            hiker = Hiker()
            for line in fp:
                # Strip the trailing newline so it does not count against
                # the length/character checks inside haiku().
                hiker.haiku(line.rstrip('\n'))
    except FileNotFoundError:
        # A missing sample file is not an error for this demo script.
        pass
cagdasaktay/PyQt5-Tutorials | 5,480,378,311,584 | 52396cc0d451bd19a6955e2fee0670e299ac9c42 | e287aa1088a611c19884ea92a54f4f260a5442db | /QCheckBox.py | 49bde6cd5477a81f5f3623d4d5b6babb2e6b2dec | []
| no_license | https://github.com/cagdasaktay/PyQt5-Tutorials | a130fec3711a8c9d41ac1c776cf9a74fafb13daf | baf61f98fc99d1e3fa77599010fa27040e1994d5 | refs/heads/master | 2023-07-17T08:15:26.049739 | 2021-09-08T13:01:07 | 2021-09-08T13:01:07 | 404,348,838 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
import sys
from PyQt5 import QtWidgets
def arayuz():
    """Build and run the demo window with three check boxes.

    Blocks inside the Qt event loop until the window is closed, then exits
    the process with the loop's return code.
    """
    # Fix: the original bound the QApplication to the name `object`,
    # shadowing the builtin; renamed to `app`.
    app = QtWidgets.QApplication(sys.argv)

    window = QtWidgets.QWidget()
    window.setWindowTitle("Arayüz")         # window title
    window.setGeometry(300, 300, 600, 480)  # position (300, 300), size 600x480

    # Three check boxes parented to the main window.
    kontrol1 = QtWidgets.QCheckBox(window)
    kontrol2 = QtWidgets.QCheckBox(window)
    kontrol3 = QtWidgets.QCheckBox(window)

    kontrol1.setText("Seçenek 1")
    kontrol2.setText("Seçenek 2")
    kontrol3.setText("Seçenek 3")

    kontrol1.move(100, 50)
    kontrol2.move(100, 70)
    kontrol3.move(100, 90)

    # The third option is shown but cannot be toggled by the user.
    kontrol3.setCheckable(False)

    window.show()
    sys.exit(app.exec_())


arayuz()
| UTF-8 | Python | false | false | 843 | py | 11 | QCheckBox.py | 11 | 0.695122 | 0.645122 | 0 | 35 | 22.428571 | 85 |
alisiahkoohi/survae_flows | 3,521,873,204,127 | 6b9d9afa749ca7f55ad7eafd6d345f695afe13dc | c8a9d66efb29a9a5d5a42b74c252eeb91f20e2f8 | /survae/tests/transforms/bijections/conditional/elementwise_linear.py | 40e759d8647fe0339cbbf16adc41c8de1563eddc | [
"MIT"
]
| permissive | https://github.com/alisiahkoohi/survae_flows | 60ebfa86506f4139ce1fd9508e9c1e0dbdd1bdff | e1747b05524c7ab540a211ed360ab3e67bc3e96d | refs/heads/master | 2022-12-19T15:18:42.587276 | 2020-09-17T12:34:51 | 2020-09-17T12:34:51 | 290,247,391 | 1 | 0 | MIT | true | 2020-08-25T15:08:02 | 2020-08-25T15:08:01 | 2020-08-25T14:00:53 | 2020-08-25T14:00:50 | 4,186 | 0 | 0 | 0 | null | false | false | import numpy as np
import torch
import torch.nn as nn
import torchtestcase
import unittest
from survae.transforms import ConditionalAdditiveBijection, ConditionalAffineBijection
from survae.tests.transforms.bijections.conditional import ConditionalBijectionTest
class ConditionalAdditiveBijectionTest(ConditionalBijectionTest):
    """Checks that ConditionalAdditiveBijection is a well-behaved bijection."""

    def test_bijection_is_well_behaved(self):
        """Round-trip and shape checks via the base-class helper."""
        batch_size = 10
        shape = (6,)
        x = torch.randn(batch_size, 6)
        # Context feeds nn.Linear(3, 6), producing an additive shift.
        context = torch.randn(batch_size, 3)
        # Numerical tolerance used by the inherited assertions.
        self.eps = 1e-6
        bijection = ConditionalAdditiveBijection(nn.Linear(3,6))
        self.assert_bijection_is_well_behaved(bijection, x, context, z_shape=(batch_size, *shape))
class ConditionalAffineBijectionTest(ConditionalBijectionTest):
    """Checks that ConditionalAffineBijection is a well-behaved bijection."""

    def test_bijection_is_well_behaved(self):
        """Round-trip and shape checks via the base-class helper."""
        batch_size = 10
        shape = (6,)
        x = torch.randn(batch_size, 6)
        # nn.Linear(3, 6*2) outputs both shift and scale (2 params per dim).
        context = torch.randn(batch_size, 3)
        # Numerical tolerance used by the inherited assertions.
        self.eps = 1e-6
        bijection = ConditionalAffineBijection(nn.Linear(3,6*2))
        self.assert_bijection_is_well_behaved(bijection, x, context, z_shape=(batch_size, *shape))
if __name__ == '__main__':
    # Allow running this test module directly with `python <file>`.
    unittest.main()
| UTF-8 | Python | false | false | 1,177 | py | 245 | elementwise_linear.py | 238 | 0.697536 | 0.681393 | 0 | 39 | 29.179487 | 98 |
pawbednarz/hash_tools | 15,556,371,576,739 | ee8b2189e9c4f3cd8c6d7409094bdc8de0b16b56 | 1a796c8579d486fc25f2fb1c6afce1dd6d291242 | /crack_hash.py | 05d08389497c924df798667dda58c2e924d9e35a | []
| no_license | https://github.com/pawbednarz/hash_tools | d6bfad59d5349fb1ec11853a19f281591e3cfdf0 | 5faad543b6acadf165dd531a7a4690b8e6971004 | refs/heads/master | 2020-12-12T17:06:34.989303 | 2020-07-11T19:58:44 | 2020-07-11T19:58:44 | 234,180,904 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python3
import hashlib
import argparse
from functools import partial
# add command line arguments i.e. brutemd5 --md5 /path/to/file <hash_value>
# refactor entire script to better code
def md5(word):
    """Return the hexadecimal MD5 digest of *word*."""
    return hashlib.md5(word.encode()).hexdigest()
def sha1(word):
    """Return the hexadecimal SHA-1 digest of *word*."""
    return hashlib.sha1(word.encode()).hexdigest()
def sha224(word):
    """Return the hexadecimal SHA-224 digest of *word*."""
    return hashlib.sha224(word.encode()).hexdigest()
def sha256(word):
    """Return the hexadecimal SHA-256 digest of *word*."""
    return hashlib.sha256(word.encode()).hexdigest()
def sha512(word):
hashSHA512 = hashlib.sha512()
hashSHA512.update(word.encode())
return hashSHA512.hexdigest()
def hash_mode(hash_type):
    """Map an algorithm name to its local digest helper (None if unknown)."""
    return {
        'md5': md5,
        'sha1': sha1,
        'sha224': sha224,
        'sha256': sha256,
        'sha512': sha512,
    }.get(hash_type)
# TODO better implementation of cli - make arguments like -host <host> -username <username>
# maybe its better to check hashes while reading the file than read, then iterate through it again
# define default set of files to search for hash?
def read_file(path_to_file):
    """Return the lines of *path_to_file* with trailing newlines removed.

    Prints a status message first and terminates the program when the
    file cannot be opened.
    """
    print('Getting data from ' + path_to_file + '...')
    try:
        # Context manager guarantees the handle is closed; the original
        # leaked a duplicate open() and never closed either handle.
        with open(path_to_file, "r", encoding="ISO-8859-1") as f:
            # rstrip('\n') instead of line[:-1]: the old slice chopped the
            # last character of a final line that has no trailing newline.
            return [line.rstrip('\n') for line in f]
    except OSError:
        print('Couldn\'t open file ' + path_to_file)
        quit()
def init_args():
    """Parse and return the command-line arguments for the cracker."""
    parser = argparse.ArgumentParser()
    algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha512']
    parser.add_argument(
        "hash_type",
        help="type of hash to crack (md5, sha1, sha224, sha256, sha512",
        choices=algorithms,
    )
    parser.add_argument("hash_value", help="value of hash to crack")
    parser.add_argument(
        "-w",
        help="path to file with words to check (default - /usr/share/wordlists/rockyou.txt",
    )
    return parser.parse_args()
def main():
    """Entry point: brute-force the supplied hash against a wordlist."""
    args = init_args()
    hash_func = hash_mode(args.hash_type)
    file_path = args.w if args.w else '/usr/share/wordlists/rockyou.txt'

    words = read_file(file_path)
    total = len(words)

    print('Checking words from wordlist...')
    for i, word in enumerate(words, start=1):
        # Progress counter on a single line (carriage return, no newline).
        print(str(i) + '/' + str(total), end = '\r')
        if args.hash_value == hash_func(word):
            print('\nFound solution for entered hash: ' + word)
            exit(0)
    print('\nCouldn\'t find any matching word.')


main()
| UTF-8 | Python | false | false | 2,910 | py | 2 | crack_hash.py | 2 | 0.565979 | 0.527835 | 0 | 89 | 31.696629 | 160 |
baquinn4/pdorg | 15,822,659,524,787 | c61a1dcf2d5625246ed1c57d2c629276e7582943 | 5f3f8566bcc16a154da636002301b37f4dc94bd9 | /pdorg/src/pdorg2.py | 73b23c3ae16639ac837e99a255842a6b11fb51b4 | []
| no_license | https://github.com/baquinn4/pdorg | 58b70b587c87e5ffc6accd5ea4db43df5cfe49c5 | 9f8810d4a6cee74cc7b4f6d52ebfca67c8ae553c | refs/heads/master | 2020-05-31T19:29:36.847299 | 2019-06-05T23:31:02 | 2019-06-05T23:31:02 | 190,456,646 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
os.chdir("../../")
try:
os.mkdir("snapchat")
except FileExistsError:
pass
| UTF-8 | Python | false | false | 91 | py | 4 | pdorg2.py | 3 | 0.637363 | 0.637363 | 0 | 9 | 8.888889 | 23 |
bsafran/proglab | 11,510,512,357,032 | 54f113c811cbab61f73f17583cb9d6771f6d264e | 8ef766ce76a61e927f6a2073704b6ea349611020 | /week2_Sympy/180401038_Ders02.py | 892a300e2b461df0e588df86470f1b1b9a4c6534 | []
| no_license | https://github.com/bsafran/proglab | 622c0789880cb542dc0a6fc0a97cf14b8b0cc9b8 | 9dd5efd522e81796696c39b879f70817106edcb5 | refs/heads/master | 2021-03-08T00:23:25.760639 | 2020-05-15T20:08:11 | 2020-05-15T20:08:11 | 246,306,557 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import sympy
from sympy import Symbol
from sympy import pprint
import sympy as sym
import sympy.plotting as syp
import sympy as sy
import matplotlib.pyplot as plt
# Symbolic variables for the Gaussian probability density function.
sigma = Symbol('sigma')
mu = Symbol('mu')
x = Symbol('x')
# Gaussian pdf: 1/sqrt(2*pi*sigma^2) * exp(-(x-mu)^2 / (2*sigma^2))
part_1 = 1/(sym.sqrt(2*sym.pi*sigma*sigma))
part_2 = sym.exp(-1*((x-mu)**2)/(2*sigma**2))
my_gauss_function = part_1*part_2
# NOTE(review): pprint() renders as a side effect and returns None, so the
# outer print() just prints None afterwards; same idea for syp.plot().
print(pprint(my_gauss_function))
print(syp.plot(my_gauss_function.subs({mu:1,sigma:3}), (x, -10, 10), title='gauss'))
# Sample the density numerically for mu=0, sigma=10 over [-50, 50).
x_values = []
y_values = []
for value in range(-50,50):
    y = my_gauss_function.subs({mu:0,sigma:10,x:value}).evalf()
    y_values.append(y)
    x_values.append(value)
    print(value,y)
plt.plot(x_values,y_values)
| UTF-8 | Python | false | false | 684 | py | 19 | 180401038_Ders02.py | 17 | 0.679825 | 0.646199 | 0 | 27 | 24.333333 | 84 |
mranish592/JOBFAIR | 13,984,413,564,688 | b9ef978c5f4dfccf85a21622b32ebb03d2e81b8a | 939ee8db49d61e212bec5da54660079d4c70eb3b | /jobfair_website/jobfair_app/admin.py | 0bd35e954edce368404cd43a8489f1a96aaa23cf | []
| no_license | https://github.com/mranish592/JOBFAIR | e11218fc52fd5b2c99729e70b4cc103592983ba8 | 81b0b25ab163aca850376401dbef89a6f0d1823a | refs/heads/main | 2023-04-13T23:07:17.802952 | 2021-04-27T13:52:19 | 2021-04-27T13:52:19 | 360,259,871 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.contrib import admin
from jobfair_app.models import Project, UserProfileInfo
# Register your models here.
# Expose Project and UserProfileInfo in the Django admin interface.
admin.site.register(Project)
admin.site.register(UserProfileInfo)
| UTF-8 | Python | false | false | 185 | py | 26 | admin.py | 10 | 0.832432 | 0.832432 | 0 | 6 | 29.833333 | 55 |
nicolausboby/Data-Stream-and-Graph-Mining | 17,841,294,155,825 | b6070bb1f891da343815ab267d852f0148a7823a | 7cd80e84362f760bfc88f795ef6bfea9b71bf891 | /twitter_stream.py | 67947ab0a02bcfc97d978dcde5936d3ba681a5f7 | []
| no_license | https://github.com/nicolausboby/Data-Stream-and-Graph-Mining | 2144d01c4a238ade3aee7df14dde62dd2d82d0aa | 1e79b289ca2a9913c15a108624f9ac39fff4b23f | refs/heads/master | 2020-04-10T04:43:53.826569 | 2018-12-16T02:10:15 | 2018-12-16T02:10:15 | 160,807,428 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import dotenv
import os
import json
import socket
import tweepy
from pathlib import Path
from signal import signal, SIGPIPE, SIG_DFL
from tweepy import OAuthHandler, Stream
from tweepy.streaming import StreamListener
# Load Twitter API credentials from a .env file next to this script.
env_path = Path('.') / '.env'
dotenv.load_dotenv(dotenv_path='./.env', verbose=True)
CONSUMER_KEY = os.getenv("OAUTH_KEY")
CONSUMER_SECRET = os.getenv("OAUTH_SECRET")
ACCESS_TOKEN = os.getenv("ACCESS_TOKEN")
ACCESS_SECRET = os.getenv("ACCESS_SECRET")
# Stream filter: track tweets mentioning the trade war in common spellings.
filter_words = ["trade war", "trade wars", "tradewar", "#tradewar", "tradewars", "#tradewars"]
class TwitterListener(StreamListener):
    """Tweepy stream listener that relays raw tweet JSON over a socket."""

    def __init__(self, conn):
        # Connected client socket; each tweet with a text field is sent to it.
        self.conn = conn

    def on_data(self, data):
        """Handle one raw stream message: log it and forward it to the client."""
        try:
            # Note: r'\n' removes the two-character sequence backslash+n
            # embedded inside the JSON text, not real newlines.
            data = data.replace(r'\n', '')
            json_data = json.loads(data)
            if "text" in json_data:
                username = json_data["user"]["screen_name"]
                print((username, json.dumps(json_data["text"])))
                self.conn.send(data.encode())
        except (ValueError, KeyError, OSError):
            # Bad JSON, unexpected payload shape, or a broken client socket.
            # Fix: the previous bare `except:` also swallowed programming
            # errors (and even KeyboardInterrupt).
            print("No Tweet")

    def on_error(self, status):
        """Stop the stream on HTTP 420 (rate limited); ignore other codes."""
        if status == 420:
            print("Stream Disconnected")
            return False
def sendTwitterData(conn):
    """Open the filtered Twitter stream and relay matching tweets to *conn*.

    Blocks for as long as the stream stays connected.
    """
    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_TOKEN, ACCESS_SECRET)
    api = tweepy.API(auth)

    listener = TwitterListener(conn)
    stream = tweepy.Stream(auth=api.auth, listener=listener)
    stream.filter(track=filter_words)
if __name__ == "__main__":
    # Socket endpoint (e.g. for a Spark Streaming consumer) from the env.
    host = os.getenv("HOST")
    port = int(os.getenv("PORT"))
    s = socket.socket()
    s.bind((host, port))
    print("Listening on port:", str(port))
    s.listen(5)
    # Blocks until exactly one client connects; that client then receives
    # the tweet stream.  NOTE(review): the socket is never closed.
    conn, addr = s.accept()
    print("Received request from:", str(addr))
    sendTwitterData(conn)
| UTF-8 | Python | false | false | 1,865 | py | 6 | twitter_stream.py | 2 | 0.629491 | 0.627346 | 0 | 63 | 28.603175 | 106 |
bertocarl/Gjenge-System | 1,640,677,550,672 | b77371cc52da51bb5882a1c10457046f7e6dcd9d | b30b54611b93ed3e0f7e41eb44183ced5e2a2720 | /backend/backend/settings/prod.py | e5b2695676546a660a48ce70aac40ce399221a6c | [
"MIT"
]
| permissive | https://github.com/bertocarl/Gjenge-System | cbd42e6cbdd1b702eaa34a4b8521453da3743657 | b5ead6085e5c698b8826704cd551306dc8f5249e | refs/heads/master | 2023-01-08T08:33:32.570637 | 2019-06-13T21:53:40 | 2019-06-13T21:53:40 | 188,376,302 | 0 | 0 | NOASSERTION | false | 2023-01-04T00:39:02 | 2019-05-24T07:41:09 | 2019-06-13T22:12:04 | 2023-01-04T00:39:01 | 6,581 | 0 | 0 | 34 | Python | false | false | from .local import * # noqa
STATIC_ROOT = '/home/berto/backend_static'
| UTF-8 | Python | false | false | 73 | py | 40 | prod.py | 31 | 0.69863 | 0.69863 | 0 | 3 | 23.333333 | 42 |
jbaillie80/code | 11,905,649,369,144 | 83199fe5e4f3598288f29a0d9a5e4d7470641e36 | 33b71af7cd0918997445586aee86c8007ab7d638 | /loops.py | 7a4421cb89b9444e63db15479ca0bc85622c1d1d | []
| no_license | https://github.com/jbaillie80/code | ada79ab0fe882f515a273373c99cba90c3e90a63 | 6245c9762a96dd342d5c16071e8b8775821e4fcd | refs/heads/master | 2020-12-28T11:56:58.436683 | 2020-03-16T01:06:30 | 2020-03-16T01:06:30 | 238,322,287 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | '''
For this assignment you should read the task, then below the task do what it asks you to do
based on what the task tells you do first.
EXAMPLE TASK:
'''
#EX) Declare a variable set to 3. Make a while loop that prints the variable
# you just created and decrements the variable by one each time through
# the loop. Meanwhile, make the loop run until the variable you created
# equals 0.
# Example answer: prints 3, 2, 1 and stops once i reaches 0.
i = 3;
while i > 0:
    print(i)
    i -= 1
'''
END OF EXAMPLE
'''
'''
START HERE
'''
'''While Loops'''
#1) Declare a variable set to 4. Make a while loop that prints the variable
#   you just created and decrements the variable by one each time through
#   the loop. Meanwhile, make the loop run until the variable you created
#   equals 1.
# Prints 4, 3, 2.
j = 4;
while j > 1:
    print(j)
    j -= 1
#2) Declare a variable set to 14. Make a while loop that prints the variable
#   you just created and increments the variable by one each time through
#   the loop. Meanwhile, make the loop run until the variable you created
#   equals 20.
# Prints 14 through 19.
k = 14;
while k < 20:
    print(k)
    k += 1
#3) Declare a variable set to 55. Make a while loop that prints the variable
#   you just created. Then make an if statement that makes the loop break when
#   the variable is equal to 50.
# NOTE(review): the task asks for an if/break at 50; this answer instead
# relies on the loop condition, printing 55 down to 51.
l = 55;
while l > 50:
    print(l)
    l -= 1
'''For Loops'''
#4) Create a list named sports. Put three sports into the list. Create
#   a for loop that prints each sport in the list
sports = ["lacrosse", "football", "soccer"]
for x in sports:
    print(x)
    # NOTE(review): "soccer" is the last element, so this break is redundant.
    if (x == "soccer"):
        break
#5) Create a for loop that loops through each letter in a string of one of your
#   favorite songs. Each iteration should print a letter of the word.
# Fix: the original list ended with `"l" "."` (adjacent string literals),
# which Python silently concatenates into the single element "l.", so the
# "." sentinel never appeared and the break condition could never fire.
song = ["T", "h", "e", " ", "F", "a", "l", "l", "."]
for y in song:
    print(y)
    if (y == "."):
        break
#6) Create a list named movies. Put five of your favorite movies into the list.
#   However, make sure one of the movies is Avatar.
#   Create a for loop that iterates over the list. In the loop print the movie
#   being looped over, but create an if statement that breaks out of the
#   loop if it is Avatar.
# NOTE(review): the list is named `movie` although the task says `movies`,
# and "Avatar" is also printed before the break stops the loop.
movie = ["Lego Movie", "Spare Parts", "Indiana Jones", "Ready Player One", "Avatar"]
for z in movie:
    print(z)
    if (z == "Avatar"):
        break
daseinpwt/nao-slam-monitor | 472,446,410,214 | 8b30e504ec95bf1c7f73893712501c1adb56efba | b59757fad318a17c496289f1235fe82ddbd237e9 | /control.py | b198c33d04ae9f17f5f342580c622b4896a6373f | []
| no_license | https://github.com/daseinpwt/nao-slam-monitor | f7ffc740c21b7620b0070177c67e942dc2ee6f6f | 4a8edd19b041618d2de697c8a9af214d6d2c1269 | refs/heads/master | 2020-04-02T03:48:57.016255 | 2019-05-08T21:08:20 | 2019-05-08T21:08:20 | 153,985,855 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import matplotlib.backends.tkagg as tkagg
import matplotlib.image as mpimg
from matplotlib.lines import Line2D
import matplotlib.colors
import numpy as np
import thread
import time
from random import randint, random
import cv2
from naomanager import Nao
from logger import timestamp
# Sprite drawn at the robot's odometry pose on the trajectory canvas.
im_odo = mpimg.imread('robot_odometry.png')
# Id of the most recently detected landmark (None until the first detection).
last_landmark = None
# Fixed-size ring buffer for trajectory points, tracked via head/tail indices.
LEN_QUEUE = 1000
head_traj = 0
tail_traj = 0
q_traj = [None] * LEN_QUEUE
def refresh_canvas(canvas_pack):
    """Force a redraw of a matplotlib figure embedded in the Tk window.

    ``canvas_pack`` is the (ax, fig, fig_photo, fig_canvas_agg) tuple
    used throughout this module; the axes entry is not needed here.
    """
    _, figure, photo, agg_canvas = canvas_pack
    figure.canvas.draw()
    tkagg.blit(photo, agg_canvas.get_renderer()._renderer, colormode=2)
def draw_robot():
    """Redraw the robot sprite and heading arrow at the current pose.

    NOTE(review): reads the module globals `pos` (x, y, theta) and
    `canvas_pack_traj`, which are assigned elsewhere in this file —
    confirm before reuse.  Handles to the artists drawn by the previous
    call are kept on the function attribute `draw_robot.state_odo`.
    """
    ax = canvas_pack_traj[0]
    if draw_robot.state_odo is not None:
        # Remove the previous sprite and arrow from the axes first.
        draw_robot.state_odo[0].remove()
        draw_robot.state_odo[1].remove()
    draw_robot.state_odo = (
        ax.imshow(im_odo, extent=[pos[0]-16, pos[0]+16, pos[1]-16, pos[1]+16]),
        ax.arrow(pos[0], pos[1], 15*np.cos(pos[2]), 15*np.sin(pos[2]), head_width=10, head_length=10, edgecolor='red')
    )
    refresh_canvas(canvas_pack_traj)

# Per-function storage for the artists drawn by the most recent call.
draw_robot.state_odo = None
def draw_line(pnt1, pnt2):
    """Append a red trajectory segment from pnt1 to pnt2 and refresh the canvas."""
    ax = canvas_pack_traj[0]
    l = Line2D([pnt1[0], pnt2[0]], [pnt1[1], pnt2[1]], color='#ff5050')
    ax.add_line(l)
    refresh_canvas(canvas_pack_traj)
def nop(event):
    """Event handler that intentionally does nothing."""
    return None
def select_all_text(event):
    """Select the entire content of the Text or Entry widget behind *event*."""
    widget = event.widget
    widget_class = widget.winfo_class()

    if widget_class == "Text":
        widget.tag_add("sel", "1.0", "end")
    elif widget_class == "Entry":
        widget.select_range(0, 'end')
def remove_focus(event):
    # Give keyboard focus to the event's widget (used to drop focus from inputs).
    event.widget.focus()
def move_forward(event):
    """Key handler: walk forward at half of the maximum speed.

    NOTE(review): `frequency`, `nao` and `logger` are module globals
    assigned elsewhere in this file — confirm before reuse.
    """
    logger.log('Move forward')
    x = 0.5
    nao.motion.setWalkTargetVelocity(x, 0, 0, frequency)
def move_backward(event):
    """Key handler: walk backward at half of the maximum speed.

    NOTE(review): `frequency`, `nao` and `logger` are module globals
    assigned elsewhere in this file — confirm before reuse.
    """
    logger.log('Move backward')
    x = -0.5
    nao.motion.setWalkTargetVelocity(x, 0, 0, frequency)
def turn_left(event):
    """Key handler: rotate counter-clockwise in place.

    NOTE(review): `x` and `y` are not defined in this function; the call
    relies on module globals of the same names existing, otherwise it
    raises NameError — verify.
    """
    logger.log('Turn left')
    theta = 0.25
    nao.motion.setWalkTargetVelocity(x, y, theta, 0.15)
def turn_right(event):
    """Key handler: rotate clockwise in place.

    NOTE(review): `x` and `y` are not defined in this function; the call
    relies on module globals of the same names existing, otherwise it
    raises NameError — verify.
    """
    logger.log('Turn right')
    theta = -0.25
    nao.motion.setWalkTargetVelocity(x, y, theta, 0.15)
def standup(event):
    """Key handler: move the robot to the StandInit posture (non-blocking call)."""
    logger.log("Standup")
    nao.posture.post.goToPosture("StandInit", 0.5)
def rest(event):
    """Key handler: put the robot into its rest position (non-blocking call)."""
    logger.log("Going to rest")
    nao.motion.post.rest()
def stop(event):
    """Key handler: zero the walk velocity so the robot stops moving."""
    logger.log('Stop')
    x = 0
    nao.motion.setWalkTargetVelocity(x, 0, 0, frequency)
def recog_color(hsv):
    """Classify a normalized HSV triple as 'yellow', 'blue', 'green' or 'white'.

    Hue arrives normalized to [0, 1] and is scaled to degrees first; the
    hue bands used by the three colored classes do not overlap.
    """
    hue, sat, _val = hsv
    hue = hue * 360

    if hue < 100 and sat > 0.3:
        return 'yellow'
    if 190 < hue < 230 and sat > 0.3:
        return 'blue'
    if 100 < hue < 190 and sat > 0.25:
        return 'green'
    return 'white'
def calc_range(w, bearing):
    """Estimate the distance to a landmark from its apparent pixel width *w*.

    The width is first corrected for the viewing angle *bearing* (degrees),
    because landmarks seen off-axis appear wider; the corrected width is
    then mapped through a linear calibration fit.
    """
    if bearing > 32.0:
        w = w - 2 if w >= 15 else w - 1
    elif bearing > 21:
        w = w - 1

    # Linear fit obtained from calibration measurements.
    slope = -14.00487805
    intercept = 439.2439024
    return slope * w + intercept
def calc_bearing(off, limit):
THETA_MAX = 60.97 # in degree
return THETA_MAX * off / limit
def display_realtime_img(image, time, new_pos):
global last_landmark
ax = canvas_pack_vstream[0]
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
# Grab the x and y size and make a copy of the image
ysize = image.shape[0]
xsize = image.shape[1]
detect_image = np.copy(image)
# 320 x 240
left_bottom = [50, 239]
left_top = [60, 120]
right_bottom = [270, 239]
right_top = [260, 120]
# Perform a linear fit (y=Ax+B) to each of the three sides of the triangle
# np.polyfit returns the coefficients [A, B] of the fit
fit_left = np.polyfit((left_bottom[0], left_top[0]), (left_bottom[1], left_top[1]), 1)
fit_right = np.polyfit((right_bottom[0], right_top[0]), (right_bottom[1], right_top[1]), 1)
fit_bottom = np.polyfit((left_bottom[0], right_bottom[0]), (left_bottom[1], right_bottom[1]), 1)
fit_top = np.polyfit((left_top[0], right_top[0]), (left_top[1], right_top[1]), 1)
# Find the region inside the lines
XX, YY = np.meshgrid(np.arange(0, xsize), np.arange(0, ysize))
region_thresholds = (YY > (XX*fit_left[0] + fit_left[1])) & \
(YY > (XX*fit_right[0] + fit_right[1])) & \
(YY < (XX*fit_bottom[0] + fit_bottom[1])) & \
(YY > (XX*fit_top[0] + fit_top[1]))
# Convert RGB to HSV
hsv_image = matplotlib.colors.rgb_to_hsv(image / 255.0)
pink_thresholds = (hsv_image[:,:,0]*360 >= 320) & (hsv_image[:,:,0]*360 <= 350) & \
(hsv_image[:,:,1] > 0.3) & \
(hsv_image[:,:,2] > 0.3) & (hsv_image[:,:,2] < 0.7)
YY, XX = (pink_thresholds & region_thresholds).nonzero()
points = np.array(zip(XX, YY))
dis_select = np.full((image.shape[0], image.shape[1]), False)
if len(points) >= 20:
mx, my = points.mean(axis=0)
def distance(x1, x2, y1, y2):
return ((x1-x2)**2 + (y1-y2)**2) ** 0.5
cnt = len(points)
m_dis = 0.0
for point in points:
m_dis += distance(point[0], mx, point[1], my) / cnt
for y in range(ysize):
for x in range(xsize):
dis_select[y][x] = distance(x, mx, y, my) < m_dis * 1.6
detect_region = pink_thresholds & region_thresholds & dis_select
YY, XX = detect_region.nonzero()
find_landmark = False
if len(YY) > 0: # Found landmark
find_landmark = True
pg_left, pg_right, pg_top, pg_bottom = min(XX), max(XX), min(YY), max(YY)
size = (pg_bottom - pg_top) * (pg_right - pg_left)
detect_border_x = [pg_left, pg_right, pg_right, pg_left, pg_left]
detect_border_y = [pg_bottom, pg_bottom, pg_top, pg_top, pg_bottom]
above_point = (pg_left + pg_right) / 2, pg_top - (pg_bottom - pg_top) / 2
below_point = (pg_left + pg_right) / 2, pg_bottom + (pg_bottom - pg_top) / 2
above_color = recog_color(hsv_image[above_point[1], above_point[0]]) if above_point[1] > 0 else 'none'
below_color = recog_color(hsv_image[below_point[1], below_point[0]]) if below_point[1] < ysize else 'none'
if above_color == 'blue':
landmark = 0
elif below_color == 'blue':
landmark = 1
elif above_color == 'green':
landmark = 2
elif below_color == 'green':
landmark = 3
elif above_color == 'yellow':
landmark = 4
elif below_color == 'yellow':
landmark = 5
else:
# can not recognize the sign
landmark = -1
w_pink = pg_right - pg_left
off_pink = (pg_left + pg_right) / 2 - xsize / 2
z_bearing = calc_bearing(off_pink, xsize / 2)
z_range = calc_range(w_pink, abs(z_bearing))
z_bearing = 90 - z_bearing
if w_pink < 8:
# the landmark is too far away, it is ignored
# to reduce the measurement error
landmark = -1
else:
landmark = -1
# print('no landmark detected.\n')
if landmark != -1:
pos_logger.log("[%s]%s,%s,%s,1,%s,%s,%s" % (time, new_pos[0], new_pos[1], new_pos[2], landmark, z_range, z_bearing))
else:
pos_logger.log("[%s]%s,%s,%s,0" % (time, new_pos[0], new_pos[1], new_pos[2]))
if landmark != last_landmark:
last_landmark = landmark
if landmark != -1:
logger.log("detected landmark %s" % landmark)
else:
logger.log("no landmark detectd.")
scan_border_x = [left_bottom[0], right_bottom[0], right_top[0], left_top[0], left_bottom[0]]
scan_border_y = [left_bottom[1], right_bottom[1], right_top[1], left_top[1], left_bottom[1]]
# refresh frame
if display_realtime_img.video_frame is not None:
for g in display_realtime_img.video_frame:
if g is not None:
g.remove()
display_realtime_img.video_frame = (
ax.imshow(detect_image),
ax.plot(detect_border_x, detect_border_y, '-', lw=2, color='black')[0] if find_landmark else None,
ax.plot(scan_border_x, scan_border_y, 'b--', lw=2)[0]
)
refresh_canvas(canvas_pack_vstream)
display_realtime_img.video_frame = None
def capture(time, new_pos, display=True):
img = None
result = nao.video_device.getImageRemote(nao.capture_device)
if result == None:
logger.log('cannot capture.')
elif result[6] == None:
logger.log('no image data string.')
else:
values = map(ord, list(result[6]))
img = np.reshape(values, (240, 320, 3)).astype('uint8')
if display:
thread.start_new_thread(display_realtime_img, (img, time, new_pos))
return img
def get_pose():
_pos = nao.motion.getRobotPosition(False)
return (_pos[0] * 100, _pos[1] * 100, _pos[2])
def update_traj():
global pos
global q_traj, head_traj, tail_traj
while True:
while head_traj != tail_traj:
head_traj = (head_traj + 1) % LEN_QUEUE
new_pos = q_traj[head_traj]
draw_line((new_pos[0], new_pos[1]), (pos[0], pos[1]))
pos = new_pos
draw_robot()
time.sleep(0) # yield
def position_fetcher():
global pos
global q_traj, head_traj, tail_traj
while True:
new_time = timestamp()
new_pos = get_pose()
capture(new_time, new_pos)
next_p = (tail_traj + 1) % LEN_QUEUE
q_traj[next_p] = new_pos
tail_traj = next_p
time.sleep(0.2)
def mark_point(event):
pos_logger.log("[%s]mark" % timestamp())
def elegant_exit(event):
print('elegant exit')
pos_logger.close()
nao.unsubscribe_camera()
mw.destroy()
def setup_window_control(
main_window,
_logger, _pos_logger,
_canvas_pack_traj,
_canvas_pack_vstream,
nao_ip, nao_port):
global mw
global logger, pos_logger
global canvas_pack_traj, canvas_pack_vstream
global nao, pos, x, y, theta, frequency, CommandFreq
canvas_pack_traj = _canvas_pack_traj
canvas_pack_vstream = _canvas_pack_vstream
mw = main_window
logger = _logger
pos_logger = _pos_logger
mw.bind("<Button-1>", remove_focus)
# Mac
mw.bind('<Command-a>', nop)
# Linux
# mw.bind('<Control-a>', nop)
mw.bind('w', move_forward)
mw.bind('s', move_backward)
mw.bind('a', turn_left)
mw.bind('d', turn_right)
mw.bind('q', rest)
mw.bind('e', standup)
mw.bind('x', stop)
mw.bind('m', mark_point)
mw.bind('<Control-q>', elegant_exit)
thread.start_new_thread(data_fetcher, ())
# ============ Connect to Robot ============
logger.log('connecting to the robot...')
nao = Nao(nao_ip, nao_port)
x = 0.0
y = 0.0
theta = 0.0
frequency = 0.3
CommandFreq = 0.5
nao.motion.wakeUp()
nao.posture.goToPosture("StandInit", 0.5)
time.sleep(1.0)
nao.motion.setStiffnesses("Head", 1.0)
names = ["HeadYaw", "HeadPitch"]
angles = [0.0, -0.12]
nao.motion.setAngles(names, angles, 0.2)
time.sleep(1.0)
logger.log('Ready to go!!!')
pos = get_pose()
logger.log("Init pos: %s" % str(pos))
draw_robot()
thread.start_new_thread(position_fetcher, ())
thread.start_new_thread(update_traj, ())
| UTF-8 | Python | false | false | 11,338 | py | 7 | control.py | 5 | 0.570118 | 0.538543 | 0 | 408 | 26.789216 | 124 |
xhuaustc/cloud-sdk | 19,250,043,450,830 | eaf6c18978ce2be4e693e8a6c7e83f69ae98023d | 99bd15b2fd230a51c7886cd430163d95a0630612 | /cloudsdk/models/__init__.py | 66978d7122da2324c44aef41f4a76aeef16997b2 | []
| no_license | https://github.com/xhuaustc/cloud-sdk | fb29ad4e8ccd94d90a7ef3e48acdfe9b048c762e | 68d0a02887294b413bcc871e7156bd4bcfb5d0c3 | refs/heads/master | 2020-04-16T17:21:42.878478 | 2019-04-09T06:48:49 | 2019-04-09T06:48:49 | 165,773,443 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: __init__.py
Description :
Author : 潘晓华
date: 2019/1/15
-------------------------------------------------
"""
# coding=utf8
"""
-------------------------------------------------
File Name: api_model
Description :
Author : 潘晓华
date: 2017/8/21
-------------------------------------------------
"""
from cloudsdk.api.api_cloud import ApiCloud
import math
from cloudsdk.config import ApiConfig
class ApiModel(object):
limit = 99
@classmethod
def get_all_by_page(cls, action='', set='', zone=None, params=None, page=0, per_page=99):
if None == zone:
zone = ApiConfig.api_default_zone
action_data = {
'action': action,
'limit': per_page,
'zone': zone,
'offset': page * per_page
}
if not None == params:
action_data = dict(action_data, **params)
result = ApiCloud.request(action_data=action_data)
if 'total_count' not in result:
return [], 0
total_count = result['total_count']
data = result[set]
return data, total_count
@classmethod
def get_all(cls, action='', set='', zone=None, params=None):
if None == zone:
zone = ApiConfig.api_default_zone
action_data = {
'action': action,
'limit': cls.limit,
'zone': zone
}
if not None == params:
action_data = dict(action_data, **params)
first_result = ApiCloud.request(action_data=action_data)
if 'total_count' not in first_result:
if set not in first_result:
return list()
else:
return first_result[set]
total_count = first_result['total_count']
data = first_result[set]
if total_count <= cls.limit:
return data
pages = math.ceil(total_count / float(cls.limit))
for i in range(1, int(pages)):
action_data['offset'] = i * cls.limit
temp_result = ApiCloud.request(action_data=action_data)
data += temp_result[set]
return data
@classmethod
def get_resource_by_user_id(cls, resource=None, set=None, user_id=None, zone=None):
if None == resource or None == set or None == user_id:
return None
data = ApiModel.get_all(action='Describe' + resource, set=set, zone=zone,
params={'owner': user_id, 'status.1': 'available', 'status.2': 'associated',
'status.3': 'running', 'status.4': 'in-use', 'status.5': 'active',
"verbose": "1"})
return data
@classmethod
def get_resources(cls, resource=None, set=None, zone=None):
if None == resource or None == set:
return None
data = ApiModel.get_all(action='Describe' + resource, set=set, zone=zone,
params={'status.1': 'available', 'status.2': 'associated',
'status.3': 'running', 'status.4': 'in-use', 'status.5': 'active',
"verbose": "1"})
return data
@classmethod
def request(cls, action='', zone=None, params={}):
action_data = {
'action': action,
'zone': zone
}
if not None == params:
action_data = dict(action_data, **params)
result = ApiCloud.request(action_data=action_data)
return result['ret_code']
| UTF-8 | Python | false | false | 3,802 | py | 28 | __init__.py | 27 | 0.473823 | 0.464569 | 0 | 109 | 32.697248 | 108 |
allisson128/SD | 16,578,573,764,724 | 1d4c4718bba205edd706843f1209f435f8b14728 | 811547fb3341effe286b999f35b69175be45f549 | /backup/client/classes/strHandle.py | 59e04d201a915115893e7fb974e5f201969e5304 | []
| no_license | https://github.com/allisson128/SD | 7a7a0abf7f418c8a85bc3d4cd743f2815faa981d | e853503e6bcd9de6c2ff613db13320753055d058 | refs/heads/master | 2020-05-27T13:15:45.867760 | 2015-05-21T15:05:27 | 2015-05-21T15:05:27 | 35,048,411 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #----------------------------------------------------
# METODOS PARA TRATAMENTO DE STRING
#----------------------------------------------------
class StringHandle:
#----------------------------------
# METODOS ESTATICOS
#----------------------------------
"""
Se uma string inicia com string definida como inicio
"""
def strIniciaCom(string, inicio):
return (string[:len(inicio)] == inicio)
"""
Se a string contem o conteudo.
"""
def strContem(string, conteudo):
n = len(string)
i = 0
try:
while(i < n):
i = string.index(conteudo)
m = len(conteudo)
if(string[i:i+m] == conteudo):
return True
string = string[i+1:]
i = 0
return False
except ValueError:
return False
| UTF-8 | Python | false | false | 909 | py | 23 | strHandle.py | 15 | 0.381738 | 0.378438 | 0 | 33 | 26.545455 | 56 |
cuongpiger/CSC_Machine_Learning_with_Python | 13,297,218,796,447 | 27fb1b6eff3c9d8cbfbb24579414788b10ba07da | 9b7ce4d4e1012401420548f8e2eb21a69898a982 | /Chapter 4. Supervised Learning - Naive Bayes/modules/text_transform.py | c57c7ea1784b18b8f447be0ef5a10c20ec48b185 | []
| no_license | https://github.com/cuongpiger/CSC_Machine_Learning_with_Python | 7fcf9c557d51802a6c750ed9da9c75bf6a986e58 | c063e8216affe276bb915bf12389b821137ba3d2 | refs/heads/main | 2023-05-13T21:51:59.614937 | 2021-06-03T04:05:46 | 2021-06-03T04:05:46 | 347,799,324 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
from sklearn.feature_extraction.text import CountVectorizer
class CTextHandler:
def __init__(self, data):
self.data = data
def createBagOfWords(self):
self.handler = CountVectorizer()
self.handler.fit(self.data)
return self.handler.transform(self.data).toarray()
def transform(self, data):
return self.handler.transform(data).toarray() | UTF-8 | Python | false | false | 430 | py | 73 | text_transform.py | 31 | 0.653488 | 0.653488 | 0 | 15 | 27.733333 | 59 |
giginet/django-generic-tagging | 7,378,753,820,747 | a1fe6dbed8760ffe6d508ac38dcde42605c00a27 | 4b0537dafd392ecf4139ceb1b472465e87c3f4a3 | /generic_tagging/migrations/0001_initial.py | 3b300f8ec6b76cc480db02227838d21ec9cab87d | [
"MIT"
]
| permissive | https://github.com/giginet/django-generic-tagging | a4ab8f0397de912d6afc85f85b663954189a5033 | adf5b28caad8ef51759cf57b3b03b6592d11eae5 | refs/heads/master | 2021-07-02T13:01:57.951098 | 2016-04-03T09:06:20 | 2016-04-03T10:59:55 | 54,305,869 | 0 | 0 | MIT | false | 2021-06-10T18:40:24 | 2016-03-20T07:29:25 | 2016-04-03T09:27:15 | 2021-06-10T18:40:22 | 70 | 0 | 0 | 3 | Python | false | false | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-22 08:48
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('label', models.CharField(max_length=255, unique=True, verbose_name='Label')),
],
options={
'verbose_name_plural': 'Tags',
'verbose_name': 'Tag',
'ordering': ('label',),
},
),
migrations.CreateModel(
name='TaggedItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('object_id', models.PositiveIntegerField(verbose_name='Object ID')),
('locked', models.BooleanField(default=False, verbose_name='Locked')),
('order', models.IntegerField(blank=True, default=0, verbose_name='Order')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created at')),
('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='items', to=settings.AUTH_USER_MODEL, verbose_name='Author')),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType', verbose_name='Content type')),
('tag', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='items', to='generic_tagging.Tag', verbose_name='Tag')),
],
options={
'verbose_name_plural': 'Tagged items',
'verbose_name': 'Tagged item',
'permissions': (('lock_tagged_item', 'Can lock tagged item'), ('unlock_tagged_item', 'Can unlock tagged item')),
'ordering': ('order', 'created_at'),
},
),
migrations.AlterUniqueTogether(
name='taggeditem',
unique_together=set([('tag', 'content_type', 'object_id')]),
),
]
| UTF-8 | Python | false | false | 2,509 | py | 35 | 0001_initial.py | 25 | 0.582304 | 0.572738 | 0 | 55 | 44.618182 | 188 |
twrightsman/REPET | 8,890,582,352,636 | 979c9947c8c817f17c06f19f688bb61feb08d1f7 | 12f84f41ad64334734f6466a8e843703ce7bf7d1 | /bin/Gff3Writer.py | fbf20fabf54c3b796bb32b368b93ea6a02a37892 | []
| no_license | https://github.com/twrightsman/REPET | d68bbc9487d791d9382a65d11137d0cd93ec0942 | a6662f20532f4c98f126dd2181b544c75a08df51 | refs/heads/master | 2018-04-01T07:37:13.929687 | 2017-05-19T20:30:35 | 2017-05-19T20:30:35 | 88,304,015 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | ../commons/core/writer/Gff3Writer.py | UTF-8 | Python | false | false | 36 | py | 1,018 | Gff3Writer.py | 994 | 0.805556 | 0.777778 | 0 | 1 | 36 | 36 |
fernandobac03/evaluation-backend | 3,556,232,963,488 | fb415ab71d0087e2be7b49faaa416efc5e61592d | 708655e449320db12c1a37e9f082fe967319f8ce | /evaluationSameAs/index.py | ce6790440014ff7b091affffc2b0acfb97c39af2 | []
| no_license | https://github.com/fernandobac03/evaluation-backend | c858feed4ad159ce2ef829ce08686967c662b4cf | 375eeaa9f11e005da96acd08754dbc45dd2d0499 | refs/heads/master | 2020-06-11T13:42:47.095025 | 2019-06-28T17:18:56 | 2019-06-28T17:18:56 | 193,985,632 | 0 | 0 | null | false | 2019-10-30T11:17:58 | 2019-06-26T22:27:37 | 2019-06-28T17:19:10 | 2019-10-30T11:17:57 | 19 | 0 | 0 | 1 | Python | false | false | #!flask/bin/python
# -*- coding: utf-8 -*-
from flask import Flask, jsonify, request
from flask_cors import CORS
from evaluationSameAs.evaluationSameAs.inter import insert_person, get_pairs_data, insert_evaluation
app = Flask(__name__)
CORS(app)
@app.route('/geolinkeddata/service/storage/add', methods=['POST'])
def add_persona():
json = request.get_json(force=True)
return insert_person(json)
@app.route('/geolinkeddata/service/storage/get', methods=['GET'])
def get_pairs():
#return get_direct_results()
if 'param' in request.args:
param = request.args['param']
return get_pairs_data(param)
else:
return "Error: No param arg provided. Please specify an id"
@app.route('/geolinkeddata/service/storage/addevaluation', methods=['POST'])
def set_evaluation():
json = request.get_json(force=True)
return insert_evaluation(json)
@app.route('/geolinkeddata/service/storage/test', methods=['GET'])
def test():
return jsonify({'test': 'ok'})
if __name__ == '__main__':
app.run(debug=True)
| UTF-8 | Python | false | false | 1,064 | py | 13 | index.py | 10 | 0.677632 | 0.676692 | 0 | 39 | 26.102564 | 100 |
PrachiP23/CSE611 | 10,411,000,728,915 | f1da1b8af1272575f8e217afe075a39dc752ee80 | d9e9d6c97f60406e1f88ac00412c8a696500e477 | /process/urbanlegend.py | 1387e223451b035bf8f158727b8fc18257cc39cf | []
| no_license | https://github.com/PrachiP23/CSE611 | efe9d55b605218ed9c9ab2233f23dd4e3db7793a | 5006632daf5c31d72892ae906642e4ee231387d2 | refs/heads/master | 2021-03-27T09:14:37.706006 | 2018-04-11T10:39:07 | 2018-04-11T10:39:07 | 123,155,434 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from process.process import Process
from datetime import datetime
import re
class UrbanLegendProcess(Process):
def process(self):
document = super().process()
document["innerTitle"] = ''.join(document["innerTitle"]).strip()
if document["description"] is not None:
document["description"] = filter(None,document["description"])
document["description"] = ''.join(document["description"])
document["content"] = document["content"].strip()
if document["claim"] is not None:
document["claim"] = filter(None,document["claim"])
document["claim"] = ''.join(document["claim"])
claimReviewed = re.search('\w+:(.+)\(', document["claimReviewed"], re.IGNORECASE)
if claimReviewed is not None:
document["claimReviewed"] = claimReviewed.group(1).strip()
else:
document["claimReviewed"] = "N/A"
document["date"] = ''.join(document["date"]).strip()
document["date"] = datetime.strptime(document["date"], 'Updated %B %d, %Y').strftime('%d-%m-%Y')
# date = scrapy.Field()
# claimReviewed = scrapy.Field()
# referredUrl = scrapy.Field()
return document
| UTF-8 | Python | false | false | 1,231 | py | 33 | urbanlegend.py | 21 | 0.600325 | 0.599513 | 0 | 35 | 34.171429 | 104 |
elamperti/max7219 | 9,019,431,324,700 | d3e354bacb6f0446f006c1406a0ec3b02d339dd8 | 7fb5d789a06ca1070ce3bb287638554632999dd6 | /demo-clock.py | 21fe8625c69dc33001522c2ace02e00b3645ad78 | [
"MIT"
]
| permissive | https://github.com/elamperti/max7219 | f5c64e7f1872ae7f69213239e8fb351c6a2de42f | 3a36fccd5524d93d15fd874373b25e8084c4a14a | refs/heads/master | 2021-01-20T11:06:18.421948 | 2014-05-11T19:00:52 | 2014-05-11T19:00:52 | 16,159,546 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/sbin/python
from datetime import datetime
from time import sleep
from maxdraw import MaxDraw
from fonts import tiems_new_broman
a = MaxDraw(4)
a.init()
c = a.prepareString(datetime.now().strftime('%H:%M:%S'), tiems_new_broman.Font())
while(True):
c = a.prepareString(datetime.now().strftime('%H:%M: %S'), tiems_new_broman.Font())
a._displayCanvas.placeImage(c, 1)
a.paintCanvas()
sleep(1)
| UTF-8 | Python | false | false | 417 | py | 15 | demo-clock.py | 14 | 0.690647 | 0.683453 | 0 | 18 | 22.166667 | 86 |
plumtree87/trophyCaseDatabase | 16,045,997,837,636 | 5ac68da56dba18fd38e82a49e1d2335b68db93d7 | 9ce56fd81c580921df3d233a25ae1e3689e13bd5 | /hunter_project/fish/admin.py | df225efd6ad32adae61872a5f8063fd00158edd4 | []
| no_license | https://github.com/plumtree87/trophyCaseDatabase | c6f966b48bd8c6d0290ed188c813a404e001d4c4 | c9ede6813a6d42540a6471a0cf06379501765940 | refs/heads/main | 2023-05-15T01:56:27.619910 | 2021-06-11T02:23:55 | 2021-06-11T02:23:55 | 371,199,735 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.contrib import admin
from .models import Bass
# Register your models here.
admin.site.register(Bass) | UTF-8 | Python | false | false | 112 | py | 29 | admin.py | 28 | 0.8125 | 0.8125 | 0 | 4 | 27.25 | 32 |
zhangsikai123/history-stocks | 2,937,757,673,159 | 2a3e483c56bc42287290727a6c20cf10de47781f | bcde16eece2e47273271362ecb3c25aafc7baa64 | /skystocks/items/history_item.py | 56744a14b111dfbdaf8fdbcd1eea7b78df519cd6 | []
| no_license | https://github.com/zhangsikai123/history-stocks | 945663905034991891e423ed781278e3a7fff271 | 91ed7f152062506305c1286588fa153010410f6d | refs/heads/master | 2022-10-08T15:00:54.579320 | 2020-06-06T14:51:10 | 2020-06-06T14:51:10 | 254,043,480 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/items.html
from scrapy import Field
from scrapy.loader import ItemLoader, Identity
from skystocks.items.base_item import BaseItem
class StockItem(BaseItem):
code = Field()
cn_name = Field()
date = Field(serializer=str) # 日期
open = Field() # 开盘价
close = Field() # 收盘价
rise = Field() # 涨跌额
rise_rate = Field() # 涨跌率: %
lowest = Field() # 最低
highest = Field() # 最高
trading_volume = Field() # 成交量
turnover = Field() # 成交额: 万
turnover_rate = Field() # 换手率: %
def __repr__(self):
"""only print out attr1 after exiting the Pipeline"""
return "{}|{}|{}".format(self.cn_name, self.code, self.date)
class HistoryStocksItem(BaseItem):
stock_items = Field()
def __repr__(self):
return "" # no log out
class StockItemLoader(ItemLoader):
default_input_processor = Identity()
| UTF-8 | Python | false | false | 1,063 | py | 21 | history_item.py | 14 | 0.625621 | 0.623635 | 0 | 38 | 25.5 | 68 |
shidanxu/mengvfinal | 13,649,406,106,707 | f9f14555c96d4dc0d80e804e19bf041d08ebc9bb | 594347c573dc5468a1bcac1944ac4e287b12d460 | /IPLookup/ipToWirelessLTE.py | f5ae83be8321277272b947f3f23c67aa51f69ff6 | []
| no_license | https://github.com/shidanxu/mengvfinal | cd7c1d8935679a4bee7b7ce97050a3b642d647fe | 903aeea798d49c61c557c827ef28f177b3eaa17c | refs/heads/master | 2021-01-01T04:18:21.952006 | 2016-05-26T16:11:00 | 2016-05-26T16:11:00 | 58,878,805 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import pandas as pd
import numpy as np
data = pd.read_csv("ipToISP.csv", names = ['ip', 'city', 'state', 'isp', 'lat', 'lon', 'country'])
print data.head(50)
np.set_printoptions(threshold='nan')
print data['isp'].unique()
print data.groupby('isp').count().sort('ip')
print data.groupby('city').count().sort('ip')
# Two categories, wifi and phone network
# Wifi = Anything that contains college, university, wifi, WiFi,
wifiNetworks = ['University of Massachusetts', 'Comcast Cable', 'Verizon Internet Services', 'Charter Communications', 'Optimum Online', 'Time Warner Cable']
wirelessNetworks = ['Verizon Wireless', 'AT&T Wireless', 'Sprint PCS', 'T-Mobile USA', 'Verizon Fios']
totalNetworks = wifiNetworks + wirelessNetworks
data['wireless'] = data['isp'].isin(wirelessNetworks)
data['wifi'] = data['isp'].isin(wifiNetworks)
data['networksInfoAvailable'] = data['isp'].isin(totalNetworks)
ipToWifiWireless = data[data['networksInfoAvailable'] == True]
print ipToWifiWireless
ipToWifiWireless.to_csv("ipToWifiWireless.csv") | UTF-8 | Python | false | false | 1,033 | py | 25 | ipToWirelessLTE.py | 9 | 0.728945 | 0.727009 | 0 | 27 | 37.296296 | 157 |
zXin1112/Python-Practice | 4,105,988,785,508 | d3764e5a3a818e451c2ab48d6565b75dae7809a9 | cfc373eb6e1e3e800d9de9d86cc606698ed08cf3 | /DataVisualization/DataVisualization/matplotliob/scatter_squares.py | 3ad8d391a6fd6983bca6fa803a640b30d7e6aa12 | []
| no_license | https://github.com/zXin1112/Python-Practice | c57705514fd4970cf78c09e49721054ad24745a0 | c624aaae2d47e3c9171b0f1dbdc7ca87ef440811 | refs/heads/master | 2020-04-13T14:30:40.863766 | 2019-01-17T05:57:51 | 2019-01-17T05:57:51 | 161,575,113 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import matplotlib.pyplot as plt
#散点图
#设置画面大小,12相当于1200 5.5相当于550 宽1200高550
plt.figure(figsize=(12, 5.5))
#数据源
#x_values=[1,2,3,4,5]
#y_values=[1,4,9,16,25]
x_values=list(range(1,1001))
y_values=[x**2 for x in x_values]
#s为点的尺寸,c为点的颜色 也可指定rgb
#plt.scatter(x_values,y_values,c='red',s=40)
#颜色映射 c设置为y值列表,cmap使用那个颜色映射
plt.scatter(x_values,y_values,c=y_values,cmap=plt.cm.Blues,s=40)
#添加标题,横纵轴标题
plt.title("square Number",fontsize=24)
plt.xlabel("value",fontsize=14)
plt.ylabel("Square of Value",fontsize=14)
plt.tick_params(axis='both',labelsize=14)
#设置每个坐标轴的取值范围 X轴 y轴
plt.axis([0,1100,0,1100000])
#保存图片,路径 将空白区域裁减掉
plt.savefig('test.png',bbox_inches='tight')
plt.show() | UTF-8 | Python | false | false | 854 | py | 51 | scatter_squares.py | 42 | 0.738024 | 0.640719 | 0 | 23 | 28.086957 | 64 |
bushitan/history_is_relation | 4,879,082,873,512 | f2f2c5220ca95167c64843ca02dfaff5858c21b1 | aca6c5120ef93fa109c0cf0c8720d2e22d329fb0 | /gd/gd_dispatch/migrations/0005_auto_20160621_1810.py | 2d8c692c8d8cda4f67c566e97fd238f3976ab286 | []
| no_license | https://github.com/bushitan/history_is_relation | 0580b1b343bc1142969e52003a48fee13713fd82 | b667142aa02cf67bfa4e1d678cb06d73d37f0747 | refs/heads/master | 2021-01-21T14:08:17.427033 | 2016-07-30T04:05:04 | 2016-07-30T04:05:04 | 58,323,275 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('gd_dispatch', '0004_relmonitor'),
]
operations = [
migrations.CreateModel(
name='RelPhone',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
],
options={
'verbose_name': '\u7ed1\u5b9a_\u53ef\u89c6\u7535\u8bdd',
'verbose_name_plural': '\u7ed1\u5b9a_\u53ef\u89c6\u7535\u8bdd',
},
),
migrations.AlterModelOptions(
name='camera',
options={'verbose_name': '\u4fe1\u606f_\u6444\u50cf\u5934', 'verbose_name_plural': '\u4fe1\u606f_\u6444\u50cf\u5934'},
),
migrations.AlterModelOptions(
name='relmonitor',
options={'verbose_name': '\u7ed1\u5b9a_\u76d1\u63a7', 'verbose_name_plural': '\u7ed1\u5b9a_\u76d1\u63a7'},
),
migrations.AlterModelOptions(
name='stb',
options={'verbose_name': '\u4fe1\u606f_\u673a\u9876\u76d2', 'verbose_name_plural': '\u4fe1\u606f_\u673a\u9876\u76d2'},
),
migrations.AddField(
model_name='relphone',
name='camera',
field=models.ForeignKey(verbose_name='\u6444\u50cf\u5934', to='gd_dispatch.Camera'),
),
migrations.AddField(
model_name='relphone',
name='stb',
field=models.ForeignKey(verbose_name='\u673a\u9876\u76d2', to='gd_dispatch.STB'),
),
]
| UTF-8 | Python | false | false | 1,660 | py | 49 | 0005_auto_20160621_1810.py | 33 | 0.561446 | 0.481325 | 0 | 46 | 35.086957 | 130 |
karthikpappu/pyc_source | 1,932,735,305,407 | 397ae8a3285ec98c590888f6c28a249c08716521 | 91fa095f423a3bf47eba7178a355aab3ca22cf7f | /pycfiles/sake-0.9.0-py2.7/process.py | da6c3b4a16f50a917248eaf10f16a3a333eca7a4 | []
| no_license | https://github.com/karthikpappu/pyc_source | 0ff4d03e6d7f88c1aca7263cc294d3fa17145c9f | 739e7e73180f2c3da5fd25bd1304a3fecfff8d6e | refs/heads/master | 2023-02-04T11:27:19.098827 | 2020-12-27T04:51:17 | 2020-12-27T04:51:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # uncompyle6 version 3.7.4
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.9 (default, Apr 18 2020, 01:56:04)
# [GCC 8.4.0]
# Embedded file name: build\bdist.win32\egg\sake\process.py
# Compiled at: 2011-02-21 21:55:26
"""
process module - implements process concepts. yessirre.
"""
from collections import deque
import logging, sys, traceback, stackless, stacklesslib.main, stacklesslib.locks
from .const import WAIT_INFINITE, WAIT_CHECK
from .errors import TimeoutError
from . import util
class Tasklet(stackless.tasklet):
    """Tasklet wrapper around ``stackless.tasklet``.

    Adds a top-level exception handler (see :meth:`RunFunc`) and keeps a
    reference to the owning process, a per-tasklet name, and a session.
    """

    def __new__(cls, process, func):
        # Handler closes over `self`, which is only assigned *after* the
        # closure is created.  That is safe because the tasklet body runs
        # later, once it is scheduled, by which time `self` is bound.
        def Handler(*args, **kw):
            self.RunFunc(process, func, args, kw)
        self = super(Tasklet, cls).__new__(cls, Handler)
        self.SetupState(process)
        return self

    def SetupState(self, process):
        """
        Set up the tasklet with initial attributes pertaining to the process,
        returning any such previous attributes (oldstate).  The returned
        value can later be reinstated with RestoreState().
        """
        try:
            # On first-time setup none of these attributes exist yet, so the
            # tuple build raises AttributeError and there is no old state.
            oldstate = (
             self.name, self.locals, self.process, self.session)
        except AttributeError:
            oldstate = None
        self.name = 'tasklet:%s:%s' % (process.name, process.taskletCounter)
        self.locals = None
        self.process = None  # bound by Process.OnTaskletEnter once running
        self.session = util.GetSession()
        process.taskletCounter += 1
        return oldstate

    def RestoreState(self, oldstate):
        """
        Restore the tasklet attributes to those that were before SetupState().
        A no-op when `oldstate` is falsy (i.e. there was no previous state).
        """
        if oldstate:
            self.name, self.locals, self.process, self.session = oldstate

    def RunFunc(self, process, func, args, kwargs):
        """
        Run a function, along with the proper top level error handling and
        process bookkeeping.
        SetupState() must have been called previously.
        """
        process.OnTaskletEnter(self)
        try:
            try:
                self.retval = func(*args, **kwargs)
            except Exception as e:
                # Top-level handler: log and stash the exception on the
                # tasklet instead of letting it unwind into the scheduler.
                process.log.exception("Unhandled top-level exception in process '%s'", process.name)
                self.exception = e
        finally:
            # Always deregister from the owning process, even when killed.
            process.OnTaskletLeave(self)

    def __str__(self):
        return self.name

    def __repr__(self):
        return '<Tasklet object at %s: %r alive=%s, scheduled=%s, blocked=%s>' % (hex(id(self)), self.name, self.alive, self.scheduled, self.blocked)

    def Trace(self):
        # Dump this tasklet's current Python stack to stdout for debugging.
        sys.stdout.write(repr(self) + '\n')
        traceback.print_stack(self.frame, file=sys.stdout)
class Process(object):
    """Bookkeeping container for a group of related tasklets.

    Every tasklet spawned through :meth:`New` registers itself in
    ``self.tasklets`` while it runs.  The process can be waited upon
    (:meth:`Wait`) until all of its tasklets have finished, or torn down
    wholesale with :meth:`Kill`.  Subclasses override :meth:`StartProcess`
    and :meth:`StopProcess` as lifecycle hooks.
    """
    processCounter = 1000   # class-wide source of process ids
    taskletCounter = 1      # per-process counter used to name tasklets
    processStartAsync = True

    def __init__(self):
        self.running = True
        self.name = None
        current = stackless.getcurrent()
        # Inherit the session from the spawning tasklet's process, unless we
        # are created from the main tasklet (or one that carries no process
        # attribute), in which case there is no session yet.
        if current.is_main or not hasattr(current, 'process'):
            self.session = None
        else:
            self.session = current.process.session
        self.tasklets = set()
        self.createCount = 0
        self.destroyCount = 0
        self.pid = Process.processCounter
        Process.processCounter += 1
        self.waitqueue = None

    def __repr__(self):
        return '<Process %r with %s running tasklets>' % (self.name, len(self.tasklets))

    def StartProcess(self):
        """Lifecycle hook, called by the app when the process or service starts."""

    def StopProcess(self):
        """Lifecycle hook, called when the process or service stops."""

    def New(self, fn, *args, **kw):
        """Spawn *fn* as a new tasklet owned by this process and start it."""
        launch = self.app.GetService('TaskletPool').Tasklet(self, fn)
        return launch(*args, **kw)

    def Wait(self, timeout=WAIT_INFINITE):
        """Wait for every tasklet in this process to exit.

        Returns True when all tasklets have exited, False when one or more
        are still busy (only possible with timeout == WAIT_CHECK, which
        makes the call return immediately without blocking).  With
        WAIT_INFINITE there is no timeout; otherwise `timeout` is handed
        straight to stacklesslib's Event.wait and a TimeoutError is raised
        on expiry.  A tasklet that is itself part of the process may call
        this safely: it effectively waits for all the *other* tasklets.
        """
        if not self.tasklets:
            return True
        current = stackless.getcurrent()
        only_me = current in self.tasklets and len(self.tasklets) == 1
        if only_me or stackless.getruncount() < 2:
            # Either we are the sole remaining tasklet, or nothing else is
            # runnable; blocking would deadlock, so report "done" instead.
            return True
        if timeout == WAIT_CHECK:
            return len(self.tasklets) == 0
        if self.waitqueue is None:
            self.waitqueue = stacklesslib.locks.Event()
        if timeout == WAIT_INFINITE:
            timeout = None
        try:
            self.waitqueue.wait(timeout)
        except stacklesslib.util.WaitTimeoutError:
            msg = '%s tasklets still alive in %s after %.1f seconds of waiting.'
            raise TimeoutError(msg % (len(self.tasklets), self.name, timeout))
        return True

    def OnTaskletEnter(self, tasklet):
        """Register *tasklet*; called from Tasklet.RunFunc as it starts."""
        tasklet.process = self
        self.tasklets.add(tasklet)
        self.createCount += 1

    def OnTaskletLeave(self, tasklet):
        """Deregister *tasklet*; called from Tasklet.RunFunc as it exits."""
        tasklet.process = None
        self.tasklets.remove(tasklet)
        self.destroyCount += 1
        if not self.tasklets and self.waitqueue:
            # Last tasklet out: release everyone blocked in Wait().
            self.waitqueue.set()
            self.waitqueue = None
        self.app.dbg.TaskletExiting(tasklet)

    def Kill(self, reason=None):
        """Stop the process and kill all of its tasklets.

        May be called from one of the process' own tasklets, in which case
        the calling tasklet is killed last, after the rest have exited.
        """
        reason = ('Process killed: ' + reason) if reason else 'Process killed.'
        self.log.info("Killing process '%s' and its %s tasklets, reason: %s", self.name, len(self.tasklets), reason)
        self.running = False
        self.StopProcess()
        self.app.OnProcessDestroyed(self)
        me = stackless.getcurrent()
        kill_me_last = False
        for victim in self.tasklets.copy():
            victim.killReason = reason
            if me == victim:
                # Killing ourselves right now would cut this method short;
                # postpone until the others are gone.
                kill_me_last = True
            else:
                victim.kill()
        self.Wait()
        if kill_me_last:
            me.kill()
class TaskletPool(Process):
"""
A class that recycles tasklets. This is useful not as a performance improvement, but to limit the number
of actual tasklet IDs in use, when implementing C level code that uses the tasklet ID as an index to
some sort of TLS structure.
"""
serviceName = 'TaskletPool'
serviceIncludes = []
processStartAsync = False
def __init__(self):
Process.__init__(self)
self.queue = deque()
self.maxlen = 20
def StopProcess(self):
self.Flush()
def Flush(self):
"""
Clear the queue of idle tasklets
"""
while self.queue:
self.DropTasklet()
def Tasklet(self, process, func):
"""
Emulate the constructor of the Tasklet object. Tries to get the most recently used idle
tasklet. If none are available, a new is created.
Returns a callable that can be used to bind arguments to the call.
"""
while len(self.queue):
channel = self.queue.pop()
if channel.balance:
tasklet = channel.queue
oldstate = tasklet.SetupState(process)
def Callable(*args, **kwargs):
channel.send((process, func, args, kwargs, oldstate))
return tasklet
return Callable
tasklet = self.app.CreateRawTasklet(self, self.TaskletFunc)
oldstate = tasklet.SetupState(process)
def Callable2(*args, **kwargs):
return tasklet([process, func, args, kwargs, oldstate])
return Callable2
def DropTasklet(self):
"""
Remove the oldest Tasklet from the queue, telling it to stop.
"""
channel = self.queue.popleft()
if channel.balance:
channel.send(None)
return
def TaskletFunc(self, mutableArgs):
"""
The main worker function for each tasklet managed by the TaskletPool
Takes as arguments the initial process, function and arguments to execute.
"""
process, func, args, kwargs, oldstate = mutableArgs
del mutableArgs[:]
t = stackless.getcurrent()
t.RunFunc(process, func, args, kwargs)
t.RestoreState(oldstate)
while True:
work = process = func = args = kwargs = oldstate = None
work = self.GetWork()
if not work:
return
process, func, args, kwargs, oldstate = work
t.RunFunc(process, func, args, kwargs)
t.RestoreState(oldstate)
return
def GetWork(self):
"""
Get the next piece of work to perform on the behalf of a user, or None
"""
if self.maxlen > 0:
channel = stackless.channel()
channel.preference = 1
self.queue.append(channel)
while len(self.queue) > self.maxlen:
self.DropTasklet()
return channel.receive() | UTF-8 | Python | false | false | 9,264 | py | 114,545 | process.py | 111,506 | 0.585276 | 0.578152 | 0 | 284 | 31.623239 | 149 |
yazeedtaweel/appumSample | 1,743,756,746,366 | 345a58bccf722b55127793197bf548fc2d14c7a0 | fe71c86322912342062b0557f8167a80c2c82488 | /WebAppAutomationTests/Pages/createInspectionPage.py | fcb93df49f85437644129f9d77b68672c1416e9e | []
| no_license | https://github.com/yazeedtaweel/appumSample | 51f35459361b63c783683b3d9e61f86f634a0f60 | 4a14c84f7d4611576fdeb01fd7d58d483224cc28 | refs/heads/main | 2023-05-01T12:37:08.947048 | 2021-05-12T15:07:51 | 2021-05-12T15:07:51 | 366,756,360 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from Locators.createInspectionPageLocators import *
from conftest import *
class CreateInspectionPage:
def __init__(self, driver):
self.driver = driver
self.wait = WebDriverWait(self.driver, pageElementsTimeOut)
def click_start_inspection(self):
self.wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, start_Inspection_Button_CSS)))
self.driver.find_element_by_css_selector(start_Inspection_Button_CSS).click()
def get_start_inspection_text(self):
self.wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, start_Inspection_Button_CSS)))
return self.driver.find_element_by_css_selector(start_Inspection_Button_CSS).text
def check_start_inspection_button_visibility(self):
try:
return self.driver.find_element_by_css_selector(start_Inspection_Button_CSS).is_displayed()
except:
return 0
| UTF-8 | Python | false | false | 1,075 | py | 24 | createInspectionPage.py | 22 | 0.730233 | 0.729302 | 0 | 27 | 38.814815 | 105 |
wchao403/flask-project | 17,695,265,288,741 | bc4707f9689c3317ed7e6e6a1fe2bbc2a8cf22e6 | 060e6da873a00318c58bbe4d99a3d2f9ab16ea55 | /config.py | ba8c8951e79246129f7a4706bd448030f95cb697 | []
| no_license | https://github.com/wchao403/flask-project | e36956da259de0d174e166e9d737f314485dbcec | 3d4cebaf503131221326c0dda2e90d91d8228978 | refs/heads/master | 2023-02-04T21:33:54.187632 | 2020-12-25T01:52:19 | 2020-12-25T01:52:19 | 321,391,572 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | import redis
class Config():
"""mysq的配置信息"""
USERNAME = 'root'
PASSWORD = 'root'
HOST = '127.0.0.1'
PORT = '3306'
DATABASE = 'home'
DB_URL = 'mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8'.format(USERNAME, PASSWORD, HOST, PORT, DATABASE)
SQLALCHEMY_DATABASE_URI = DB_URL
SQLALCHEMY_TRACK_MODIFICATIONS = False
"""redis的配置信息"""
REDIS_HOST = '127.0.0.1'
REDIS_PORT = 6379
"""flask-session,保存在不同的服务器中,是否签名,是否可以被别人知晓"""
SESSION_TYPE = 'redis'
SESSION_REDIS = redis.Redis(host=REDIS_HOST, port=REDIS_PORT)
SESSION_USE_SIGNER = True
SECRET_KEY = 'ASDFFSASEFEOFIGN'
PERMANENT_SESSION_LIFETIOME = 8640 # 有效时间为一天单位是秒,不设置即为永久登录
class DevConfig(Config):
"""开发环境:用于编写和调试项目代码"""
DEBUG = True
class ProConfig(Config):
"""生产环境:用于项目线上部署运行"""
config_map = {
'dev': DevConfig,
'pro': ProConfig
}
| UTF-8 | Python | false | false | 1,051 | py | 18 | config.py | 17 | 0.627986 | 0.599545 | 0 | 39 | 21.538462 | 107 |
bettyzry/leetcode | 10,642,928,981,356 | f4646537195fbf5dd021e9520c2cc805465fd86e | 50b67aa6afc92184661dbd2807c24a78e08e88db | /2分式化简.py | 5345f6c8644458c9b9c307dfcf7c3dcafce01db3 | []
| no_license | https://github.com/bettyzry/leetcode | 88d4a4091b1edd5473d80d402a2674916bb9ac7d | 89f44b711ea1788f1a25fcd07a974a22539587ef | refs/heads/master | 2021-07-12T09:02:59.850266 | 2020-10-12T10:50:44 | 2020-10-12T10:50:44 | 214,312,068 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | class Solution:
def fraction(self, cont: List[int]) -> List[int]:
down = cont[-1]
up = 1
for i in reversed(cont[:-1]):
up = i*down + up
re = down
down = up
up = re
hcf = self.hcf(down, up)
down = down/hcf
up = up/hcf
answer = [int(down), int(up)]
return answer
def hcf(self, x, y):
if x > y:
smaller = y
else:
smaller = x
for i in range(1,smaller + 1):
if((x % i == 0) and (y % i == 0)):
hcf = i
return hcf
| UTF-8 | Python | false | false | 618 | py | 31 | 2分式化简.py | 31 | 0.398058 | 0.386731 | 0 | 25 | 23.72 | 53 |
medvedeffalexandr/algorithms_and_data_structures | 8,787,503,110,913 | f2fae6cb0e4b2614f6d0536996a6a7f54280e92b | 56fdd2d98f01f4abc431372e20e1330d241c4707 | /stack.py | 15211ac48b6e0271247ddb026cd8208face1bb8c | []
| no_license | https://github.com/medvedeffalexandr/algorithms_and_data_structures | 5845d70b85d3c7ce81ec53edfd627c2c0b8c5e8f | 6a36e2a76f4f9fe004ef21a004bc3359465809d4 | refs/heads/master | 2017-11-11T15:35:20.541735 | 2017-03-07T05:30:17 | 2017-03-07T05:30:17 | 83,986,206 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | class Stack:
def __init__(self):
self.items = []
#проверяет стек на пустоту. Параметры не требуются, возвращает булево значение.
def isEmpty(self):
return self.items == []
#добавляет новый элемент на вершину стека. В качестве параметра выступает элемент; функция ничего не возвращает.
def push(self, item):
self.items.append(item)
#удаляет верхний элемент из стека. Параметры не требуются, функция возвращает элемент. Стек изменяется.
def pop(self):
return self.items.pop()
#возвращает верхний элемент стека, но не удаляет его. Параметры не требуются, стек не модифицируется.
def peek(self):
return self.items[len(self.items)-1]
#возвращает количество элементов в стеке. Параметры не требуются, тип результата - целое число.
def size(self):
return len(self.items)
#Реализация класса Stack с использованием списков Python
#Для согласования с сущностью стека верхний элемент будет ставиться на самое правое место в списке.
########################################################################################################################
class Stack:
def __init__(self):
self.items = []
def isEmpty(self):
return self.items == []
def push(self, item):
self.items.insert(0,item)
def pop(self):
return self.items.pop(0)
def peek(self):
return self.items[0]
def size(self):
return len(self.items)
#Важно отметить, что мы можем выбрать реализацию стека через список, где вершиной считается первый, а не последний элемент.
#В этом случае предыдущие методы append и pop работать не будут.
#Мы должны будем явно использовать pop и insert для позиции с индексом 0 (первый элемент в списке).
| UTF-8 | Python | false | false | 2,543 | py | 1 | stack.py | 1 | 0.632778 | 0.63 | 0 | 51 | 34.294118 | 123 |
BlueGranite/tpc-ds-dataset-generator | 11,192,684,805,409 | e6dd63bbce38aaeeb262823f5ba12959d25aa39c | 07f82964ddb84acef1e9c2f98cc4a20811822601 | /notebooks/TPC-DS-GenerateData.py | 5eb78ff5f0ec295fd8356aa934676e76c9833b83 | []
| no_license | https://github.com/BlueGranite/tpc-ds-dataset-generator | 6efd7eb494a16ebf338426ec1455667d24d9e135 | 180e511b25e866d7994d1a1a1be752d4bb98648e | refs/heads/master | 2022-01-09T23:25:24.847744 | 2022-01-03T19:15:55 | 2022-01-03T19:15:55 | 245,227,348 | 15 | 14 | null | false | 2022-01-03T19:15:56 | 2020-03-05T17:33:01 | 2021-11-18T21:57:49 | 2022-01-03T19:15:55 | 2,270 | 8 | 5 | 1 | HTML | false | false | # Databricks notebook source
# DBTITLE 1,Generate TPC-DS data
# MAGIC %md
# MAGIC Generating data at larger scales can take hours to run, and you may want to run the notebook as a job.
# MAGIC
# MAGIC The cell below generates the data. Read the code carefully, as it contains many parameters to control the process. See the <a href="https://github.com/databricks/spark-sql-perf" target="_blank">Databricks spark-sql-perf repository README</a> for more information.
# COMMAND ----------
# MAGIC %scala
# MAGIC import com.databricks.spark.sql.perf.tpcds.TPCDSTables
# MAGIC
# MAGIC // Set:
# MAGIC val scaleFactor = "1" // scaleFactor defines the size of the dataset to generate (in GB).
# MAGIC val scaleFactoryInt = scaleFactor.toInt
# MAGIC
# MAGIC val scaleName = if(scaleFactoryInt < 1000){
# MAGIC f"${scaleFactoryInt}%03d" + "GB"
# MAGIC } else {
# MAGIC f"${scaleFactoryInt / 1000}%03d" + "TB"
# MAGIC }
# MAGIC
# MAGIC val fileFormat = "parquet" // valid spark file format like parquet, csv, json.
# MAGIC val rootDir = s"/mnt/datalake/raw/tpc-ds/source_files_${scaleName}_${fileFormat}"
# MAGIC val databaseName = "tpcds" + scaleName // name of database to create.
# MAGIC
# MAGIC // Run:
# MAGIC val tables = new TPCDSTables(sqlContext,
# MAGIC dsdgenDir = "/usr/local/bin/tpcds-kit/tools", // location of dsdgen
# MAGIC scaleFactor = scaleFactor,
# MAGIC useDoubleForDecimal = false, // true to replace DecimalType with DoubleType
# MAGIC useStringForDate = false) // true to replace DateType with StringType
# MAGIC
# MAGIC tables.genData(
# MAGIC location = rootDir,
# MAGIC format = fileFormat,
# MAGIC overwrite = true, // overwrite the data that is already there
# MAGIC partitionTables = false, // create the partitioned fact tables
# MAGIC clusterByPartitionColumns = false, // shuffle to get partitions coalesced into single files.
# MAGIC filterOutNullPartitionValues = false, // true to filter out the partition with NULL key value
# MAGIC tableFilter = "", // "" means generate all tables
# MAGIC numPartitions = 4) // how many dsdgen partitions to run - number of input tasks.
# MAGIC
# MAGIC // Create the specified database
# MAGIC sql(s"create database $databaseName")
# MAGIC
# MAGIC // Create the specified database
# MAGIC sql(s"create database $databaseName")
# MAGIC
# MAGIC // Create metastore tables in a specified database for your data.
# MAGIC // Once tables are created, the current database will be switched to the specified database.
# MAGIC tables.createExternalTables(rootDir, fileFormat, databaseName, overwrite = true, discoverPartitions = true)
# MAGIC
# MAGIC // Or, if you want to create temporary tables
# MAGIC // tables.createTemporaryTables(location, fileFormat)
# MAGIC
# MAGIC // For Cost-based optimizer (CBO) only, gather statistics on all columns:
# MAGIC tables.analyzeTables(databaseName, analyzeColumns = true)
# COMMAND ----------
# DBTITLE 1,View TPC-DS data
# examine data
df = spark.read.parquet("/mnt/datalake/raw/tpc-ds/source_files_001TB_parquet/customer")
display(df)
# COMMAND ----------
# MAGIC %md
# MAGIC
# MAGIC ###Sample Results
# MAGIC Below are a few sample results from generating data at the 1 and 1000 scale.
# MAGIC
# MAGIC | File Format | Generate Column Stats | Number of dsdgen Tasks | Partition Tables | TPC-DS Scale | Databricks Cluster Config | Duration | Storage Size |
# MAGIC | ----------- | --------------------- | ---------------------- | ---------------- | ------------ | --------------------------------------- | -------- | ------------ |
# MAGIC | csv | no | 4 | no | 1 | 1 Standard_DS3_v2 worker, 4 total cores | 4.79 min | 1.2 GB |
# MAGIC | parquet | yes | 4 | no | 1 | 1 Standard_DS3_v2 worker, 4 total cores | 5.88 min | 347 MB |
# MAGIC | json | no | 4 | no | 1 | 1 Standard_DS3_v2 worker, 4 total cores | 7.35 min | 5.15 GB |
# MAGIC | parquet | yes | 1000 | yes | 1000 | 4 Standard_DS3_v2 worker, 16 total cores | 4 hours | 333 GB |
| UTF-8 | Python | false | false | 4,097 | py | 3 | TPC-DS-GenerateData.py | 2 | 0.666829 | 0.648279 | 0 | 80 | 50.2125 | 271 |
sp-fm/python-lifecycle-training | 1,494,648,660,125 | 0ae753c6d7c48b5bd934217bc38ed4f27e93e4b8 | 45f532b5f1fd6c075a56fa50b864748ff826d123 | /tests/calculator/test_cli.py | 0eece463c5a4fbbafd55545b27730ec45fe8e08c | [
"MIT"
]
| permissive | https://github.com/sp-fm/python-lifecycle-training | 7b85558d72472af14752cfbcc6a3961dfba963a3 | 27e6d0d7c0da9d98813e3eb8ca7dd9f3b645956c | refs/heads/master | 2023-01-03T01:30:34.721725 | 2022-07-22T06:46:54 | 2022-07-22T06:46:54 | 301,319,612 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null | import pytest
import python_lifecycle_training
from python_lifecycle_training.calculator.cli import Main
class TestCalculatorCLI:
@staticmethod
@pytest.mark.parametrize(
"env",
[
"development",
"production",
],
)
def test_log(caplog, env):
python_lifecycle_training.ENV = env
Main(env=env)
assert env in caplog.text
| UTF-8 | Python | false | false | 408 | py | 40 | test_cli.py | 8 | 0.610294 | 0.610294 | 0 | 20 | 19.4 | 57 |
divanoLetto/Multi_Agents_Control_System_Formation | 6,244,882,489,089 | 649a433e566f8f0fe24ef9b700f761c763b12895 | 74da557b1da6b91b5df3a7414fa1c235abeba695 | /LineBuilder.py | 57277a9b940cc23621a537dcee968520451a9e77 | []
| no_license | https://github.com/divanoLetto/Multi_Agents_Control_System_Formation | c2c7d9020a0ddf865d18655ad203e1a2f0b4a92a | 8d2153addbd0a5d8afa8a6a556bd26230d35ca65 | refs/heads/master | 2023-03-26T12:44:14.915643 | 2021-03-26T22:48:00 | 2021-03-26T22:48:00 | 224,838,713 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from matplotlib import pyplot as plt
from matplotlib.widgets import TextBox
import time
import numpy as np
from Formation import *
from Formation import FreeFormationDisplacementSingleIntegrator
from FormationNote import *
from utils import *
class LineBuilderPoint:
def __init__(self, line, num_robots):
self.line = line
self.xs = list(line.get_xdata())
self.ys = list(line.get_ydata())
self.cid = line.figure.canvas.mpl_connect('button_press_event', self)
self.count = 0
self.num_robots = num_robots
def __call__(self, event):
if self.count < self.num_robots:
print('click', event)
if event.inaxes != self.line.axes: return
self.xs.append(event.xdata)
self.ys.append(event.ydata)
self.line.set_data(self.xs, self.ys)
self.line.figure.canvas.draw()
self.count += 1
def deleteLine(self):
self.line.remove()
class LineBuilderLine:
def __init__(self, line, ptsFormation):
self.line = line
self.xs = list(line.get_xdata())
self.ys = list(line.get_ydata())
self.cid = line.figure.canvas.mpl_connect('button_press_event', self)
self.boleano = True
self.ptsFormation = ptsFormation
self.connectLineasFormations = []
self.lineRemember = []
self.count = 0
self.currentPoint = None
def get_line_remember(self):
return self.lineRemember
def __call__(self, event):
if event.button != 3 and self.boleano:
# print('click', event)
if event.inaxes != self.line.axes: return
maybePoint = is_close_to_a_point(self.ptsFormation, [event.xdata, event.ydata])
if maybePoint != [] and are_different_point(maybePoint, self.currentPoint):
if self.count == 0:
self.currentPoint = maybePoint
else:
self.lineRemember.append([self.currentPoint[0], maybePoint[0]])
self.currentPoint = maybePoint
self.xs.append(event.xdata)
self.ys.append(event.ydata)
print("itsapoint!")
self.line.set_data(self.xs, self.ys)
self.line.figure.canvas.draw()
self.count += 1
else:
self.boleano = False
def deleteLine(self):
self.line.remove()
def line_builder_formation(num_robots):
print("line builder formation function")
axes = plt.gca()
line_builder_distance_list = []#vettore dlle linee usato per cancellarle alla fine
axes.set_title("You will set the formation for " + str(num_robots) + " robots, click to set line segment")
plt.gcf().canvas.draw()
line, = axes.plot([], [], marker='o', linestyle='None', linewidth=2, markersize=5) # empty line
linebuilder = LineBuilderPoint(line, num_robots)
ptsFormation = np.asarray(plt.ginput(num_robots, timeout=-1))
line_builder_distance_list.append(linebuilder) # todo useless?
list = []
for i in range(len(ptsFormation)):
elem = [i, ptsFormation[i]]
list.append(elem)
ptsFormation = list
boleano = False
axes.set_title("You will set the costraints between points")
line_remember_total = []
while boleano is False:
axes.set_title("Click left to make costraint, click right to stop")
plt.gcf().canvas.draw()
line, = axes.plot([], [], linestyle='solid', linewidth=2) # empty line
linebuilder = LineBuilderLine(line, ptsFormation)
justForFun = np.asarray(plt.ginput(200, timeout=-1, mouse_stop=3, mouse_pop=2))
axes.set_title("Click left to add more costraint, press the keybar to stop")
plt.gcf().canvas.draw()
line_remember_total.extend(linebuilder.get_line_remember())
line_builder_distance_list.append(linebuilder)
boleano = plt.waitforbuttonpress()
axes.set_title("Submit the formation")
plt.gcf().canvas.draw()
print(ptsFormation)
print(line_remember_total)
struct ={}
struct["points"] = ptsFormation
struct["lines"] = line_remember_total
return struct
| UTF-8 | Python | false | false | 4,202 | py | 20 | LineBuilder.py | 19 | 0.617563 | 0.613279 | 0 | 114 | 35.859649 | 110 |
anyahayda/course_project | 16,999,480,598,961 | ac77fc445d6cac9941515584daedcc5c55af7c94 | adeeadf81ff77bb2c12b81f4779b9cf7a2d11d83 | /trains/train_searcher.py | 63a0cb1190be0f729bab35413c1e8c7026b4dafc | []
| no_license | https://github.com/anyahayda/course_project | 096b693dfeb5ff61c63f14a4327959826d19f7ce | 6a7b2c809ef98fd87cfebb3fd4b7040144aebaf2 | refs/heads/master | 2021-01-24T03:19:47.914403 | 2018-05-27T15:35:20 | 2018-05-27T15:35:20 | 122,884,808 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from flask import Flask, render_template, url_for, request
from search.analizer import Searcher
app = Flask(__name__, static_url_path='/static')
DEFAULT_FORM_DATA = {'from': '',
'to': '',
'price_up_to': 'Any',
'fixed_date': '',
'from_date': '',
'to_date': ''}
@app.route('/')
def home():
return render_template('index.html', result={}, error=None,
form_data=DEFAULT_FORM_DATA, cheapest_ticket=None)
@app.route('/search', methods=['GET', 'POST'])
def search():
result = {}
error = None
cheapest_ticket = None
form_data = request.form
if form_data:
result, cheapest_ticket, error = Searcher().search(form_data)
else:
form_data = DEFAULT_FORM_DATA
return render_template('index.html', result=result,
error=error, form_data=form_data,
cheapest_ticket=cheapest_ticket)
if __name__ == '__main__':
app.run()
| UTF-8 | Python | false | false | 1,049 | py | 7 | train_searcher.py | 5 | 0.524309 | 0.524309 | 0 | 38 | 26.605263 | 77 |
JetBrains/intellij-community | 11,914,239,318,987 | 91ba8a662631fc02ad164029058a9d4fe3591af4 | eb9f655206c43c12b497c667ba56a0d358b6bc3a | /python/testData/postfix/editable/editableTemplateConcreteTypeInapplicable.py | c5d5152aef9f083e1385449cadfe6c8b9d68ff74 | [
"Apache-2.0"
]
| permissive | https://github.com/JetBrains/intellij-community | 2ed226e200ecc17c037dcddd4a006de56cd43941 | 05dbd4575d01a213f3f4d69aa4968473f2536142 | refs/heads/master | 2023-09-03T17:06:37.560889 | 2023-09-03T11:51:00 | 2023-09-03T12:12:27 | 2,489,216 | 16,288 | 6,635 | Apache-2.0 | false | 2023-09-12T07:41:58 | 2011-09-30T13:33:05 | 2023-09-12T03:37:30 | 2023-09-12T06:46:46 | 4,523,919 | 15,754 | 4,972 | 237 | null | false | false | def _():
x = set()
x.foo<caret>
| UTF-8 | Python | false | false | 40 | py | 127,182 | editableTemplateConcreteTypeInapplicable.py | 70,394 | 0.4 | 0.4 | 0 | 3 | 12.333333 | 16 |
kyurchenko/IDAPythonForUEFI | 11,175,504,939,641 | c485d137c5b460adb23f9320a9deb5e57b69eeb9 | b2a7e92054d37ef3e4454096864e2910d2e243d3 | /GUIDFinder/Structures.py | 96ed05e1c93002ce3fabe918da926f33d7a8f50b | []
| no_license | https://github.com/kyurchenko/IDAPythonForUEFI | f3fa57a0ace2ef9d49f4fc9bc41c0dbadcfe0bb1 | 6ca4e3d07d2a97bc58b2a02314f507ab24a38088 | refs/heads/master | 2020-03-09T17:02:23.537631 | 2018-07-19T07:54:06 | 2018-07-19T07:54:06 | 128,900,073 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # coding: cp866
from idaapi import add_struc, get_struc_id
from idc import AddStrucMember
# Добавляет все структуры в базу IdaPro разбираемого модуля
def _build_struc():
add_struc(-1,'GUID',0)
guid_id = get_struc_id('GUID')
AddStrucMember(guid_id,"Data1", 0, 0x20000400, -1, 4)
AddStrucMember(guid_id,"Data2", 0X4, 0x10000400, -1, 2)
AddStrucMember(guid_id,"Data3", 0X6, 0x10000400, -1, 2)
AddStrucMember(guid_id,"Data4", 0X8, 0x000400, -1, 8)
| UTF-8 | Python | false | false | 511 | py | 5 | Structures.py | 2 | 0.695931 | 0.571734 | 0 | 12 | 37.916667 | 59 |
linhx13/leetcode-code | 16,999,480,581,218 | 2e7c96ee5673b8f5f12871dfd0ad0c82ef630685 | afdeedfb743fbb149d36c14cfad65feaf03acd21 | /code/834-sum-of-distances-in-tree.py | 7ba464b22b93a2456474b4b4466b35db4b78405b | []
| no_license | https://github.com/linhx13/leetcode-code | f16cd4a0d35be34c41b86715fc9f3e8ec4b0a577 | c71574acfc68174a091c1751f10985b8f5737a1f | refs/heads/master | 2021-07-04T03:45:20.030275 | 2021-06-09T13:55:18 | 2021-06-09T13:55:18 | 70,423,464 | 0 | 1 | null | false | 2019-08-01T09:37:49 | 2016-10-09T18:48:33 | 2016-10-09T18:48:33 | 2019-08-01T09:36:16 | 8 | 0 | 1 | 0 | null | false | false | from typing import List
from collections import defaultdict
class Solution:
def sumOfDistancesInTree(self, N: int,
edges: List[List[int]]) -> List[int]:
graph = defaultdict(set)
for e in edges:
graph[e[0]].add(e[1])
graph[e[1]].add(e[0])
nodes = [1] * N
dists = [0] * N
def dfs(u, visited):
if u in visited:
return
visited.add(u)
for v in graph[u]:
if v in visited:
continue
dfs(v, visited)
nodes[u] += nodes[v]
dists[u] += nodes[v] + dists[v]
dfs(0, set())
res = [0] * N
def dfs2(u, prev, visited):
if u in visited:
return
visited.add(u)
res[u] = dists[u]
if prev != -1:
res[u] += res[prev] - nodes[u] - dists[u] + N - nodes[u]
for v in graph[u]:
if v in visited:
continue
dfs2(v, u, visited)
dfs2(0, -1, set())
return res
if __name__ == '__main__':
N = 6
edges = [[0, 1], [0, 2], [2, 3], [2, 4], [2, 5]]
print(Solution().sumOfDistancesInTree(N, edges))
| UTF-8 | Python | false | false | 1,293 | py | 883 | 834-sum-of-distances-in-tree.py | 856 | 0.41686 | 0.397525 | 0 | 49 | 25.387755 | 72 |
lightstrike/culination | 9,835,475,139,090 | 065e1029d5ac1014804e1c4decb76a1726aec1b1 | 9b2dc6b3b44caf53b6f5d3c2333df824a01294fe | /conf/staging.py | 8a1306d84a1539471e42a6dff6f1d8b8c54f70f6 | []
| no_license | https://github.com/lightstrike/culination | 5243cdd32ecf1ee8a9df88c510fbbff3b273ac66 | a36bafdba5f730de9096be88f4ac314b899159f4 | refs/heads/master | 2021-01-18T02:53:49.899309 | 2014-03-06T23:21:57 | 2014-03-06T23:21:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from production import *
DEBUG = True
| UTF-8 | Python | false | false | 39 | py | 110 | staging.py | 27 | 0.74359 | 0.74359 | 0 | 3 | 12 | 24 |
SpCrazy/crazy | 5,231,270,216,167 | 748ceca0b09f1655054cc066ac1fa2e0d36fed37 | 8c89354f77e55e4006392864660f675aa58586ad | /code/SpiderDay02_process/urllib_learn/urlopen_demo.py | fae9d06c63c529099b0445686bb62ff51ac6ecf6 | []
| no_license | https://github.com/SpCrazy/crazy | d77510d8b67c53b156e628d6a26cd34d72d4e00a | 7d088007b21d3167e689ab92084f6f44f0fb951e | refs/heads/master | 2021-06-21T10:41:48.547291 | 2019-12-06T16:20:23 | 2019-12-06T16:20:23 | 190,356,526 | 5 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from urllib.request import urlopen
url = "http://sohu.com"
response = urlopen(url)
print("响应状态码是:",response.status)
print("响应内容是:",response.read().decode())
| UTF-8 | Python | false | false | 187 | py | 210 | urlopen_demo.py | 182 | 0.714286 | 0.714286 | 0 | 7 | 21.714286 | 40 |
dancergraham/csv_parsing | 4,922,032,523,170 | 33b3989e4474c10cdbac68bc278650565c52d4ec | 8aacd6383fe83844e45a176e53cdc6da2dd2bc57 | /main.py | 2f9d91f4724d38d385b2c5906bc3af4724942a64 | []
| no_license | https://github.com/dancergraham/csv_parsing | 0188cca18e411cc74ad5ed89435edc794c49f79d | 208072ba0d16c11cb569a8bf21f52795d8fcdb29 | refs/heads/master | 2020-12-08T20:38:54.541617 | 2020-01-10T11:21:29 | 2020-01-10T11:21:29 | 233,089,327 | 0 | 0 | null | true | 2020-01-10T16:42:42 | 2020-01-10T16:42:41 | 2020-01-10T11:21:57 | 2020-01-10T11:21:55 | 97 | 0 | 0 | 0 | null | false | false | import csv
from models.pitot_measure import PitotMeasureModel
from tkinter import filedialog
from lib.csv_parser import CSVParser
from os import path
from functools import reduce
#########################################################################
## PROGRAM PARAMETERS ##
#########################################################################
# Correction coeff. of the reference delta P measure
A_REF = 1
# correction coeff. of the gradient delta P measure
A_Z = 0.987644543
# Head coeff. of the Pitot reference
K_REF = 1.029
# Head coeff. of the Pitot reference
K_Z = 1
# air density in kg/m³
RHO = 1.196
#########################################################################
## END PROGRAM PARAMETERS ##
#########################################################################
def flatten(listOfLists):
return reduce(list.__add__, listOfLists)
def transformation_pipeline(csv_file_stream, model):
# Parse CSV content to get an array of data
model_parser = CSVParser(
output_model=model.parsing_output_model, separator="\t")
parsed_model_measures = model_parser.parse(
csv_file_stream)
# Sanitize
sanitized_model_measures = model.sanitize(parsed_model_measures)
# Compute
model_computed_measures = model.compute(sanitized_model_measures)
return model_computed_measures
def get_altitude_from_filename(filename):
return int(path.basename(filename).replace('h', '').replace('_u17', '').replace('.csv', ''))
def main(args=None):
# Open a file selection dialog, restrict to CSV extensions
csv_file_streams = filedialog.askopenfiles(filetypes=[('CSV files', '.csv'), ('All files', '*')])
pipeline_outputs = flatten([
transformation_pipeline(
csv_file_stream,
PitotMeasureModel(
z=get_altitude_from_filename(csv_file_stream.name),
a_ref=A_REF,
a_z=A_Z,
rho=RHO,
k_ref=K_REF,
k_z=K_Z
)
)
for csv_file_stream in csv_file_streams])
# Exploit result, create csv result file
output_file = filedialog.asksaveasfilename(
filetypes=[('CSV files', '.csv'), ('All files', '*')],
defaultextension='.csv')
with open(output_file, "w", newline="") as result_file:
result_writer = csv.writer(result_file)
result_writer.writerow(list(pipeline_outputs[0]._fields))
for row in pipeline_outputs:
result_writer.writerow(list(row))
if __name__ == "__main__":
main()
| UTF-8 | Python | false | false | 2,749 | py | 1 | main.py | 1 | 0.530932 | 0.522198 | 0 | 86 | 29.953488 | 101 |
terratenff/math-tools | 16,965,120,847,623 | 4f0f178e820475e195296ab908d9a2236aed8070 | 4c7c1e7cf19398f193171b1ebcb38daacadbafcc | /unit_testing.py | a725722d32c2db2682f23387172456ef9835cd15 | [
"MIT"
]
| permissive | https://github.com/terratenff/math-tools | d2fc9d3c22a650723e68fc768ee8cf271c974918 | c55e68319f78800e4d206b385e09629c3c2196eb | refs/heads/master | 2020-06-23T14:56:34.869070 | 2019-09-15T15:28:10 | 2019-09-15T15:28:10 | 198,656,003 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
"""
unit_testing.py:
Contains unit tests for the application.
"""
import unittest
from os import getcwd
import numpy as np
import math_modules.module_selector as selector
import utilities.arithmetics.arithmetics as ar
__author__ = "terratenff"
__credits__ = ["terratenff"]
__license__ = "MIT"
__version__ = "0.0.1"
__maintainer__ = "terratenff"
__status__ = "Prototype"
class TestMiscActions(unittest.TestCase):
"""
Class of miscellaneous unit tests.
"""
pass
class TestArithmetics(unittest.TestCase):
"""
Class of unit tests for the utility module 'arithmetics'.
"""
# def setUp(self):
# pass
def test_valid_inputs_scalar(self):
# TODO
self.fail("TODO")
def test_valid_inputs_vector(self):
# TODO
self.fail("TODO")
def test_valid_inputs_matrix(self):
# TODO
self.fail("TODO")
def test_zero_elements_scalar(self):
# TODO
self.fail("TODO")
def test_zero_elements_vector(self):
# TODO
self.fail("TODO")
def test_zero_elements_matrix(self):
# TODO
self.fail("TODO")
def test_different_dimensions_scalar(self):
# TODO
self.fail("TODO")
def test_different_dimensions_vector(self):
# TODO
self.fail("TODO")
def test_different_dimensions_matrix(self):
# TODO
self.fail("TODO")
class TestMathModules(unittest.TestCase):
    """Unit tests for the application's math modules ('matrices', 'statistics')."""

    def module_tester(self, key, base_path):
        """Run one math module's bundled self-tests and report each result
        as a subtest.

        :param key: Name of the module.
        :param base_path: Absolute path to the module.
        """
        label = "test_" + key
        # The module body sits at base_path; its self-tests sit in a
        # 'unit_tests' subdirectory (Windows-style path separators).
        module_under_test = selector.select_module(base_path, suffix=".start")
        checker = selector.select_module(base_path + "\\unit_tests", suffix=".test")
        outcomes = checker.tests(module_under_test.FUNCTIONS, [], test_all=True)
        for fn_name, per_fn in outcomes.items():
            # Skipped cases are not failures; drop them before asserting.
            del per_fn["skipped"]
            for case_name, verdict in per_fn.items():
                with self.subTest(msg=label + " - " + fn_name + " - " + case_name):
                    self.assertEqual(verdict == "passed", True,
                                     msg="[[ " + case_name + ": " + verdict + " ]]")

    def test_matrices(self):
        """Run the self-tests shipped with the 'matrices' math module."""
        self.module_tester("matrices", getcwd() + "\\math_modules\\matrices")

    def test_statistics(self):
        """Run the self-tests shipped with the 'statistics' math module."""
        self.module_tester("statistics", getcwd() + "\\math_modules\\statistics")
# Run the whole suite when this file is executed directly.
if __name__ == "__main__":
    unittest.main()
| UTF-8 | Python | false | false | 3,198 | py | 37 | unit_testing.py | 19 | 0.595997 | 0.595059 | 0 | 119 | 25.87395 | 107 |
Arraiz/sgvls | 11,836,929,881,035 | c15a8d4f3974e5522c2051ac7a9be5bfb6afc0b6 | d91f66137c426387288fc2748fabcd30d60e9d23 | /src/main/python/dialogs/pure/NewPureSignal.py | fc8a2f806c5c43d5cd5de99cac14482e7d074549 | []
| no_license | https://github.com/Arraiz/sgvls | 815b689ffae03398000610073efe1960f3fdcd9b | 2278dac715a2870ff476c8200d2d0166d7a84998 | refs/heads/master | 2020-04-07T22:06:29.439491 | 2018-12-11T19:11:21 | 2018-12-11T19:11:21 | 158,755,051 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui/NewPureSignal.ui'
#
# Created by: PyQt5 UI code generator 5.10.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_PureSginalDialog(object):
    """Auto-generated (pyuic5) UI definition for the "Pure Signal" dialog.

    Do not edit by hand: per the header warning, this file is regenerated
    from ui/NewPureSignal.ui and manual changes will be lost.
    """
    def setupUi(self, PureSginalDialog):
        """Build the dialog's widget tree: a plot preview on top and
        amplitude/frequency/phase spin boxes below."""
        PureSginalDialog.setObjectName("PureSginalDialog")
        PureSginalDialog.resize(591, 573)
        # OK/Cancel button box at the bottom-right.
        self.buttonBox = QtWidgets.QDialogButtonBox(PureSginalDialog)
        self.buttonBox.setGeometry(QtCore.QRect(370, 540, 221, 41))
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")
        # pyqtgraph preview plot across the top of the dialog.
        self.PreviewPlot = PlotWidget(PureSginalDialog)
        self.PreviewPlot.setGeometry(QtCore.QRect(0, 0, 591, 281))
        self.PreviewPlot.setObjectName("PreviewPlot")
        # Container widget holding the parameter form below the plot.
        self.layoutWidget = QtWidgets.QWidget(PureSginalDialog)
        self.layoutWidget.setGeometry(QtCore.QRect(180, 290, 271, 251))
        self.layoutWidget.setObjectName("layoutWidget")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.layoutWidget)
        self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.verticalLayout = QtWidgets.QVBoxLayout()
        self.verticalLayout.setObjectName("verticalLayout")
        # Amplitude: 0.0 .. 1.0 in 0.1 steps.
        self.label = QtWidgets.QLabel(self.layoutWidget)
        self.label.setObjectName("label")
        self.verticalLayout.addWidget(self.label)
        self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_3.setObjectName("horizontalLayout_3")
        self.doubleSpinBoxAmplitude = QtWidgets.QDoubleSpinBox(self.layoutWidget)
        self.doubleSpinBoxAmplitude.setMaximum(1.0)
        self.doubleSpinBoxAmplitude.setSingleStep(0.1)
        self.doubleSpinBoxAmplitude.setObjectName("doubleSpinBoxAmplitude")
        self.horizontalLayout_3.addWidget(self.doubleSpinBoxAmplitude)
        self.verticalLayout.addLayout(self.horizontalLayout_3)
        # Frequency: 0 .. 20000 in steps of 1.
        self.label_2 = QtWidgets.QLabel(self.layoutWidget)
        self.label_2.setObjectName("label_2")
        self.verticalLayout.addWidget(self.label_2)
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.doubleSpinBoxFrequency = QtWidgets.QDoubleSpinBox(self.layoutWidget)
        self.doubleSpinBoxFrequency.setMaximum(20000.0)
        self.doubleSpinBoxFrequency.setSingleStep(1.0)
        self.doubleSpinBoxFrequency.setObjectName("doubleSpinBoxFrequency")
        self.horizontalLayout_2.addWidget(self.doubleSpinBoxFrequency)
        self.verticalLayout.addLayout(self.horizontalLayout_2)
        # Phase: 0.0 .. 1.0 in 0.1 steps.
        self.label_3 = QtWidgets.QLabel(self.layoutWidget)
        self.label_3.setObjectName("label_3")
        self.verticalLayout.addWidget(self.label_3)
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.doubleSpinBoxPhase = QtWidgets.QDoubleSpinBox(self.layoutWidget)
        self.doubleSpinBoxPhase.setMaximum(1.0)
        self.doubleSpinBoxPhase.setSingleStep(0.1)
        self.doubleSpinBoxPhase.setObjectName("doubleSpinBoxPhase")
        self.horizontalLayout.addWidget(self.doubleSpinBoxPhase)
        self.verticalLayout.addLayout(self.horizontalLayout)
        # Centered label, initially empty (20pt font); presumably filled at
        # runtime with the signal formula — confirm against the dialog logic.
        self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_4.setObjectName("horizontalLayout_4")
        self.labelFormula = QtWidgets.QLabel(self.layoutWidget)
        font = QtGui.QFont()
        font.setPointSize(20)
        self.labelFormula.setFont(font)
        self.labelFormula.setText("")
        self.labelFormula.setAlignment(QtCore.Qt.AlignCenter)
        self.labelFormula.setObjectName("labelFormula")
        self.horizontalLayout_4.addWidget(self.labelFormula)
        self.verticalLayout.addLayout(self.horizontalLayout_4)
        self.verticalLayout_2.addLayout(self.verticalLayout)
        self.retranslateUi(PureSginalDialog)
        # Wire OK/Cancel to the dialog's accept/reject slots.
        self.buttonBox.accepted.connect(PureSginalDialog.accept)
        self.buttonBox.rejected.connect(PureSginalDialog.reject)
        QtCore.QMetaObject.connectSlotsByName(PureSginalDialog)
    def retranslateUi(self, PureSginalDialog):
        """Apply the translatable UI strings (window title and labels)."""
        _translate = QtCore.QCoreApplication.translate
        PureSginalDialog.setWindowTitle(_translate("PureSginalDialog", "Pure Signal"))
        self.label.setText(_translate("PureSginalDialog", "Amplitude"))
        self.label_2.setText(_translate("PureSginalDialog", "Frequency"))
        self.label_3.setText(_translate("PureSginalDialog", "Phase"))
from pyqtgraph import PlotWidget
| UTF-8 | Python | false | false | 4,780 | py | 22 | NewPureSignal.py | 15 | 0.735774 | 0.71569 | 0 | 89 | 52.707865 | 106 |
Karor9/wd | 1,219,770,758,965 | 7e3d239eccb7ef1b20943e7dcc21bc5dfe19661a | 711fc94707ce2a95aacacf70ca01b835075a0139 | /labo7/w7z6.py | f0cb5eb98c1ff38472c52c34dab29d19825593c0 | []
| no_license | https://github.com/Karor9/wd | ab5fb7315eac2d986ddef034956164d4d0420052 | 35910c6f60281331a946f2e32828234042f1cc0c | refs/heads/master | 2021-01-25T23:52:01.830154 | 2020-05-31T19:39:21 | 2020-05-31T19:39:21 | 243,231,065 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as numpy
import math as m
def obliczCosinus(tab):
size = tab.size
copyTab = tab.ravel()
copyTab = copyTab.astype('float64')
result = tab * 0
result = result.astype('float64')
for i in range (0, size):
copyTab[i] = m.cos(copyTab[i])
result = numpy.reshape(copyTab, (2, 3))
return result
a = numpy.array([[1, 33, 64], [214, 90, 180]])
b = obliczCosinus(a)
print(b)
| UTF-8 | Python | false | false | 423 | py | 107 | w7z6.py | 104 | 0.609929 | 0.560284 | 0 | 18 | 22.5 | 46 |
daniaguirre/python | 8,572,754,734,085 | ce690585efcec2141bbdf13824add530169a8622 | c32fafa72fed47ea53e5d3bb293b6733100b6795 | /edit-files/edit_files.py | 614a0648dd3fd8122bf5065896f22177ca3f4694 | []
| no_license | https://github.com/daniaguirre/python | 263bda0269cf29640f091284c08a439a90073c8f | 938c8dd8fa7d32357fe4037058d419f849cbd93e | refs/heads/master | 2021-01-19T21:27:51.298620 | 2015-03-24T16:27:45 | 2015-03-24T16:27:45 | 29,432,211 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os
template = 'example'
f = open('example', 'w')
f.writelines(['_RWS := rec(\n',\
'isRWS := true,'])
f.close()
os.c
os.rename('example', 'example1') | UTF-8 | Python | false | false | 173 | py | 6 | edit_files.py | 4 | 0.549133 | 0.543353 | 0 | 12 | 13.5 | 32 |
SaitoTsutomu/leetcode | 6,975,026,917,840 | 252f90300c67aa58fbe9e77f4f499fef8c99cae8 | 85b8a52f1be2c4838f885f0e5a4d6963f4109dfe | /codes_/0941_Valid_Mountain_Array.py | eded6637df72ad013807cbcc12f8e575727a2743 | [
"MIT"
]
| permissive | https://github.com/SaitoTsutomu/leetcode | 4cc5bac4f983b287ec1540d188589ce3dd6e409a | 4656d66ab721a5c7bc59890db9a2331c6823b2bf | refs/heads/master | 2023-03-12T11:37:29.051395 | 2021-02-27T06:11:34 | 2021-02-27T06:11:34 | 281,815,531 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # %% [941. Valid Mountain Array](https://leetcode.com/problems/valid-mountain-array/)
# 問題:前半単調増加して後半単調減少しているかを返せ
# 解法:itertools.takewhileを用いる
class Solution:
def validMountainArray(self, A: List[int]) -> bool:
df = [j - i for i, j in zip(A, A[1:])]
n = len(list(itertools.takewhile(lambda x: x > 0, df)))
return n > 0 and n < len(df) and all(y < 0 for y in df[n:])
| UTF-8 | Python | false | false | 458 | py | 331 | 0941_Valid_Mountain_Array.py | 325 | 0.63198 | 0.614213 | 0 | 8 | 48.25 | 85 |
TatianaFilimonova/labs | 3,667,902,106,327 | 19e025c76898619db99ce8309dc42e66a96eb341 | ed00be4dd43338c39cc713247b3f7d7c04e132ee | /Laba6.Newton/Laba6.Part1.py | 37ada284bedbdf2fe28f4f6df822c20061523bb6 | []
| no_license | https://github.com/TatianaFilimonova/labs | dfdbbdbca8c7dd54c9db7605e7030db26458f82d | 2f887f1aad0b2e1d64b0f8f6124d1a263a2f2929 | refs/heads/main | 2023-06-04T06:57:36.096443 | 2021-06-24T17:49:19 | 2021-06-24T17:49:19 | 353,875,472 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from math import *
from sympy import *
# Sympy symbols for optional symbolic work (see the commented-out
# diff(fun(x)) alternative inside der); the numeric path below shadows x
# with function parameters and never uses these symbols.
x, y = symbols('x y')
def fun(x):
    """Target function for the root search: f(x) = log10(x) - 7/(2x + 6)."""
    denominator = 2 * x + 6
    return log10(x) - 7 / denominator
def der(x):
    """f'(x): derivative of fun, i.e. 14/(2x + 6)^2 + 1/(x ln 10)."""
    # Hand-derived; the same result could be obtained symbolically via
    # sympy.diff(fun(x)).
    quotient_term = 14 / (2 * x + 6) ** 2
    log_term = 1 / (x * (log(10)))
    return quotient_term + log_term
# Initial number of subdivisions handed to localisation() at the bottom of
# the script.
N = 2
def localisation(a, b, N):
    """Bracket a sign change of ``fun`` inside [a, b].

    Sweeps [a, b] in N equal steps; whenever a full sweep finds no sign
    change, the subdivision count is doubled and the sweep restarts.

    :param a: Left end of the search interval.
    :param b: Right end of the search interval.
    :param N: Initial number of subdivisions.
    :return: dict with keys "x0" and "x1" bracketing a root of fun.
    :raises ValueError: if no sign change is found even on a very fine grid
        (the original code would loop forever in that case).
    """
    x0 = a
    x1 = a + (b - a) / N
    while fun(x0) * fun(x1) >= 0:
        x0 = x1
        x1 += (b - a) / N
        # ">=" instead of the original "==": the accumulated float steps
        # rarely land on b exactly, so the equality test could sweep past b
        # indefinitely without ever refining the grid.
        if x1 >= b:
            if N > 2 ** 24:
                # fun has no detectable sign change on [a, b]; fail loudly
                # instead of refining forever.
                raise ValueError("no sign change of fun found on [a, b]")
            N *= 2
            x0 = a
            x1 = a + (b - a) / N
    return {"x0": x0, "x1": x1}
def count(x0):
    """Perform one Newton-Raphson step: x0 - f(x0)/f'(x0), as a float."""
    step = fun(x0) / (der(x0))
    return float(x0 - float(step))
def newton(interval):
    """Run Newton-Raphson from the right end of *interval* until the step
    size drops below 1e-4.

    :param interval: dict with keys "x0"/"x1" (as produced by
        ``localisation``); only the right end "x1" is used as the start.
    :return: the approximated root as a float.
    """
    # The original also read interval["x0"] into an unused local; removed.
    previous = float(interval["x1"])
    current = count(previous)
    while abs(current - previous) >= 1e-4:  # 1e-4 == pow(10, -4)
        previous, current = current, count(current)
    return current
# Search for the root of fun on [1/3, 2/3].
a = float(1 / 3)
b = float(2 / 3)
# NOTE(review): the braces build a one-element *set* around the result, so
# the output prints as "x =  {value}" — confirm the braces are intentional.
print('x = ', {newton(localisation(a, b, N))})
kiwiheretic/logos-v2 | 8,057,358,683,278 | a10e3a9b3db1aedb26e9b4b0fe2e4c8e0428d20d | b9f3beba935a41029b1cd8d8a495f25df270c697 | /cloud_memos/bot_plugin.py | 08e854634a22133334f736fdf7452a7a6f233d4b | [
"Apache-2.0"
]
| permissive | https://github.com/kiwiheretic/logos-v2 | f656c6295b018109e5ddf4c8b230414dd2395f3a | 91a02d69d40d07289130de48f5e4ad9751d678ed | refs/heads/master | 2023-07-20T08:34:47.897925 | 2023-07-16T03:24:32 | 2023-07-16T03:24:32 | 27,844,572 | 4 | 1 | Apache-2.0 | false | 2022-12-08T01:54:59 | 2014-12-10T23:20:10 | 2022-03-20T01:10:14 | 2022-12-08T01:54:58 | 9,096 | 4 | 1 | 41 | JavaScript | false | false | # test plugin
from bot.pluginDespatch import Plugin
import re
from datetime import datetime
import pytz
from django.contrib.auth.models import User
import logging
from logos.settings import LOGGING
logger = logging.getLogger(__name__)
logging.config.dictConfig(LOGGING)
from cloud_memos.models import Memo, Folder
from bot.logos_decorators import login_required
READ_SIZE = 250
class MemosPlugin(Plugin):
plugin = ('memo', 'Cloud Memos')
def __init__(self, *args, **kwargs):
# Plugin.__init__(self, *args, **kwargs)
super(MemosPlugin, self).__init__(*args, **kwargs)
self.commands = ((r'list$', self.list_memos, 'list all memos'),
(r'list unread$', self.list_unread_memos, 'list new memos'),
(r'list new$', self.list_unread_memos, 'list new memos'),
(r'send (?P<recipient>\S+) (?P<message>.*)$', self.send_memo, 'send new memos'),
(r'check$', self.check, 'check for unread memos'),
(r'read (?P<memo_id>\d+)', self.read, 'read a memo'),
(r'delete (?P<memo_id>\d+)', self.delete_memo, 'delete a memo'),
(r'folders', self.list_folders, 'List memo folders'),
)
# self.user_memos = {}
def update_user_memo_info(self, user=None, folder=None, memos = None):
if not user: return
userl = user.lower()
if userl not in self.user_memos:
self.user_memos[userl] = {}
if folder:
self.user_memos[userl].update({'folder':folder})
else:
self.user_memos[userl].update({'folder':'inbox'})
if memos:
self.user_memos[userl].update({'memos':memos})
def onSignal_login(self, source, data):
nick = data['nick']
# check for unread memos
self._check(nick)
def onSignal_logout(self, source, data):
username = data['username']
logger.debug("cloud memos: onSignal_logout " + repr(username))
# del self.user_memos[username.lower()]
def _get_memos_obj(self, nick, folder_name='inbox'):
username = self.get_auth().get_username(nick)
user = User.objects.get(username = username)
memos = Memo.objects.filter(folder__name=folder_name,
to_user__username = username.lower()).order_by('-id')
return memos
def _check(self, nick, always_respond = False):
""" check for unread memos """
username = self.get_auth().get_username(nick)
user = User.objects.get(username=username)
unread_memos = Memo.objects.filter(folder__name='inbox',
to_user = user, viewed_on__isnull = True).count()
if unread_memos > 0:
self.notice(nick,'You have %d unread memos!' % (unread_memos,))
else:
if always_respond:
self.notice(nick,'You have no unread memos!')
@login_required()
def check(self, regex, chan, nick, **kwargs):
""" check for unread memos """
self._check(nick, always_respond = True)
@login_required()
def list_folders(self, regex, chan, nick, **kwargs):
username = self.get_auth().get_username(nick)
user = User.objects.get(username = username)
for folder in Folder.objects.filter(user=user):
self.notice(nick, str(folder.id)+" " +folder.name)
self.notice(nick,'--end of list--')
@login_required()
def sel_folder(self, regex, chan, nick, **kwargs):
username = self.get_auth().get_username(nick)
user = User.objects.get(username = username)
try:
folder = Folder.objects.get(pk=regex.group('folder_id'),
user = user)
self._update_usernotes_hash(username, {'folder':folder})
self.notice(nick, "--Folder successfully opened--")
except Folder.DoesNotExist:
self.notice(nick, "--Folder does not exist--")
@login_required()
def send_memo(self, regex, chan, nick, **kwargs):
print ("Send Memo ...")
recipient = regex.group('recipient')
message = regex.group('message')
username = self.get_auth().get_username(nick)
user = User.objects.get(username__iexact = username)
try:
recip = User.objects.get(username__iexact = recipient)
except User.DoesNotExist:
self.notice(nick, "I do not know this user")
return
subject = message[:20] + "..."
Memo.send_memo(user, recip, subject, message)
self.notice(nick, "Memo sent")
@login_required()
def list_memos(self, regex, chan, nick, **kwargs):
num_to_list = 10
memos = self._get_memos_obj(nick)
if memos:
for idx, memo in enumerate(memos):
if not memo.viewed_on:
read_status = " **UNREAD** "
else:
read_status = ""
notification = str(idx) + " " + memo.from_user.username + read_status + " " + memo.subject
self.notice(nick, notification )
if idx >= num_to_list: break
else:
self.notice(nick, '** No memos found **')
@login_required()
def list_unread_memos(self, regex, chan, nick, **kwargs):
num_to_list = 10
memos = self._get_memos_obj(nick)
if memos:
idx = 0
for memo in memos:
if not memo.viewed_on:
self.notice(nick, str(idx) + " " + memo.from_user.username + " " + memo.subject)
idx += 1
if idx >= num_to_list: break
else:
self.notice(nick, '** No memos found **')
@login_required()
def read(self, regex, chan, nick, **kwargs):
logger.debug("read memos: %s %s " % (chan, nick))
memos = self._get_memos_obj(nick)
memo_id = int(regex.group('memo_id'))
try:
memo = memos[memo_id]
text = re.sub(r'\n', ' ', memo.text)
self.notice(nick, text)
memo.viewed_on = datetime.now(pytz.utc)
memo.save()
except IndexError:
self.notice(nick, "Memo not in list")
@login_required()
def delete_memo(self, regex, chan, nick, **kwargs):
memos = self._get_memos_obj(nick)
memo_id = int(regex.group('memo_id'))
logger.debug("delete memo: %s %s %d" % (chan, nick, memo_id))
try:
memo = memos[memo_id]
subject = memo.subject
memo.delete()
self.notice(nick, "Memo %d %s deleted" % (memo_id, subject))
except IndexError:
self.notice(nick, "Memo not in list")
| UTF-8 | Python | false | false | 6,966 | py | 216 | bot_plugin.py | 149 | 0.537037 | 0.535314 | 0 | 185 | 36.654054 | 106 |
mhowsmon/pyclass | 4,964,982,203,588 | 8db92a053d4d738198c4542622d703ef5e93f6b8 | a9f413939bdd8e888c2a98e2c76c39916775e4ac | /WK1/cisco_con.py | 8d836002445ea5b15ee39b0b9b61b9b581f7a865 | []
| no_license | https://github.com/mhowsmon/pyclass | 80101fcc4d6cbac5b1ad46fa6f868117bc807a2e | 89e7b9b021b9ebee818be11598b9302792fa80ba | refs/heads/master | 2022-11-22T14:01:46.295772 | 2020-07-22T02:47:35 | 2020-07-22T02:47:35 | 262,201,295 | 0 | 0 | null | false | 2020-05-08T02:23:47 | 2020-05-08T01:51:47 | 2020-05-08T02:07:59 | 2020-05-08T02:23:47 | 0 | 0 | 0 | 0 | Python | false | false | #!/usr/bin/env python
from netmiko import ConnectHandler
from getpass import getpass
device1 = {
'host':'nxos1.lasthop.io',
'username':'pyclass',
'password':'88newclass',
'device_type':'cisco_nxos',
'session_log':'my_session.txt'
}
net_connect = (ConnectHandler(**device1))
print(net_connect.find_prompt())
''' His example
import os
from getpass import getpass
from netmiko import ConnectHandler
password = os.getenv("PYNET_PASSWORD") if os.getenv("PYNET_PASSWORD") else getpass()
net_connect = ConnectHandler(
host="cisco3.lasthop.io",
username="pyclass",
password=password,
device_type="cisco_ios",
session_log="my_session.txt",
)
print(net_connect.find_prompt())
net_connect.disconnect()
'''
| UTF-8 | Python | false | false | 744 | py | 24 | cisco_con.py | 21 | 0.698925 | 0.69086 | 0 | 35 | 20.228571 | 84 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.