repo_name
stringlengths 7
111
| __id__
int64 16.6k
19,705B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 5
151
| content_id
stringlengths 40
40
| detected_licenses
sequence | license_type
stringclasses 2
values | repo_url
stringlengths 26
130
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 14.6k
687M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 12
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
10.2M
⌀ | gha_stargazers_count
int32 0
178k
⌀ | gha_forks_count
int32 0
88.9k
⌀ | gha_open_issues_count
int32 0
2.72k
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 1
class | gha_disabled
bool 1
class | content
stringlengths 10
2.95M
| src_encoding
stringclasses 5
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 10
2.95M
| extension
stringclasses 19
values | num_repo_files
int64 1
202k
| filename
stringlengths 4
112
| num_lang_files
int64 1
202k
| alphanum_fraction
float64 0.26
0.89
| alpha_fraction
float64 0.2
0.89
| hex_fraction
float64 0
0.09
| num_lines
int32 1
93.6k
| avg_line_length
float64 4.57
103
| max_line_length
int64 7
931
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ChunjingXiao/TwitterFacebook | 16,114,717,311,306 | 4b3f3b6958ff937989316bd54f72425c3ea97a30 | e8dc9bab61d14dba063d772ef1dbab31e8d0f69f | /01IdentifySameUser/4BothFacebookTwitterAllUpdate/extractFollowerLikes.py | 93067bd881e31fa78447c3c9407c88daa1c76e79 | [] | no_license | https://github.com/ChunjingXiao/TwitterFacebook | e02e9c4a779a49e2b0c5857631128b9649499bd2 | a500ae440bb2d329747a11466a0467fe794deca2 | refs/heads/master | 2020-04-26T15:53:01.329542 | 2019-03-04T02:52:27 | 2019-03-04T02:52:27 | 173,660,286 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
# update the #follower and #likes in findBothFacebookTwitterAll from the twitter profiles and facebook profiles
# inFile: findBothFacebookTwitterAll
# twitterID #followr FacebookID #likes Url
# 40053156 28 rwilderjr 0 www.facebook.com/rwilderjr
# 30693817 488 cookiemodel 0 facebook.com/cookiemodel
#inFile2: TwitterProfileData
# 'UserID','Name','ScreenName','Location','Description','Url','Protected','Follower','Friend','CreatedAt','Favourite','UtcOffset','TimeZone','Status','Lang','List'
# userID Name screenName Location Description Url Protected Follower Friend
# |::|21579600|::|Sarah Fleenor|::|SarahFleenor|::|Indianapolis|::|25.Has.......IUPUI Alum.|::|http://t.co/F0RCZAemX4|::|False|::|353|::|382|::|
# 0 1 2 3 4 5 6 7 8
# CreatedAt Favourite UtcOffset TimeZone Status Lang List lastTweetTime lastTweet
# 2009-02-22 17:18:31|::|1241|::|-14400|::|Eastern Time (US & Canada)|::|18689|::|en|::|8|::|2016-04-14 22:36:08|::|{'contributors': .... None}|;;|
# 9 10 11 12 13 14 15 16 17
#inFile3: FacebookProfileData
#|::|json format file from Facebook APIs|;;|
#outFiel: findBothFacebookTwitterAllUpdate
# twitterID #followr FacebookID #likes howMatchUserID twitterName TwitterUrl facebookName facebookUrl
# 126830575 443 127122857320666 998 FacebookFromTwitterProfiles AGNewHaven https://t.co/AOwdu58GIv agnewhaven http://www.agnewhaven.com
# 70598975 1929 11553867053 5557 FacebookFromTwitterProfiles LavishandLime http://t.co/FnESasWZhK lavishandlime URL: http://www.lavishandlime.com Twitter: http://twitter.com/LavishandLime Pinterest: http://pinterest.com/lavishandlime Blog: http://lavishandlime.blogspot.ca/
import urllib2
import urllib
import re
import datetime
import time
import sys
import os
import traceback
from urllib2 import HTTPError, Request, urlopen, URLError
import minjson
def readBothFile(inFile,outFile,twitterFollower,facebookLikes):
    # Join the Twitter/Facebook pair file (inFile) with the two profile
    # lookup tables and write one enriched tab-separated row per user that
    # has BOTH a Twitter profile and a Facebook profile.
    # NOTE(review): Python 2 code (dict.has_key); fin/fout are never closed.
    # twitterFollower[user] = [userid, #followr, screenName, link]
    fout = open(outFile,"w")
    fin = open(inFile) # findBothFacebookTwitterAll
    for current in fin:
        current = current.replace('\n','')
        curL = current.split('\t')
        twitter = curL[0]
        follower = 0
        facebook = curL[2]
        likes = 0
        # Flags: was each side of the pair found in its lookup table?
        isTwitter = 0
        isFacebook = 0
        if (twitterFollower.has_key(twitter)):
            follower = twitterFollower[twitter][1]
            isTwitter = 1
        if (facebookLikes.has_key(facebook)):
            likes = facebookLikes[facebook][1]
            isFacebook = 1
        if (isTwitter == 1 and isFacebook ==1 ): # select users who have twitter profiles and facebook profiles
            #fout.write( twitterFollower[twitter][0] + '\t' + str(follower) + '\t' + facebookLikes[facebook][0] + '\t' + str(likes) + '\t' + curL[4] + '\n')
            fout.write( twitterFollower[twitter][0] + '\t' + str(follower) + '\t' + facebookLikes[facebook][0] + '\t' + str(likes) + '\t' + curL[4] )
            fout.write( '\t' + twitterFollower[twitter][2] + '\t' + twitterFollower[twitter][3] + '\t' + facebookLikes[facebook][2] + '\t' + facebookLikes[facebook][3] +'\n')
def extractTwitterFollower(inFile2,twitterFollower):
    # Parse one TwitterProfileData file (rows framed as |::|...|;;|) and fill
    # twitterFollower, keyed by BOTH user id and screen name; the first
    # occurrence of a key wins.
    # NOTE(review): Python 2 code (print statement, dict.has_key); fin is
    # never closed.
    columnMark = '|::|'
    rowMark = '|;;|\n'
    count = 0
    fin = open(inFile2) # TwitterProfileData
    for current in fin:
        # Skip malformed rows that are not framed by the expected markers.
        if not (current[0:4] == columnMark and current[-5:] == rowMark):
            continue
        count += 1  # allCount
        if(count % 100000 == 0):
            print 'twitterFollower:' + str(count)
        # Strip the leading |::| (4 chars) and trailing |;;|\n (5 chars).
        data = current[4:-5]
        data = data.replace('\n','')
        curL = data.split(columnMark)
        userId = curL[0]
        screenName = curL[2]
        follower = curL[7]
        link = curL[5]
        if (not twitterFollower.has_key(userId)): # twitterFollower[user] = [userid, #followr, screenName, link]
            twitterFollower[userId] = [userId, follower, screenName,link]
        if (not twitterFollower.has_key(screenName)):
            twitterFollower[screenName] = [userId, follower, screenName,link]
def multipleFilesTwitter(inFile2,twitterFollower):
    # Walk the directory tree rooted at inFile2 and parse every file in it
    # as TwitterProfileData, accumulating results into twitterFollower.
    # NOTE(review): Python 2 print statements.
    fileCount = 0
    for root, dirs, files in os.walk(inFile2):
        for name in files:
            fileCount += 1
            print str(fileCount) + '::currentFile::' + name
            extractTwitterFollower(os.path.join(root, name),twitterFollower)
    print 'twitterFollower is done:' + str(len(twitterFollower))
def extractFacebookLikes(inFile3,facebookLikes):
    # Parse one FacebookProfileData file (JSON payloads framed as |::|...|;;|)
    # and fill facebookLikes, keyed by BOTH Facebook user id and username.
    # facebookLikes[user] = [userid, #likes, userName, website]
    # NOTE(review): Python 2 code (print statement, dict.has_key); fin is
    # never closed.
    columnMark = '|::|'
    rowMark = '|;;|\n'
    count = 0
    fin = open(inFile3)
    for current in fin:
        # Skip rows that are not framed by the expected markers.
        if not (current[0:4] == columnMark and current[-5:] == rowMark):
            continue
        count += 1  # allCount
        if(count % 1000 == 0):
            print 'facebookLikes:' + str(count)
        data2 = current[4:-5]
        if(len(data2) <= 5): # if |::|400|;;|, continue
            continue
        # Undo the escaping applied when the JSON was stored.
        data = data2.replace("\\/","/")
        data = data.replace("\\n"," ")
        dataJsonList = minjson.safeRead(data)
        for (oneUser,dataJson) in dataJsonList.items():
            userId = dataJson['id']
            if (dataJson.has_key('username')):
                userName = dataJson['username']
            else:
                userName = oneUser
            if (dataJson.has_key('likes')):
                likes = dataJson['likes']
            else:
                likes = 0
            # '0' is the sentinel used downstream for "no website".
            website = '0'
            if (dataJson.has_key('website')):
                website = dataJson['website']
                #website = website.replace("\n",";")
                #website = website.replace("\t",";")
                #website = website.replace("  "," ")
                #website = website.replace("  "," ")
                #print 'before::' + website
                # Collapse all whitespace runs so the field stays one column.
                website = re.sub('\s+',';',website)
                website = website.replace("\\/","/")
                #website = website.replace("http://","")
                #print 'after::' + website
            if (not facebookLikes.has_key(userId)):
                facebookLikes[userId] = [userId, likes,userName,website]
            if (not facebookLikes.has_key(userName)):
                facebookLikes[userName] = [userId, likes,userName,website]
def multipleFilesFacebook(inFile3,facebookLikes):
    # Walk the directory tree rooted at inFile3 and parse every file in it
    # as FacebookProfileData, accumulating results into facebookLikes.
    # NOTE(review): Python 2 print statements.
    fileCount = 0
    for root, dirs, files in os.walk(inFile3):
        for name in files:
            fileCount += 1
            print str(fileCount) + '::currentFile::' + name
            extractFacebookLikes(os.path.join(root, name),facebookLikes)
    print 'facebookLikes is done:' + str(len(facebookLikes))
def main(argv):
    # Pipeline driver. argv layout:
    #   argv[1] pair file, argv[2] Twitter profile dir, argv[3] Facebook
    #   profile dir, argv[4] output file.
    inFile = argv[1] # findBothFacebookTwitterAll
    inFile2 = argv[2] # TwitterProfileData
    inFile3 = argv[3] # FacebookProfileData
    outFile = argv[4] # findBothFacebookTwitterAllUpdate
    twitterFollower = {} # twitterFollower[user] = [userid, #followr, screenName, link]
    multipleFilesTwitter(inFile2,twitterFollower)
    #extractFollower(inFile2,twitterFollower)
    facebookLikes = {} # facebookLikes[user] = [userid, #likes, name, link]
    multipleFilesFacebook(inFile3,facebookLikes)
    #extractLikes(inFile3,facebookLikes)
    readBothFile(inFile,outFile,twitterFollower,facebookLikes)
if __name__ == "__main__":
    # Script entry point: forwards sys.argv to main() (4 positional args).
    main(sys.argv)
| UTF-8 | Python | false | false | 7,887 | py | 21 | extractFollowerLikes.py | 10 | 0.576465 | 0.543875 | 0 | 165 | 45.781818 | 273 |
jqsheng94/Twitter-API-Samples | 9,552,007,299,229 | 646dc4f373e21b82964f06c5ec51dbb2904215a0 | 30466c096cc104a8d7cfdce87f5d9dfc72c9be24 | /sentiment analysis for tweets.py | f935019de488324eedab5a7315084f68b0d76902 | [] | no_license | https://github.com/jqsheng94/Twitter-API-Samples | bf42ad746960f255f1927b626cdee36fa18138db | 8479021de2ba2e5df48a246ce22b154af9dc9c2b | refs/heads/master | 2021-01-11T19:01:37.435533 | 2017-03-10T17:06:51 | 2017-03-10T17:06:51 | 79,296,739 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from TwitterAPI import TwitterAPI, TwitterRestPager
import json
import sys
sys.path.append('./Alchemy') # path to alchemyapi_python's folder to make the import work
from alchemyapi import AlchemyAPI
alchemyapi = AlchemyAPI()
# Twitter OAuth credentials are loaded from a local JSON secrets file.
CLIENT_SECRETS_FILE = "client_secret.json"
with open(CLIENT_SECRETS_FILE) as json_data:
    d = json.load(json_data)
    ckey = d['ckey']
    csecret = d['csecret']
    atoken = d['atoken']
    asecret = d['asecret']
api = TwitterAPI(ckey,
                 csecret,
                 atoken,
                 asecret)
# Fetch up to 10 recent tweets matching the search term, then run each
# through AlchemyAPI sentiment analysis and print the results.
SEARCH_TERM = 'donald trump'
r = api.request('search/tweets', {'q': SEARCH_TERM, 'count' : 10})
for item in r:
    if 'text' in item and 'user' in item:
        tweet = item['text']
        username = item['user']['name']
        response = alchemyapi.sentiment('html', tweet)
        if response['status'] == 'OK':
            # NOTE(review): `results` is computed but never used, and `type`
            # shadows the builtin of the same name.
            results = json.dumps(response, indent=4)
            type = response['docSentiment']['type']
            if 'score' in response['docSentiment']:
                score = response['docSentiment']['score']
            else:
                score = 0
        else:
            print('Error in sentiment analysis call: ', response['statusInfo'])
        # NOTE(review): if the very first sentiment call fails, `score` and
        # `type` are unbound here (NameError); on later failures they silently
        # reuse the previous tweet's values.
        print(username)
        print('=================Output========================')
        print(score)
        print(type)
        print(tweet)
| UTF-8 | Python | false | false | 1,376 | py | 20 | sentiment analysis for tweets.py | 19 | 0.56468 | 0.561773 | 0 | 45 | 29.444444 | 90 |
lyk19940625/MyRFBNet | 12,627,203,867,547 | 3e0f8db4c1abe519e063777b7410d7953525f8d1 | f6dd2dfdb5a374c4db12c15831475b0a641ceee4 | /data/config.py | 68347b8ad434d5dc7616204be88a376c2919684e | [
"MIT"
] | permissive | https://github.com/lyk19940625/MyRFBNet | a87a833c0dbfb454f0218cc2294d34d2206e0b1c | bf11967a91d9e75cead78bf9edc5cf067b73274c | refs/heads/master | 2022-12-14T14:22:21.041078 | 2019-08-02T03:58:01 | 2019-08-02T03:58:01 | 197,345,302 | 42 | 10 | MIT | false | 2022-11-22T02:57:26 | 2019-07-17T08:11:46 | 2022-09-15T11:46:48 | 2022-11-22T02:57:23 | 8,525 | 31 | 11 | 11 | Python | false | false | # config.py
import os.path
# gets home dir cross platform
home = os.path.expanduser("~")
ddir = os.path.join(home, "data/VOCdevkit/")
# note: if you used our download scripts, this should be right
VOCroot = ddir # path to VOCdevkit root dir
# define yourself data set class label name
CLASSES = ('__background__', 'rusty', 'nest', 'polebroken',
           'poletopleaky', 'poleleakssteel', 'pdz')
# RFB CONFIGS
# Prior-box / anchor settings for the 300x300 input variant. Each list entry
# corresponds to one detection feature map, largest first.
VOC_300 = {
    'feature_maps': [38, 19, 10, 5, 3, 1],
    'min_dim': 300,
    'steps': [8, 16, 32, 64, 100, 300],
    # 'min_sizes': [30, 60, 111, 162, 213, 264],
    'min_sizes': [21, 45, 99, 153, 207, 261],
    'max_sizes': [60, 111, 162, 213, 264, 315],
    'aspect_ratios': [[2, 3], [2, 3], [2, 3], [2, 3], [2], [2]],
    'variance': [0.1, 0.2],
    'clip': True,
}
# Prior-box / anchor settings for the 512x512 input variant (7 feature maps).
VOC_512 = {
    'feature_maps': [64, 32, 16, 8, 4, 2, 1],
    'min_dim': 512,
    'steps': [8, 16, 32, 64, 128, 256, 512],
    # 'min_sizes': [35.84, 76.8, 153.6, 230.4, 307.2, 384.0, 460.8],
    'min_sizes': [20.48, 51.2, 133.12, 215.04, 296.96, 378.88, 460.8],
    'max_sizes': [76.8, 153.6, 230.4, 307.2, 384.0, 460.8, 537.6],
    'aspect_ratios': [[2, 3], [2, 3], [2, 3], [2, 3], [2, 3], [2], [2]],
    'variance': [0.1, 0.2],
    'clip': True,
}
| UTF-8 | Python | false | false | 1,253 | py | 32 | config.py | 21 | 0.52913 | 0.351157 | 0 | 52 | 23.096154 | 72 |
AdamZhouSE/pythonHomework | 18,794,776,902,764 | 7144878b7e92d405de1dc2421e8d0999cc4c18aa | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2515/60661/261228.py | ce292a3e20722db0f6f458a03730123a7117984d | [] | no_license | https://github.com/AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | nums=eval(input())
# NOTE(review): this script reads its inputs with eval(); eval on raw stdin
# is unsafe — ast.literal_eval would accept the same literal input without
# executing arbitrary expressions.
m=eval(input())
# Binary search on the answer for "split nums into m consecutive subarrays
# minimizing the largest subarray sum": the answer lies in
# [max(nums), sum(nums)].
l,r=max(nums),sum(nums)
while l<r:
    mid=l+(r-l)//2
    # Greedy feasibility check: count how many subarrays are needed if no
    # subarray may sum above mid.
    i,count,temp=0,1,0
    while(i<len(nums)):
        temp+=nums[i]
        if temp>mid:
            temp=nums[i]
            count+=1
        i+=1
    #if count==m:
    #    print(mid)
    #    exit()
    # mid is feasible when the array splits into at most m pieces; tighten
    # the search range accordingly.
    if count<=m:
        r=mid
    else:
        l=mid+1
print(l)
gkreder/tequila_mockingbird_2 | 17,729,625,025,614 | 4dafbe26326d7191f1a5f7850c9010632ee7ce0e | 7a69c2fd074dbecbdf045a62a218d604f960d66c | /fitness_scores/jupyter/test.py | 161e56ba0b7bea3b8edab00a27cb5a897a6dc0d3 | [] | no_license | https://github.com/gkreder/tequila_mockingbird_2 | 36400523d43eebcd098a60b5a0919297b4a345b2 | 486d932d87673f420aef696c9b8e0bd4df253f19 | refs/heads/master | 2021-01-12T16:05:10.156621 | 2016-10-31T04:52:59 | 2016-10-31T04:52:59 | 71,934,059 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import fitness_lib
import pickle
import sys
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
from operator import itemgetter
# Amino-acid -> row-index mapping used for the heatmap axes; loaded once at
# import time from a pickled dict.
AMINOTONUMBER_DATA = pickle.load(open('./input_output_files/input/aminotonumber.pkl', 'rb'))
replicate = 'control'
# file_name = './input_output_files/output/' + replicate + '_full_data.pkl'
file_name = './input_output_files/output/control_full_data.pkl'
# def plot_scores(AA_scores):
#     print(AA_scores)
def plot_hmap(data, row_labels, column_labels):
    # Render `data` as a seaborn heatmap with a horizontal colorbar below it,
    # then show the figure (blocking).
    # NOTE(review): row_labels is passed as xticklabels and column_labels as
    # yticklabels — the parameter names look transposed relative to the axes;
    # confirm against the caller (plot_scores passes locations, AA labels).
    sns.set(font_scale=0.7)
    grid_kws = {"height_ratios": (.9, .05), "hspace": .3}
    f, (ax, cbar_ax) = plt.subplots(2, gridspec_kw=grid_kws)
    ax = sns.heatmap(data, ax=ax,
                     cbar_ax=cbar_ax,
                     cbar_kws={"orientation": "horizontal"},
                     xticklabels=row_labels,
                     yticklabels=column_labels,
                     linewidths = 0.5)
    plt.sca(ax)
    plt.yticks(rotation=0)
    plt.xticks(rotation=90)
    plt.show()
def plot_scores(AA_scores):
    # Build a 21 (amino acids) x 75 (sequence positions 2..76) score matrix
    # from AA_scores (a dict keyed by (location, AA) with a 'score' entry)
    # and plot it as a heatmap; unmeasured cells stay NaN.
    # NOTE(review): Python 2 code (bare print statement on the loop line).
    locs = []
    # AAs_keys = AMINOTONUMBER_DATA.keys().
    # Amino-acid labels ordered by their row index in AMINOTONUMBER_DATA.
    AA_labels = [tup[0] for tup in sorted(AMINOTONUMBER_DATA.items(), key=itemgetter(1))]
    scores = []
    # numpy.zeros((21,77))
    scores = np.empty((21,75))
    scores[:] = np.NAN
    for (loc, AA) in AA_scores:
        print loc, AA
        # Skip wild-type entries and out-of-range positions; position 2 maps
        # to matrix column 0.
        if AA != 'WT' and loc > 1 and loc < 77:
            scores[int(AMINOTONUMBER_DATA[AA]), int(loc - 2)] = AA_scores[(loc, AA)]['score']
            locs.append(loc)
    loc_labels = sorted(set(locs))
    plot_hmap(scores, loc_labels, AA_labels)
# for (loc, AA) in AA_scores:
# locs.append(loc)
# AAs.append(AA)
# scores.append(AA_scores[(loc, AA)]['score'])
# plotting_df = pd.DataFrame()
# plotting_df['Location'] = locs
# plotting_df['AA'] = AAs
# plotting_df['score'] = scores
# plotting_df = plotting_df[plotting_df['Location'] != 0]
# plotting_df = plotting_df[plotting_df['AA'] != 'WT']
# plotting_df = plotting_df.pivot(index='AA', columns='Location', values='score')
# print plotting_df.sort()
# # sns.heatmap(plotting_df)
# # plt.show()
# Load the (AA_scores, total_reads, thrown_out_N_reads,
# thrown_out_dictionary_reads) tuple produced by the fitness pipeline and
# render the heatmap.
(AA_scores, total_reads, thrown_out_N_reads, thrown_out_dictionary_reads) = pickle.load(open(file_name, 'rb'))
plot_scores(AA_scores)
# plot_scores(AA_scores)
tak8/chop-spec | 5,162,550,697,750 | c8637a70eef9f8eba4e0f510d4d145472dd372b2 | aea97eb37915563148021ed37954ebab56cb7c7e | /pass-8.py | 87204e1b54a7d4ab97fd97c4674973635e1a114a | [
"BSD-2-Clause"
] | permissive | https://github.com/tak8/chop-spec | 8941a1c54f99137729fe9817136e3e2acc50e7b4 | c813a7e664560a11db8be5669470f0ee11a37bb0 | refs/heads/master | 2021-07-06T05:08:09.603326 | 2013-11-07T17:32:50 | 2013-11-07T17:32:50 | 14,181,947 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python
#
# Pass 8: Maps each text chunk ID to its original location in spec.
#
##############################################################################
# Copyright (C) 2013 Cable Television Laboratories, Inc.
# Contact: http://www.cablelabs.com/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CABLELABS OR ITS CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
##############################################################################
import fileinput
import re
import sys
# First CLI argument is the chunk-ID map file; remove it from argv so the
# second fileinput pass below consumes only the remaining files (or stdin).
id_file = sys.argv[1]
del sys.argv[1:2]
# Map each chunk address (":1:2:3") to its "id-hash" (original location).
ids = {}
for l in fileinput.input(files = (id_file)):
    # NOTE(review): (id_file) is not a tuple (no trailing comma), but
    # fileinput accepts a bare filename string, so this works as intended.
    m = re.search('^ *<t __addr="(:[0-9:]+?)" id-hash="(.+@.+)">', l)
    if m:
        ids[m.group(1)] = m.group(2)
def lookup(m):
    """re.sub callback: rewrite a matched ``__addr`` attribute.

    When the captured address has an entry in the module-level ``ids``
    table, return the corresponding ``__addr_id`` attribute text;
    otherwise return the match unchanged.
    """
    addr = m.group(1)
    mapped = ids.get(addr)
    if mapped is None:
        return m.group(0)
    return ' __addr_id="' + mapped + '">'
# Stream the remaining input files, replacing every __addr attribute via
# lookup() (unmapped addresses pass through untouched), and write to stdout.
for l in fileinput.input():
    sys.stdout.write(re.sub(' __addr="(:[0-9:]+?)">', lookup, l))
| UTF-8 | Python | false | false | 2,109 | py | 9 | pass-8.py | 3 | 0.659554 | 0.651019 | 0 | 53 | 38.792453 | 78 |
dmikos/stepikCourse | 11,725,260,757,292 | 4f87a5a57c8bbc04eff37e6d9aa8e5d012972a24 | 4b379051aa3430eb2d8931f6055772731dcb199d | /512-Python_основы_и_применение/24458/stepik-512_24458-step15.py | 18b8cce88ecda3f32beeaafae65bf2fcaa86a1a8 | [] | no_license | https://github.com/dmikos/stepikCourse | 1416614ef51a4352374f37e86e3211c3b42cbaf6 | 3faeabfdc56cac597fb6b1495e7bb38a7f2a6816 | refs/heads/master | 2021-01-12T17:06:37.720050 | 2016-11-21T14:37:20 | 2016-11-21T14:37:20 | 69,057,420 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
#https://stepik.org/lesson/%D0%A4%D1%83%D0%BD%D0%BA%D1%86%D0%B8%D0%B8-%D0%B8-%D1%81%D1%82%D0%B5%D0%BA-%D0%B2%D1%8B%D0%B7%D0%BE%D0%B2%D0%BE%D0%B2-24459/step/15?course=Python-%D0%BE%D1%81%D0%BD%D0%BE%D0%B2%D1%8B-%D0%B8-%D0%BF%D1%80%D0%B8%D0%BC%D0%B5%D0%BD%D0%B5%D0%BD%D0%B8%D0%B5&unit=6764
def C(n, k):
    """Binomial coefficient "n choose k".

    Preserves the original recursive definition's results — C(n, 0) == 1
    for any n, and 0 whenever k > n or k < 0 — but computes the value with
    the exact multiplicative formula instead of the naive recursion
    C(n-1, k) + C(n-1, k-1), which blows up exponentially for large inputs.
    """
    if k == 0:
        return 1
    if k < 0 or k > n:
        return 0
    # Multiplicative formula: after multiplying by (n-k+i) the partial
    # product is divisible by i, so floor division stays exact.
    result = 1
    for i in range(1, k + 1):
        result = result * (n - k + i) // i
    return result
#n = int(input())
#k = int(input())
# Read "n k" from a single stdin line and print the binomial coefficient.
n, k = map(int, input().split())
print(C(n, k))
| UTF-8 | Python | false | false | 534 | py | 73 | stepik-512_24458-step15.py | 45 | 0.573034 | 0.41573 | 0 | 18 | 28.666667 | 287 |
dustine32/pthr_db_caller | 8,864,812,501,370 | 70d90d6803108df7a91519925d10594a1d1bab06 | 2f7de5811985ce5e6bd15dc445baf601bff035df | /bin/format_xml_iba_to_gaf.py | f3a5a3efc2601bf0020d8fb2ebe2e1e7f00e4276 | [] | no_license | https://github.com/dustine32/pthr_db_caller | 78227d7317fbc0db8ec033e2390bed3d4168d8d9 | 70397a69612e1156ed7686a675341606c0443228 | refs/heads/master | 2021-11-08T17:39:29.950241 | 2021-10-20T22:54:21 | 2021-10-20T22:54:21 | 192,622,742 | 0 | 0 | null | false | 2021-03-25T22:41:38 | 2019-06-18T22:54:50 | 2020-11-25T20:54:58 | 2021-03-25T22:41:36 | 258 | 0 | 0 | 2 | Python | false | false | #!/usr/bin/python3
import argparse
import os
from pthr_db_caller.models import paint, metadata
# Command-line interface for converting a PAINT IBA XML file into GO
# annotation files (GAF by default), optionally split per species.
# NOTE(review): the -s and -d help strings are missing a space before the
# continuation literal ("STDOUT" + "if", "if" + "--split_by_species").
parser = argparse.ArgumentParser()
parser.add_argument('-f', '--file_xml')
parser.add_argument('-g', '--go_aspect', help="Filepath to TSV of GO term ID -> GO aspect. "
                                              "E.g. 'GO:0009507\tcellular_component'")
parser.add_argument('-c', '--complex_termlist', help="Filepath to (pre-computed from GO ontology) list of "
                                                     "protein-containing complex (GO:0032991) and all its descendant "
                                                     "terms.")
parser.add_argument('-s', '--split_by_species', help="Filepath to 'filename,taxon_id,oscode' TSV. Write to STDOUT"
                                                     "if this option is omitted.")
parser.add_argument('-d', '--out_directory', help="Destination directory for split files. Only used if"
                                                  "--split_by_species is specified.")
parser.add_argument('-a', '--file_format', help="GO annotation format to output. Default is 'GAF' (version 2.2)")
parser.add_argument('-p', '--panther_version', help="PANTHER library version. E.g. 15.0, 16.0")
parser.add_argument('-r', '--go_release_date', help="GO release date in YYYY-MM-DD format")
parser.add_argument('-u', '--obsolete_uniprots', help="Filepath to list of PANTHER UniProt IDs not in UniProt GPI")
parser.add_argument('-b', '--ibd_file_outpath', help="If supplied, filepath to write IBD file to")
if __name__ == "__main__":
    args = parser.parse_args()
    # Output format defaults to GAF when -a is omitted.
    if args.file_format:
        file_format = args.file_format.upper()
    else:
        file_format = "GAF"
    writer = paint.PaintIbaWriter(go_aspect=args.go_aspect,
                                  complex_termlist=args.complex_termlist,
                                  file_format=file_format,
                                  obsolete_uniprots=args.obsolete_uniprots)
    anodes = paint.PaintIbaXmlParser.parse(args.file_xml)
    # Optionally emit the IBD (ancestral-node) file alongside the IBA output.
    if args.ibd_file_outpath:
        ibd_nodes = anodes.ibd_nodes()
        ibd_file = metadata.PaintIbdFile(writer=writer,
                                         panther_version=args.panther_version,
                                         go_release_date=args.go_release_date,
                                         ibd_nodes=ibd_nodes)
        ibd_file.write(args.ibd_file_outpath)
    # Split anodes by file to write to; by taxon
    if args.split_by_species:
        iba_file_data = metadata.parse_iba_metadata_file(args.split_by_species)
        # Add the catch-all, fallback file
        iba_file_data.append({"basename": "gene_association.paint_other", "taxon_id": "other", "oscode": None})
        # One output file per taxon, indexed for node routing below.
        taxon_to_file = {}
        for pif in iba_file_data:
            iba_file = metadata.PaintIbaFile(writer=writer,
                                             panther_version=args.panther_version,
                                             go_release_date=args.go_release_date,
                                             basename=pif["basename"],
                                             taxon_id=pif["taxon_id"],
                                             oscode=pif["oscode"])
            taxon_to_file[iba_file.taxon_id] = iba_file
        # Route each annotated node to its taxon's file, falling back to the
        # "other" catch-all when the taxon has no dedicated file.
        for node in anodes:
            iba_file = taxon_to_file.get(node.taxon_id)
            if iba_file is None:
                iba_file = taxon_to_file.get("other")
            iba_file.add_node(node)
        # Now that the iba_files have their annotated_nodes
        for taxon_id, iba_file in taxon_to_file.items():
            # Specify format (gaf) and outdir and
            # Only write files that actually received nodes.
            if iba_file.annotated_nodes:
                full_filepath = os.path.join(args.out_directory, iba_file.basename)
                full_filepath = "{}.{}".format(full_filepath, file_format.lower())
                print(iba_file.basename, len(iba_file.annotated_nodes))
                iba_file.write(full_filepath)
    else:
        # No species split requested: write everything via the writer.
        writer.print(anodes)
| UTF-8 | Python | false | false | 4,044 | py | 23 | format_xml_iba_to_gaf.py | 18 | 0.550198 | 0.54451 | 0 | 75 | 52.92 | 118 |
suzuki-shunsuke/ansible-role-general-installer | 15,453,292,379,268 | fcebe641d4d10de2aa9c205aaba4cc4ef502762e | 38e96590a2dd62da46eccfe544ff9063cc5cee76 | /filter_plugins/is_archived.py | edc1ecc59df3e342f1325f24d78d2814b5ead342 | [
"MIT"
] | permissive | https://github.com/suzuki-shunsuke/ansible-role-general-installer | eb888c1812c06359fadc9c904d6c1c65acbd31fe | 21a91701aa64c7cd6df66cdadafaca0e4c06f5fa | refs/heads/master | 2022-05-07T22:50:27.032947 | 2020-08-24T09:42:32 | 2020-08-24T09:42:32 | 116,222,452 | 1 | 2 | MIT | false | 2022-03-26T15:49:38 | 2018-01-04T06:18:02 | 2022-02-09T06:33:14 | 2022-03-26T15:49:38 | 120 | 1 | 1 | 14 | Python | false | false | def is_archived(filename, suffixes):
"""
"""
return any(filename.endswith(s) for s in suffixes)
class FilterModule(object):
    """Ansible filter-plugin entry point for this module's filters."""

    def filters(self):
        """Return the name -> callable mapping Ansible registers."""
        filter_map = {"is_archived": is_archived}
        return filter_map
| UTF-8 | Python | false | false | 218 | py | 9 | is_archived.py | 2 | 0.605505 | 0.605505 | 0 | 10 | 20.8 | 54 |
orenrimer/django-password-manager | 3,667,902,114,830 | 15917fc17f59032621076d8797b73c07af61ed36 | 79f294e8704ae6ff95ece9055e14bdc3b43b3506 | /password_manager/accounts/urls.py | d340cafec8956a4986904f6f464952ad5a9a4204 | [] | no_license | https://github.com/orenrimer/django-password-manager | d3d1377a22e6ee630b5f9f19097e3ff289651f33 | 88d642ec9e3d451bc1c868f8f1c6e09f77055d6c | refs/heads/main | 2023-03-05T19:20:12.247894 | 2021-02-15T21:53:01 | 2021-02-15T21:53:01 | 333,854,208 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.urls import path
from .views import (
    register_page_view,
    login_page_view,
    logout_view,
    account_page_view,
    must_authenticate_view,
    unauthorized_view,
    )
# URL namespace, e.g. reverse("accounts:login").
app_name = "accounts"
urlpatterns = [
    # Account management links
    path('register/', register_page_view, name="register"),
    path('login/', login_page_view, name="login"),
    path('my-account/', account_page_view, name="account"),
    path('logout/', logout_view, name="logout"),
    # Informational pages shown when access is denied.
    path('must-authenticate/',must_authenticate_view, name="must_authenticate"),
    path('unauthorized/',unauthorized_view, name="unauthorized"),
]
| UTF-8 | Python | false | false | 629 | py | 28 | urls.py | 13 | 0.675676 | 0.675676 | 0 | 20 | 30.45 | 80 |
lg2461/NewSchoolSystem | 3,719,441,702,092 | 7209c616c836949fe261657ef82ce6ead9fc64a2 | 266fa73b88b37b00629325fff79a56cb35f09591 | /Main/migrations/0003_auto_20190515_1648.py | e07cf1e4c4d4352ab8812b5da88327e9b98062d1 | [] | no_license | https://github.com/lg2461/NewSchoolSystem | 6914204289fbe8ab21b5c696a9250ce2505265c4 | 47fe4137e5262c545a5def32e032d0e8f06b5ef3 | refs/heads/master | 2020-05-23T14:03:40.275289 | 2019-05-15T09:17:18 | 2019-05-15T09:17:18 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 2.1.7 on 2019-05-15 08:48
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: strip five fields, then delete three models.

    Removes acc_type/department/grade/state from student and acc_type from
    teacher before dropping Account_type, Stu_ate and Student. The
    RemoveField operations are ordered ahead of the DeleteModel operations;
    do not reorder them.
    """
    dependencies = [
        ('Main', '0002_auto_20190515_1643'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='student',
            name='acc_type',
        ),
        migrations.RemoveField(
            model_name='student',
            name='department',
        ),
        migrations.RemoveField(
            model_name='student',
            name='grade',
        ),
        migrations.RemoveField(
            model_name='student',
            name='state',
        ),
        migrations.RemoveField(
            model_name='teacher',
            name='acc_type',
        ),
        migrations.DeleteModel(
            name='Account_type',
        ),
        migrations.DeleteModel(
            name='Stu_ate',
        ),
        migrations.DeleteModel(
            name='Student',
        ),
    ]
| UTF-8 | Python | false | false | 966 | py | 26 | 0003_auto_20190515_1648.py | 16 | 0.495859 | 0.463768 | 0 | 42 | 22 | 47 |
ricardobtxr/experiment-scripts | 14,491,219,663,651 | 2f26e2a09105122d0a3a808a833837c42347b2dc | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf.0/gsn-edf_ut=3.5_rd=1_rw=0.06_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=89/sched.py | 00123b2ea4873d836bb6dc1848595a1daad95e86 | [] | no_license | https://github.com/ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | -X FMLP -Q 0 -L 2 96 300
-X FMLP -Q 0 -L 2 70 400
-X FMLP -Q 0 -L 2 63 300
-X FMLP -Q 0 -L 2 59 250
-X FMLP -Q 0 -L 2 47 150
-X FMLP -Q 1 -L 2 45 175
-X FMLP -Q 1 -L 2 44 150
-X FMLP -Q 1 -L 2 42 300
-X FMLP -Q 1 -L 2 41 250
-X FMLP -Q 2 -L 1 38 250
-X FMLP -Q 2 -L 1 37 125
-X FMLP -Q 2 -L 1 33 125
-X FMLP -Q 2 -L 1 24 125
-X FMLP -Q 3 -L 1 20 175
-X FMLP -Q 3 -L 1 20 150
-X FMLP -Q 3 -L 1 19 175
-X FMLP -Q 3 -L 1 13 100
| UTF-8 | Python | false | false | 476 | py | 6,754 | sched.py | 6,466 | 0.5 | 0.25 | 0 | 17 | 27 | 27 |
Feras-1998/graduation-project | 17,282,948,439,126 | 8fcaaf8fa04f6d4e460b1729b0171ee9746f1735 | 895f5581d12379c507018f36c58b63920190f287 | /ManageStore/migrations/0001_initial.py | ebeb93b0383979c7fd778b2e395b633f2db5b40e | [] | no_license | https://github.com/Feras-1998/graduation-project | c1a4d65449b573f5c10c4059d78b423f13ad9be8 | b93e736ecc710d7ec1f31e4db30c3c5288a7bcf5 | refs/heads/master | 2023-03-19T12:06:53.199292 | 2021-03-21T17:56:55 | 2021-03-21T17:56:55 | 350,102,451 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 3.1.6 on 2021-02-06 22:02
import ManageStore.models
from django.db import migrations, models
import phonenumber_field.modelfields
class Migration(migrations.Migration):
    """Auto-generated initial schema for the ManageStore app.

    Creates the AD_imags, Section and Store models; Store carries the
    many-to-many links into the ManageOffer, ManageReview, ManageProduct
    apps, hence the cross-app dependencies below.
    """
    initial = True
    dependencies = [
        ('ManageReview', '0001_initial'),
        ('ManageOffer', '0001_initial'),
        ('ManageProduct', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='AD_imags',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('ad_imags', models.ImageField(default='AD default.png', upload_to=ManageStore.models.AD_imags.AD_imags_directory_path)),
            ],
        ),
        migrations.CreateModel(
            name='Section',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=20)),
            ],
        ),
        migrations.CreateModel(
            name='Store',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(blank=True, null=True)),
                ('name', models.CharField(max_length=50, unique=True)),
                ('image', models.ImageField(default='store default.jpg', upload_to=ManageStore.models.Store.store_directory_path)),
                ('phonesNumber', phonenumber_field.modelfields.PhoneNumberField(blank=True, help_text="Phone number must be entered in the format '+999999999'. Up to 15 digits allowed.", max_length=128, null=True, region=None, unique=True)),
                ('type', models.CharField(choices=[('HyperMarket', 'HyperMarket'), ('BigMarket', 'BigMarket')], default='HyperMarket', max_length=50)),
                ('url', models.URLField(blank=True, null=True, unique=True)),
                ('description', models.TextField(blank=True, max_length=500, null=True)),
                ('start_hours', models.TimeField(blank=True, max_length=50, null=True)),
                ('close_hours', models.TimeField(blank=True, max_length=50, null=True)),
                ('state', models.BooleanField(blank=True, default=True)),
                ('join_at', models.DateField(blank=True, null=True)),
                ('new', models.BooleanField(default=True)),
                ('ADs', models.ManyToManyField(to='ManageStore.AD_imags')),
                ('StoreOffers', models.ManyToManyField(blank=True, to='ManageOffer.Offer')),
                ('StoreReviews', models.ManyToManyField(blank=True, to='ManageReview.Review')),
                ('products', models.ManyToManyField(blank=True, to='ManageProduct.Product')),
                ('sections', models.ManyToManyField(blank=True, to='ManageStore.Section')),
            ],
        ),
    ]
| UTF-8 | Python | false | false | 2,993 | py | 116 | 0001_initial.py | 66 | 0.598396 | 0.579686 | 0 | 57 | 51.508772 | 241 |
stoic1979/django_autocomplete | 17,497,696,795,851 | 5194a7111719cbfbbf26d61c6d5bd3705815a36c | d34a02058e33fe8af56b8c24821fc7d7cdb1aba6 | /autocomp/views.py | 1fc0219882b60c54022a8258c8c5986e9965a8af | [] | no_license | https://github.com/stoic1979/django_autocomplete | eec2b6297a28f41c460e115a50a48586a93e3a11 | c32a3153b32a061855198a4043f0dce75ff4e720 | refs/heads/master | 2021-01-10T03:30:31.738947 | 2015-12-17T15:36:16 | 2015-12-17T15:36:16 | 47,451,546 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.shortcuts import render_to_response
from django.template import RequestContext
from django.shortcuts import render
from autocomp.forms import CustomerForm
from autocomp.models import Customer
from django.core import serializers
from django.http import HttpResponse
import json
def home(request):
    """Render the landing page with a blank customer form."""
    context = {'frmCustomer': CustomerForm()}
    return render_to_response('index.html', context,
                              context_instance=RequestContext(request))
def address_lookup(request):
    """Render the address auto-complete demo page (no context data needed)."""
    return render_to_response('auto_address.html', {},
                              context_instance=RequestContext(request))
def customer_lookup(request, phone):
    """Serialize customers whose phone number starts with `phone` to JSON.

    Returns an empty response body when the query or serialization fails.
    NOTE(review): errors are only printed to stdout; consider real logging.
    """
    data = []
    try:
        data = serializers.serialize("json", Customer.objects.filter(phone__startswith=phone))
        print "data=", data
    except Exception, exp:
        # Python 2 exception syntax; any failure falls through with data = []
        print exp
        print "Customer Not Found !"
    return HttpResponse(data)
def get_phones(request):
    """Return every customer's phone number as a JSON array (autocomplete source)."""
    phones = []
    try:
        for customer in Customer.objects.all():
            phones.append(customer.phone)
    except Exception, exp:
        # On any ORM error, fall through and return whatever was collected.
        print exp
        print "Phone Not Found !"
    print "phones=", phones
    return HttpResponse(json.dumps(phones))
| UTF-8 | Python | false | false | 1,172 | py | 11 | views.py | 7 | 0.698805 | 0.698805 | 0 | 39 | 28.923077 | 95 |
jose-ramirez/project_euler | 13,855,564,537,775 | 6d7374cb650be9c9c4e85ebb0f6b04ce9c6901d7 | 6238cb85fb09c9960fa3e467fc155c5cac42fbcb | /sandbox/p148_sandboxing_2.py | 20f31fe3da703fa9681133bcc1793ad365d33ca7 | [] | no_license | https://github.com/jose-ramirez/project_euler | 761afa2a28c623fd6ffcdc794cfb79e5b6b6a0d7 | 7ae482228191159b896ad87160c025aa22f6b3f9 | refs/heads/master | 2022-07-24T19:10:30.090282 | 2022-07-17T23:21:25 | 2022-07-17T23:21:25 | 15,190,382 | 0 | 0 | null | false | 2018-10-26T20:09:45 | 2013-12-14T18:20:56 | 2018-09-23T10:46:35 | 2018-10-26T20:09:45 | 247 | 0 | 0 | 0 | Python | false | null | from functools import reduce
def p_base(n, p):
    """Return the digits of n written in base p, least significant digit first."""
    if n == 0:
        return [0]
    digits = []
    while n:
        n, remainder = divmod(n, p)
        digits.append(remainder)
    return digits
def T(m, p):
    """Product of (digit + 1) over the base-p digits of m."""
    product = 1
    for digit in p_base(m, p):
        product *= digit + 1
    return product
# verifying everything is ok (should print 2361):
# print(sum([T(m, 7) for m in range(100)]))
# exploratory test 1:
def _block_sum(level, i):
    # Shared helper collapsing six copy-pasted definitions: sum of T(m, 7)
    # over the i-th block of 49 * 7 ** (level - 1) consecutive values of m.
    if level == 1:
        return sum(T(m, 7) for m in range(i * 49, (i + 1) * 49))
    return sum(_block_sum(level - 1, m) for m in range(i * 7, (i + 1) * 7))
def block_level_1(i):
    """Total of T(m, 7) for m between 49i and 49(i + 1) - 1, i >= 0."""
    return _block_sum(1, i)
def block_level_2(i):
    """Sum of the seven level-1 blocks starting at block 7i."""
    return _block_sum(2, i)
def block_level_3(i):
    """Sum of the seven level-2 blocks starting at block 7i."""
    return _block_sum(3, i)
def block_level_4(i):
    """Sum of the seven level-3 blocks starting at block 7i."""
    return _block_sum(4, i)
def block_level_5(i):
    """Sum of the seven level-4 blocks starting at block 7i."""
    return _block_sum(5, i)
def block_level_6(i):
    """Sum of the seven level-5 blocks starting at block 7i."""
    return _block_sum(6, i)
# Exploratory check: compare the sum of seven consecutive level-k blocks
# against 28 times the first block at each level (pattern hunting for the
# self-similar structure used in the problem-148 sandbox).
print(sum([block_level_3(m) for m in range(7)]) - 28 * block_level_3(0))
print(sum([block_level_4(m) for m in range(7)]) - 28 * block_level_4(0))
print(sum([block_level_5(m) for m in range(7)]) - 28 * block_level_5(0))
print(sum([block_level_6(m) for m in range(7)]) - 28 * block_level_6(0))
RobertSteinbach/EmailRulz | 19,026,705,138,013 | 76567480c4a70dd6fa29c155145ff1ff16691f38 | 14ac07b191b6927817a4c5ed21c09d502182762f | /email_rulz.py | c6dd975449433596660afb11e89e321ecd439e21 | [] | no_license | https://github.com/RobertSteinbach/EmailRulz | d9f2279bdb1456865340d42bcc27b2d9b05b2302 | 01521cf9dbbbe25deb7ebda196af2cb6f9f6e665 | refs/heads/master | 2023-04-20T05:16:40.174009 | 2021-05-11T16:12:08 | 2021-05-11T16:12:08 | 366,184,178 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # https://pypi.org/project/imap-tools/ IMAP Tools webpage
from imap_tools import MailBox, AND, OR # Needs PIP INSTALL
import imap_tools
import imaplib
import email.message
import sqlite3
import time # for sleep
import datetime
import re # maybe
import os # to get environment variables
#some even moreto test commit
# Global scope
# Get secrets from environment variables
# In Pycharm, select the "Edit Configuration" menu item in the project drop-down box in top menu bar
imapserver = os.environ.get("IMAP_SERVER")
userid = os.environ.get("IMAP_LOGIN")
pd = os.environ.get("IMAP_PWD")  # account password
myemailaddress = os.environ.get("EMAIL_ADDRESS")
#print(imapserver, userid, pd, myemailaddress) #print out credentials to make sure I got them
# Everything else
autofilefolder = "INBOX.autofile" #Everything occurs under this subfolder
runlog = [] # Build a list of events
SubFolders = [] # List of subfolders below the autofile folder.
SubfolderPrefix = autofilefolder + "."  # IMAP folder names here are dot-delimited
status = ""
ActionFields = ("from", "subject", "body")  # message fields a rule may match on
rundt = datetime.datetime.now()
extractrulesflag = False  # set when rules changed and need re-extracting
sleeptime = 300 # 300 seconds = 5 minutes between iterations
def cleanup():
    """Delete old incremental log emails from the autofile folder.

    Removal criteria (all must hold): sent from myself, subject marks an
    incremental log, dated before yesterday, and not flagged (no errors).
    Outcomes and failures are recorded in the module-level ``runlog``.
    """
    #Clean out old log files. Criteria:
    # 1. From myself
    # 2. Incremental logs
    # 3. Older than 2 days (today and yesterday)
    # 4. Unflagged (no errors)
    status = "Cleanup: delete old incremental log emails..."
    runlog.append(status)
    date_criteria = (datetime.datetime.now() - datetime.timedelta(days=1)).date()
    #print("date_criteria-", date_criteria)
    try:
        mailbox.folder.set(autofilefolder) #point mailbox to autofile folder
        numrecs = mailbox.delete(mailbox.fetch(AND(from_=[myemailaddress], subject=["Rulz Log - Incremental @"],
                                                   date_lt=date_criteria, flagged=False), mark_seen=False))
        # The server response is stringified; counting 'UID' occurrences
        # approximates how many messages were removed.
        runlog.append(str(str(numrecs).count('UID')) + " log messages removed.")
    except Exception as e:
        runlog.append("!!!FAILED to cleanup old log files from autofile folder.")
        runlog.append(str(e))
        return
    return #end of cleanup
def looper():
    """Main service loop: periodically apply rules and email an activity log.

    Each iteration: occasionally re-extract rules and clean old logs,
    refresh the subfolder list, process rule-change commands, apply the
    rules to INBOX, append the run log as an email to the autofile folder
    (flagged when any "!!!" error marker appears in the log), then sleep.
    Never returns under normal operation.
    """
    a = 1
    while a > 0:
        a += 1 # a will be the time period between iterations (e.g. 5 minutes)
        rundt = datetime.datetime.now() # update the time for this iteration
        # extract the rules on the first iteration and about every 3 days
        if a == 2 or (a % 1000 == 0):
            extract_rulz()
        # clean up the autofile folder on the first iteration and about every 8 hours
        if (a == 2) or (a % 100 == 0):
            cleanup()
        # refresh the list of subfolders; strip off the prefix
        SubFolders.clear()
        for folder_info in mailbox.folder.list(SubfolderPrefix):
            SubFolders.append(folder_info['name'].replace(SubfolderPrefix, ''))
        # check to see if any rules need to be changed
        change_rulz()
        # return
        # Process rules on INBOX
        process_rulz()
        # Dump the Event log to an email
        emailflag = "" # Assume no flags
        if str(runlog).find("!!!") > -1:
            runlog.append("Errors were found. Adding FLAG to the log.")
            # emailflag = imap_tools.MailMessageFlags.FLAGGED # Flag it there was an error (marked by !!!)
            emailflag = "\FLAGGED"
        new_message = email.message.Message()
        new_message["To"] = myemailaddress
        new_message["From"] = myemailaddress
        new_message["Subject"] = "Rulz Log - Incremental @ " + str(rundt)
        new_message.set_payload('\n'.join(map(str, runlog)))
        mailbox2.append(autofilefolder, emailflag, imaplib.Time2Internaldate(time.time()),
                        str(new_message).encode('utf-8'))
        # Dump out the event log to the screen
        print("************* EVENT LOG **************")
        for event in runlog:
            print(event)
        # Clear the run log
        runlog.clear()
        # Sleep for a while
        print("Sleeping for " + str(sleeptime) + " seconds...")
        time.sleep(sleeptime) # 600 = 10 minutes
    return # Bottom of endless loop
def extract_rulz():
    """Dump every rule from the SQLite ``rulz`` table into an email.

    The message (subject "rulz extract") is appended to the autofile
    folder; its body lists one space-separated rule per line, so it can
    be edited and sent back to replace the rule set (see change_rulz).
    """
    # status
    runlog.append("Extracting rules to email...")
    # extract the rules and send back
    sql = "select field, criteria, tofolder from rulz " \
          "order by field, criteria, tofolder;"
    cursor.execute(sql)
    rulz = cursor.fetchall()
    # for row in rulz:
    #	 print(' '.join(row))
    # print('\n'.join(' '.join(row) for row in rulz)) #Dump the recordset (SAVE THIS LINE!)
    # Create a new mail with all the rulz
    new_message = email.message.Message()
    new_message["To"] = myemailaddress
    new_message["From"] = myemailaddress
    new_message["Subject"] = "rulz extract"
    new_message.set_payload("rulz:\n" + '\n'.join(' '.join(row) for row in rulz))
    mailbox2.append(autofilefolder, '', imaplib.Time2Internaldate(time.time()),
                    str(new_message).encode('utf-8'))
    return # end of extract_rulz
def change_rulz():
    """Scan INBOX for messages from myself and apply rule-management commands.

    Three command forms are recognized:
      * subject "rulz"            -> re-extract the current rules to an email
      * subject contains "rulz extract" -> replace the whole rule table from
        the message body (lines after the "rulz:" marker)
      * forwarded mail (FW:/Fwd:) whose first body line is
        "<field> <criteria> <folder>" -> insert one new rule
    Processed messages are collected and moved to the autofile folder in one
    batch at the end; parse/SQL failures flag the message and are logged.
    """
    rundt = datetime.datetime.now()
    runlog.append(str(rundt) + " - checking for rule changes...")
    extractrulesflag = False
    msgs2move = [] # build a list of messages from myself to move. Can't move them while in the loop of
    # messages b/c it will invalidate the recordset and next loop will fail
    # Get any mail sent from myself
    try:
        mailbox.folder.set('INBOX')
        mymsgs = mailbox.fetch(AND(from_=[myemailaddress]))
    except Exception as e:
        status = "!!! ERROR fetching messages from myself. Error = " + str(e)
        runlog.append(status)
        return
    for msg in mymsgs:
        # Get the unique id
        uid = msg.uid
        if msg.subject.lower() == "rulz": ### DUMP RULES TO EMAIL ####
            extractrulesflag = True
            # Move the processed msg to the autofile folder to mark as processed
            msgs2move.append(uid)
            # mailbox.move(msg.uid, autofilefolder)
            continue # onto next message
        # REPLACE all rules from email
        if 'rulz extract' in msg.subject:
            # replace all the rules being sent back
            # status
            runlog.append("Replacing rules...")
            rulzpos = msg.text.find('rulz:')
            if rulzpos < 0:
                status = "!!! ERROR - Could not find the 'rulz:' keyword. Ignoring file."
                runlog.append(status)
                mailbox.flag(msg.uid, imap_tools.MailMessageFlags.FLAGGED, True)
                msgs2move.append(uid)
                # mailbox.move(msg.uid, autofilefolder) # Move message as processed
                continue # onto the next msg
            # The rulz_new table should NOT exist, but attempt to rename just in case
            sql = "ALTER TABLE rulz_new RENAME TO rulz_new_" + datetime.datetime.now().strftime("%b%d%Y%H%M%S") + ";"
            try:
                cursor.execute(sql) # drop the temp table
                dbcon.commit()
            except:
                # don't care if this fails
                status = "Error archiving old rulz_new table. This is normal."
                # runlog.append(status)
            # Create a temp table named rulz_new
            sql = "SELECT sql FROM sqlite_master WHERE name = 'rulz';"
            try:
                cursor.execute(sql) # Get the CREATE statement for the rulz table
                sql = cursor.fetchall()[0][0].replace('rulz', 'rulz_new') # make a copy
                cursor.execute(sql) # create new table
                dbcon.commit() # do it
            except Exception as e:
                status = "!!! Error - could not find schema for 'rulz' table."
                runlog.append(status)
                mailbox.flag(msg.uid, imap_tools.MailMessageFlags.FLAGGED, True)
                msgs2move.append(uid)
                # mailbox.move(msg.uid, autofilefolder) # Move message as processed
                continue # onto the next msg
            # Build a list of tuples
            temprulz = msg.text[rulzpos + 7:].strip() # Substring everything past the rulz: tag
            temprulz = temprulz.split('\r\n') # Create a list from all the lines
            newrulz = [] # start with empty list
            for row in temprulz: # each line now needs to put into a tuple
                # newrulz.append(tuple(str(row).strip().split(' '))) # works, version #1
                # print(row)
                # https://stackoverflow.com/questions/2785755/how-to-split-but-ignore-separators-in-quoted-strings-in-python
                row_aslist = re.split(''' (?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', row) # I don't get it, but it works
                # parse it out into variables and evaluate them
                actionfield= str(row_aslist[0]).lower()
                if row_aslist[0] not in ActionFields:
                    status = "!!! ERROR parsing rule. First word not recognized - " + actionfield
                    runlog.append(status)
                    runlog.append(row)
                    continue
                row_aslist[0] = actionfield # force it to lowercase
                actioncriteria = str(row_aslist[1]) # add any validation rules here
                tofolder = str(row_aslist[2]).lower()
                if tofolder not in SubFolders:
                    status = "!!! ERROR parsing rule. Target folder not recognized - " + tofolder
                    runlog.append(status)
                    runlog.append(row)
                    continue
                row_aslist[2] = tofolder # force it to lower case
                # put the values in a tuple and then add it to the list
                newrulz.append(tuple(row_aslist))
            # newrulz=[('aaa','bbb','ccc'),('ddd','eee','fff')] # this is the expected format
            sql = "INSERT INTO rulz_new (Field,Criteria,ToFolder) VALUES (?,?,?)"
            try:
                cursor.executemany(sql, newrulz)
                dbcon.commit()
            except Exception as e:
                status = "!!! ERROR inserting new data to rulz_new. Error=" + str(e)
                runlog.append(status)
                runlog.append(sql)
                # status = 'New rules=' + str(newrulz)
                # runlog.append(status)
                for row in newrulz:
                    runlog.append(row)
                mailbox.flag(msg.uid, imap_tools.MailMessageFlags.FLAGGED, True)
                msgs2move.append(uid)
                # mailbox.move(msg.uid, autofilefolder) # Move message as processed
                continue
            # Make a copy of the current Rulz_new table
            try:
                sql = "ALTER TABLE rulz RENAME TO rulz" + datetime.datetime.now().strftime("%b%d%Y%H%M%S") + ";"
                cursor.execute(sql) # drop the temp table
                sql = "ALTER TABLE rulz_new RENAME TO rulz;"
                cursor.execute(sql)
                dbcon.commit()
            except Exception as e:
                status = "!!! ERROR attempting to archive/swap table 'rulz'. Error: " + str(e)
                runlog.append(status)
                mailbox.flag(msg.uid, imap_tools.MailMessageFlags.FLAGGED, True)
                msgs2move.append(uid)
                # mailbox.move(msg.uid, autofilefolder) # Move message as processed
                continue
            # Move the processed msg to the autofile folder to mark as processed - everything was good!
            msgs2move.append(uid)
            # mailbox.move(msg.uid, autofilefolder)
            # Extract the rules once more
            extractrulesflag = True
            continue # onto next message
        # End REPLACE rulz from email
        #####################################################
        # CREATE ONE RULE FROM FORWARDED EMAIL
        #####################################################
        if (msg.subject.find('FW:') > -1) or (msg.subject.find('Fwd:') > -1):
            body = msg.text[:msg.text.find('\r')] # get the first line of the body
            #print("Body=", body)
            # https://stackoverflow.com/questions/2785755/how-to-split-but-ignore-separators-in-quoted-strings-in-python
            body = re.split(''' (?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', body) #I don't get it, but it works
            # parse it out into variables
            actionfield = str(body[0]).lower().strip()
            actioncriteria = str(body[1])
            tofolder = str(body[2]).lower().strip()
            # print(actionfield)
            # print(actioncriteria)
            # print(tofolder)
            # If the actioncriteria was a hyperlink, then fix that
            if tofolder.find("mailto:") > -1:
                runlog.append("Criteria found to be a hyperlink. Skipping over 'mailto:' tag.")
                # tofolder = msg.text.split()[3].lower()
                tofolder = tofolder.replace("mailto:", "") #remove the mailto: tag
            status = "FW email found. ActionField='" + actionfield + "'. ActionCriteria='" + actioncriteria \
                     + "'. ToFolder='" + tofolder + "'."
            runlog.append(status)
            # make sure the first word is a valid action field (from, subject,...)
            if actionfield not in ActionFields:
                status = "WARNING - Did not find the first word '" + actionfield + "' to be a valid action field. " \
                         "Email was ignored. List of possible action fields are: " + str(ActionFields)
                runlog.append(status)
                # mailbox.flag(uid, imap_tools.MailMessageFlags.FLAGGED, True)
                # mailbox.move(uid, autofilefolder) # Move message as processed
                continue # onto next message
            # make sure the tofolder is in the list of subfolders
            if tofolder not in SubFolders:
                # print(msg.text)
                status = "!!! ERROR - Did not find autofile folder '" + tofolder + ". Email was ignored. " \
                         "List of possible folders are: " + str(SubFolders)
                runlog.append(status)
                mailbox.flag(uid, imap_tools.MailMessageFlags.FLAGGED, True)
                msgs2move.append(uid)
                # mailbox.move(uid, autofilefolder) # Move message as processed
                continue # onto next message
            # Create the rule in the database
            sql = "INSERT INTO Rulz (Field,Criteria,ToFolder) VALUES ('" + actionfield + "','" + actioncriteria + "'," \
                  "'" + tofolder + "');"
            try:
                cursor.execute(sql)
                dbcon.commit()
            except Exception as e:
                status = "!!! ERROR - Could not insert new rule. SQL='" + sql + \
                         "Error: " + str(e)
                runlog.append(status)
                mailbox.flag(uid, imap_tools.MailMessageFlags.FLAGGED, True)
                msgs2move.append(uid)
                # mailbox.move(uid, autofilefolder) # Move message as processed
            # Move the msg to the autofile folder to mark as processed
            msgs2move.append(uid)
            # mailbox.move(uid, autofilefolder)
            # Give good status news
            runlog.append("Rule added! ID=" + str(cursor.lastrowid) + ". Action Field ='" + actionfield \
                          + "'. Criteria='" + actioncriteria + "'. ToFolder='" + tofolder + "'.")
            # Extract the rules once more
            extractrulesflag = True
            continue # to the next message
    # for each message sent from myself
    # Move all the processed messages from myself
    mailbox.move(msgs2move, autofilefolder)
    # If something changed, extract the rules again
    if extractrulesflag == True:
        extract_rulz()
    return # end of change_rulz()
def process_rulz():
    """Apply every stored rule: bulk-move matching INBOX mail per folder.

    For each distinct (ToFolder, Field) pair in the ``rulz`` table, the
    criteria values are OR-ed together and matching messages are moved in
    one fetch/move call.  Failures for one folder are logged and skipped.
    """
    # make a timestamp for the run
    rundt = datetime.datetime.now()
    runlog.append(str(rundt) + " - processing rules...")
    # Get the list of "to folders" from database. Will move emails in bulk
    sql = "Select distinct ToFolder, Field from Rulz;"
    try:
        cursor.execute(sql)
        ToFolders = cursor.fetchall()
    except Exception as e:
        runlog.append("!!! ERROR - Could not get list of ToFolders. Error=" + str(e))
        return
    for row in ToFolders: # For every To Folder/Keyword....
        ToFolder = row[0]
        ToFolderVerbose = SubfolderPrefix + ToFolder
        actionField = row[1]
        sql = "select criteria from rulz where tofolder='" + ToFolder + "' AND field='" + actionField + "';"
        cursor.execute(sql)
        CriteriaSet = cursor.fetchall()
        Criteria = []
        for row2 in CriteriaSet:
            Criteria.append(row2[0].replace('"', '')) # drop any double quotes in the criteria
        # Pull the emails that have the criteria
        # for msg in mailbox.fetch(OR(from_=['little', 'newegg', 'drafthouse.com']), mark_seen=False):
        status = "Processing '" + actionField + "' rules for '" + ToFolder + "' folder..." # Criteria=" + str(Criteria)
        runlog.append(status)
        numrecs = 0
        try:
            mailbox.folder.set('INBOX')
            if actionField.lower() == "from":
                numrecs = mailbox.move(mailbox.fetch(OR(from_=Criteria), mark_seen=False), ToFolderVerbose)
            if actionField.lower() == "subject":
                numrecs = mailbox.move(mailbox.fetch(OR(subject=Criteria), mark_seen=False), ToFolderVerbose)
            if actionField.lower() == "body":
                numrecs = mailbox.move(mailbox.fetch(OR(body=Criteria), mark_seen=False), ToFolderVerbose)
            runlog.append("..." + str(str(numrecs).count('UID')) + " messages moved to folder '" + ToFolder + "'.")
        except Exception as e:
            runlog.append("!!!FAILED rule for folder '" + ToFolder + "'. Criteria=" + str(Criteria))
            runlog.append(str(e))
            continue
        # end for each keyword (from, subject, body...)
    # end for each ToFolder
    runlog.append("Rules processing completed.")
    return # end of process_rulz()
def tester():
    """Ad-hoc smoke-test hook: print a marker and terminate the process."""
    print("test completed")
    quit()
    return # end of tester()
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    # tester()
    # Give feedback on start.
    # BUGFIX: the timestamp used %M (minute) where the month was intended.
    status = 'Email_Rulz program started at: ' + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print(status)
    runlog.append(status)
    # Connect to mailbox
    mailbox = MailBox(imapserver)
    try:
        mailbox.login(userid, pd)
        runlog.append("Mailbox connected")
    except Exception as e:
        # BUGFIX: include the "!!!" marker so a failed primary connection is
        # treated as fatal, consistent with the other connection errors.
        runlog.append("!!! ERROR Mailbox NOT connected. Error=" + str(e))
    # Open a 2nd connection using the imaplib library.
    # This will allow me to create new email (imap-tools doesn't support)
    # TODO: Not eloquent to login to the imap server twice
    mailbox2 = imaplib.IMAP4_SSL(imapserver)
    try:
        mailbox2.login(userid, pd)
        runlog.append("Mailbox2 connected")
    except Exception as e:
        runlog.append("!!! ERROR Mailbox2 NOT connected. Error=" + str(e))
    # Connect to database
    try:
        dbcon = sqlite3.connect('./rulz/rulz.db')
        cursor = dbcon.cursor()
        runlog.append("Database connected")
    except Exception as e:
        runlog.append("!!! ERROR Database NOT connected. Error=" + str(e))
    # Check for errors, abend if there were any errors to this point.
    # BUGFIX: the original `"!!!" in runlog` tested list membership (an
    # element exactly equal to "!!!") and could never be true; scan each
    # logged event for the marker instead, as looper() does.
    if any("!!!" in str(event) for event in runlog):
        print("Fatal error detected! Abend. ")
        for event in runlog:
            print(event)
        quit()
    # Call the endless loop
    looper()
    # Should never get here
    print('!!!!END OF PROGRAM!!!!')
def ParkingLot():
    # Scratchpad of commented-out experiments and reference links kept for
    # future use; this function is never called and does nothing.
    """
    # print("****FOLDERS:")
    # for folder_info in mailbox.folder.list():
    #	 print("folder_info", folder_info)
    for msg in mailbox.fetch(OR(from_=['little', 'newegg', 'drafthouse.com']), mark_seen=False):
        print("***MESSAGE:")
        print("UID-", msg.uid)
        print("Subject-", msg.subject)
        print("From-", msg.from_)
        # print("To-", msg.to)
        # print("CC-", msg.cc)
        print("ReplyTo-", msg.reply_to)
        # print("Flags-", msg.flags)
        # print("Headers-", msg.headers)
        # print("Text-", msg.text)
        print("** PROCESS RULES:")
        if msg.from_.find("littlecaesars") > -1:
            print("Found little caesers in msg ", msg.uid)
            try:
                # mailbox.move(msg.uid, 'INBOX.Autofile.Junk')
                print("message moved - ", msg.uid)
            except Exception as e:
                print('Error moving message id ', msg.uid, '. Error-', e)
    #Create email via IMAP
    https://stackoverflow.com/questions/3769701/how-to-create-an-email-and-send-it-to-specific-mailbox-with-imaplib
    import imaplib
    connection = imaplib.IMAP4_SSL(HOSTNAME)
    connection.login(USERNAME, PASSWORD)
    new_message = email.message.Message()
    new_message["From"] = "hello@itsme.com"
    new_message["Subject"] = "My new mail."
    new_message.set_payload("This is my message.")
    #Python tips - How to easily convert a list to a string for display
    https://www.decalage.info/en/python/print_list
    #For statement on one line (list comprehensions)
    https://stackoverflow.com/questions/1545050/python-one-line-for-expression
    """
| UTF-8 | Python | false | false | 21,819 | py | 3 | email_rulz.py | 1 | 0.567762 | 0.562491 | 0 | 550 | 38.670909 | 124 |
locmai/FapPy | 19,198,503,835,022 | db4bf1fd50de0338d2a88ec1cf79cd3d714c4747 | d5af6206f54d7b75759cfd2cd3e60310b86a13eb | /accounts/migrations/0002_auto_20180218_1651.py | a4d580f8e857f11b7c6424864cb50df455cdeb4b | [] | no_license | https://github.com/locmai/FapPy | 6ab8e5019377abb5267be628c6bb774a41f21130 | b93d54a52fe5f3093ec2494e94a1953e9c6c7104 | refs/heads/master | 2021-04-28T17:49:34.168069 | 2018-02-19T12:22:50 | 2018-02-19T12:22:50 | 121,861,295 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 2.0.2 on 2018-02-18 09:51
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: replaces UserManagement's old
    # user_in_charge / user_under_control links with explicit
    # supervisor / user foreign keys to the swappable auth user model.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('accounts', '0001_initial'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='usermanagement',
            name='user_in_charge',
        ),
        migrations.RemoveField(
            model_name='usermanagement',
            name='user_under_control',
        ),
        migrations.AddField(
            model_name='usermanagement',
            name='supervisor',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='supervisor', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='usermanagement',
            name='user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='user', to=settings.AUTH_USER_MODEL),
        ),
    ]
| UTF-8 | Python | false | false | 1,114 | py | 34 | 0002_auto_20180218_1651.py | 21 | 0.62298 | 0.605925 | 0 | 34 | 31.764706 | 151 |
couchemar/piter-united-itgm5-slides | 10,746,008,212,024 | 4eb0b874cbc54b30bfc6f0b35df7275ca8198234 | e3e72a6ffbf662b8c775b9dc0efd4d718694c95b | /my_math/my_math/__init__.py | fa2f805f1f79710b46bfe0d33756d852bfd56d85 | [] | no_license | https://github.com/couchemar/piter-united-itgm5-slides | 3c19c463ff7d28f02be2f9b8fdb4e49a2526b61a | 93ed8570ad505c77edd26f66496156e591d85b2b | refs/heads/master | 2021-01-10T13:14:26.898236 | 2015-06-05T21:46:35 | 2015-06-05T21:46:35 | 36,254,143 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from operator import add
def plus(*args):
    """Return the sum of all arguments via repeated ``+``.

    Works for any type supporting addition (numbers, strings, lists, ...);
    raises TypeError when called with no arguments.
    """
    # BUGFIX: `reduce` is a builtin only on Python 2; import it explicitly so
    # the function also works on Python 3 (functools.reduce is identical).
    from functools import reduce
    return reduce(add, args)
| UTF-8 | Python | false | false | 71 | py | 10 | __init__.py | 4 | 0.71831 | 0.71831 | 0 | 3 | 22.666667 | 28 |
mohamed-osama-aboelkheir/Toggl_to_Gcal | 11,768,210,416,078 | ca1509d3390970856864f5c2ec90eb2cc32a018a | 78817a93e20218fb626da60c7f0b69e0be7443a2 | /Toggl_to_Gcal.py | 05ba05d55784e671089b1308ffb7d5dc914e3100 | [] | no_license | https://github.com/mohamed-osama-aboelkheir/Toggl_to_Gcal | fc969b15739c9f5415cf79ce07390db4e9b6d53c | 4ba2fc2d6e5e619c19a11292f6a677d6d83ff85c | refs/heads/master | 2016-09-06T19:26:16.239860 | 2014-12-06T21:18:42 | 2014-12-06T21:18:42 | 27,649,341 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #! /usr/bin/python
#
# DESCRIPTION:
# AUTHOR: Mohamed Osama (mohamed.osama.aboelkheir@gmail.com)
# CREATED: Wed 26-Nov-2014
# LAST REVISED: Wed 26-Nov-2014
#
##############
# DISCLAIMER #
##############
# Anyone is free to copy, modify, use, or distribute this script for any purpose, and by any means. However, Please take care THIS IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND AND YOU SHOULD USE IT AT YOUR OWN RISK.
import ConfigParser
import sys,os.path
from Get_toggl_events import Get_toggl_events
from Gcal_create_event import Gcal_create_event
class Toggl_to_Gcal:
    """Copy one day's Toggl time entries into a Google Calendar.

    All work happens in __init__: read settings.conf next to this script,
    fetch the day's Toggl entries, and create one calendar event per entry
    (optionally colored via the [Color_mapping] section).
    NOTE(review): Python 2 code (print statements); the Google OAuth
    client_id/client_secret are hard-coded below - treat as compromised and
    move them back into settings.conf.
    """
    def __init__(self):
        # Get day (expected as the first command-line argument)
        self.day=sys.argv[1]
        # Import configuration
        dir_name=os.path.dirname(os.path.realpath(__file__))
        conf_file=os.path.join(dir_name,"settings.conf")
        config = ConfigParser.ConfigParser()
        file=config.read(conf_file)
        if not file:
            print "ERROR: unable to read settings.conf file, Please run: \"python setup.py\" to fix issue"
            sys.exit(1)
        if not config.has_section("Gcal"):
            print "ERROR: Gcal settings section misssing from settings.conf file, Please run: \"python setup.py\" to fix issue"
            sys.exit(1)
        if not config.has_section("Toggl"):
            print "ERROR: Toggl settings section misssing from settings.conf file, Please run: \"python setup.py\" to fix issue"
            sys.exit(1)
        calendar_address=self.get_option(config,'Gcal','calendar_address')
        #client_id=self.get_option(config,'Gcal','client_id')
        #client_secret=self.get_option(config,'Gcal','client_secret')
        client_id='449544381405-cvobene3kc8qbtt4t1e7vba1hqe13pd7.apps.googleusercontent.com'
        client_secret='f7OgS57kU6AfaxzUiK22sQ14'
        toggl_api_token=self.get_option(config,'Toggl','toggl_api_token')
        workspace_id=self.get_option(config,'Toggl','workspace_id')
        # Get the list of events from Toggl
        events=Get_toggl_events(toggl_api_token,workspace_id,self.day).events
        #print events[1]['start']
        #print events[1]['end']
        #print events[1]['project']
        #print events[1]['description']
        # Create the events on Gcal
        cal=Gcal_create_event(calendar_address,client_id,client_secret)
        for event in events:
            color=""
            # Optional per-project color from the [Color_mapping] section.
            if config.has_section("Color_mapping"):
                if config.has_option("Color_mapping",event['project']):
                    color=config.get("Color_mapping",event['project']).lstrip('"').rstrip('"').lstrip('\'').rstrip('\'')
            print "Creating event \""+event['description']+" | "+event['project']+"\""+" from "+event['start']+" until "+event['end']
            cal.create_event(summary=event['description']+" | "+event['project'],start_time=event['start'],end_time=event['end'],colorId=color)
    def get_option(self,config,section,option):
        """Return a config value with surrounding quotes stripped; exit(1) if missing/empty."""
        if config.has_option(section,option):
            value=config.get(section,option).lstrip('"').rstrip('"').lstrip('\'').rstrip('\'')
            if value == "" or value is None:
                print "ERROR: \""+option+"\" value is missing from the settings.conf file, Please run: \"python setup.py\" to fix issue"
                sys.exit(1)
            else:
                return value
        else:
            print "ERROR: \""+option+"\" value is missing from the settings.conf file, Please run: \"python setup.py\" to fix issue"
            sys.exit(1)
# Entry point: expects the target day as the first command-line argument.
if __name__ == '__main__':
    obj=Toggl_to_Gcal()
| UTF-8 | Python | false | false | 3,194 | py | 6 | Toggl_to_Gcal.py | 4 | 0.687226 | 0.671259 | 0 | 90 | 34.477778 | 220 |
krummja/Anathema_v1 | 6,485,400,634,725 | c5dfd57abba88c2164c5b3edc737aaed1ff71824 | 027fdb52e42dbf43c8a00eee977d3805edeb0aa0 | /anathema/engine/core/options.py | 853c7325bb8ececd5b404c59e66446a62713f6d0 | [] | no_license | https://github.com/krummja/Anathema_v1 | 839b549bd8b9d07e2539d9c8fa26e602fe2d68fb | 18b4ba93f4560bcd4adb571a5851d91106508c63 | refs/heads/master | 2023-05-09T14:22:48.484669 | 2021-05-17T01:36:20 | 2021-05-17T01:36:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import tcod
from tcod.tileset import Tileset
from anathema.data import get_data
class Options:
    """Engine-wide display and world-generation constants."""
    # Root console dimensions, in tiles.
    CONSOLE_WIDTH: int = 110
    CONSOLE_HEIGHT: int = 65
    # 32x8 CP437 tilesheet loaded from the packaged data directory.
    TILESET: Tileset = tcod.tileset.load_tilesheet(get_data("font_16.png"), 32, 8, tcod.tileset.CHARMAP_CP437)
    # Window title and vertical-sync flag passed to the renderer.
    TITLE: str = "Anathema"
    VSYNC: bool = True
    # Size of the in-game stage view panel, in tiles.
    STAGE_PANEL_WIDTH: int = 72
    STAGE_PANEL_HEIGHT: int = 50
    # World-generation dimensions.
    AREA_SIZE: int = 512
    WORLD_WIDTH: int = 240
    WORLD_HEIGHT: int = 160
| UTF-8 | Python | false | false | 463 | py | 95 | options.py | 88 | 0.673866 | 0.617711 | 0 | 17 | 26.235294 | 110 |
rwheeler-7864/simplenight-api | 4,535,485,504,502 | 80856d76af0e051db96d11daef20d0da945319dc | e06c7fd594c52425ab7fc5498c07ae14daf9578b | /api/common/common_models.py | 2910a4fa5443fc9b3d3c606f577ae8e2de9ebe08 | [] | no_license | https://github.com/rwheeler-7864/simplenight-api | bc35560eca1e1c25092a1bcdc4af1633367413b8 | 602646911a0155df5b70991d1445c10cee18cd33 | refs/heads/master | 2023-03-12T03:10:51.516499 | 2021-02-25T20:40:44 | 2021-02-25T20:40:44 | 342,370,358 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import json
from typing import List, Union, TypeVar, Type
from pydantic.main import BaseModel
class SimplenightModel(BaseModel):
    """Base pydantic model for all Simplenight API types."""
    class Config:
        # Allow fields typed with classes pydantic has no built-in validator for.
        arbitrary_types_allowed = True
class BusinessContact(SimplenightModel):
    """Contact information for a business."""
    name: str
    email: str
    website: str
    address: str
    phones: List[str]  # free-form phone number strings
class BusinessLocation(SimplenightModel):
    """Geographic position plus street address of a business."""
    latitude: float
    longitude: float
    address: str
T = TypeVar("T")
def from_json(obj, cls: Type[T], many=False) -> Union[List[T], T]:
    """Deserialize JSON into instance(s) of a model class.

    Args:
        obj: JSON text (str/bytes) or an already-parsed object; with
            many=True it may also be a pre-parsed sequence of items.
        cls: Class providing parse_raw/parse_obj (e.g. a pydantic model).
        many: When True, treat obj as a JSON array and return a list.

    Returns:
        A single model instance, or a list of them when many is True.
    """
    if many:
        # BUGFIX/generalization: the old code always called json.loads(obj),
        # so an already-parsed list crashed even though the single-object
        # path accepts parsed input. Accept both forms consistently.
        items = json.loads(obj) if isinstance(obj, (str, bytes)) else obj
        return [cls.parse_obj(item) for item in items]
    if isinstance(obj, (str, bytes)):
        return cls.parse_raw(obj)
    return cls.parse_obj(obj)
def to_json(obj: Union[SimplenightModel, List[SimplenightModel]], many=False, indent=2):
    """Serialize a model (or, with many=True, a list of models) to JSON text.

    Args:
        obj: A SimplenightModel, or a list of them when many is True.
        many: When True, wrap the list in a custom-root model so the whole
            sequence is emitted as a single JSON array.
        indent: Indentation level forwarded to pydantic's .json().
    """
    class ItemList(SimplenightModel):
        # pydantic custom-root model: serializes as the bare list itself.
        __root__: List[SimplenightModel]
    if many:
        return ItemList.parse_obj(obj).json(indent=indent)
    return obj.json(indent=indent)
| UTF-8 | Python | false | false | 960 | py | 364 | common_models.py | 317 | 0.678125 | 0.677083 | 0 | 46 | 19.869565 | 88 |
dyseo/PublicLineService | 19,447,611,918,458 | c1997c772da5f71a2c9baeb0a646af778fc3a9e5 | 83884b21747856833fa9cafd267a81b59b2e4333 | /LineService/f_LiffService.py | 4998a657d1b04fee0da9aeef518d3183b923b206 | [
"Apache-2.0"
] | permissive | https://github.com/dyseo/PublicLineService | 7f05f4904f53a122ef03629d0063f7612021fe1f | fc305bcf3c2d2427f2cc5d424d2650fd947f042c | refs/heads/master | 2021-01-16T01:34:30.104417 | 2020-03-17T06:14:15 | 2020-03-17T06:14:15 | 242,927,635 | 3 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | #
# Autogenerated by Frugal Compiler (3.7.1)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
from threading import Lock
from frugal.middleware import Method
from frugal.exceptions import TApplicationExceptionType
from frugal.exceptions import TTransportExceptionType
from frugal.processor import FBaseProcessor
from frugal.processor import FProcessorFunction
from frugal.util.deprecate import deprecated
from frugal.util import make_hashable
from thrift.Thrift import TApplicationException
from thrift.Thrift import TMessageType
from thrift.transport.TTransport import TTransportException
from .ttypes import *
class Iface(object):
    """Abstract interface for the LiffService RPC service.

    Generated by the Frugal compiler; clients and server handlers
    implement these two methods.
    """
    def issueLiffView(self, ctx, request):
        """Request issuance of a LIFF view.

        Args:
            ctx: FContext
            request: LiffViewRequest
        """
        pass
    def revokeToken(self, ctx, request):
        """Revoke a previously issued token.

        Args:
            ctx: FContext
            request: RevokeTokenRequest
        """
        pass
class Client(Iface):
    def __init__(self, provider, middleware=None):
        """
        Create a new Client with an FServiceProvider containing a transport
        and protocol factory.

        Args:
            provider: FServiceProvider with TSynchronousTransport
            middleware: ServiceMiddleware or list of ServiceMiddleware
        """
        middleware = middleware or []
        if middleware and not isinstance(middleware, list):
            # Allow a single middleware object to be passed bare.
            middleware = [middleware]
        self._transport = provider.get_transport()
        self._protocol_factory = provider.get_protocol_factory()
        self._oprot = self._protocol_factory.get_protocol(self._transport)
        self._iprot = self._protocol_factory.get_protocol(self._transport)
        # Serializes writes so concurrent calls do not interleave frames.
        self._write_lock = Lock()
        middleware += provider.get_middleware()
        # Wrap each RPC entry point with the combined middleware chain.
        self._methods = {
            'issueLiffView': Method(self._issueLiffView, middleware),
            'revokeToken': Method(self._revokeToken, middleware),
        }
def issueLiffView(self, ctx, request):
"""
Args:
ctx: FContext
request: LiffViewRequest
"""
return self._methods['issueLiffView']([ctx, request])
def _issueLiffView(self, ctx, request):
self._send_issueLiffView(ctx, request)
return self._recv_issueLiffView(ctx)
def _send_issueLiffView(self, ctx, request):
oprot = self._oprot
with self._write_lock:
oprot.get_transport().set_timeout(ctx.timeout)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('issueLiffView', TMessageType.CALL, 0)
args = issueLiffView_args()
args.request = request
args.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
def _recv_issueLiffView(self, ctx):
self._iprot.read_response_headers(ctx)
_, mtype, _ = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
raise x
result = issueLiffView_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.success is not None:
return result.success
x = TApplicationException(TApplicationExceptionType.MISSING_RESULT, "issueLiffView failed: unknown result")
raise x
def revokeToken(self, ctx, request):
"""
Args:
ctx: FContext
request: RevokeTokenRequest
"""
return self._methods['revokeToken']([ctx, request])
def _revokeToken(self, ctx, request):
self._send_revokeToken(ctx, request)
self._recv_revokeToken(ctx)
def _send_revokeToken(self, ctx, request):
oprot = self._oprot
with self._write_lock:
oprot.get_transport().set_timeout(ctx.timeout)
oprot.write_request_headers(ctx)
oprot.writeMessageBegin('revokeToken', TMessageType.CALL, 0)
args = revokeToken_args()
args.request = request
args.write(oprot)
oprot.writeMessageEnd()
oprot.get_transport().flush()
def _recv_revokeToken(self, ctx):
self._iprot.read_response_headers(ctx)
_, mtype, _ = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
if x.type == TApplicationExceptionType.RESPONSE_TOO_LARGE:
raise TTransportException(type=TTransportExceptionType.RESPONSE_TOO_LARGE, message=x.message)
raise x
result = revokeToken_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
return
class Processor(FBaseProcessor):
    """Server-side processor that maps RPC names to handler invocations.

    Registers one FProcessorFunction per service method; every function
    shares the processor's write lock so response frames on the output
    protocol are serialized.
    """
    def __init__(self, handler, middleware=None):
        """
        Create a new Processor.

        Args:
            handler: Iface implementation servicing the RPCs
            middleware: ServiceMiddleware or list of ServiceMiddleware
        """
        if middleware and not isinstance(middleware, list):
            middleware = [middleware]
        super(Processor, self).__init__()
        self.add_to_processor_map('issueLiffView', _issueLiffView(Method(handler.issueLiffView, middleware), self.get_write_lock()))
        self.add_to_processor_map('revokeToken', _revokeToken(Method(handler.revokeToken, middleware), self.get_write_lock()))
class _issueLiffView(FProcessorFunction):
    """Server-side dispatcher for the issueLiffView RPC.

    Reads the args struct from the input protocol, invokes the
    middleware-wrapped handler, and writes either the result struct or an
    exception frame back to the client.
    """
    def __init__(self, handler, lock):
        super(_issueLiffView, self).__init__(handler, lock)
    def process(self, ctx, iprot, oprot):
        args = issueLiffView_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = issueLiffView_result()
        try:
            result.success = self._handler([ctx, args.request])
        except LiffException as e:
            # Declared service exception: serialized inside the result struct.
            result.e = e
        except TApplicationException as ex:
            with self._lock:
                _write_application_exception(ctx, oprot, "issueLiffView", exception=ex)
            return
        except Exception as e:
            # BUG FIX: Exception.message was removed in Python 3; accessing it
            # raised AttributeError and hid the real error. Use str(e) instead.
            # (Generated file - mirror this fix in the IDL/compiler template.)
            with self._lock:
                _write_application_exception(ctx, oprot, "issueLiffView", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
            raise
        with self._lock:
            try:
                oprot.write_response_headers(ctx)
                oprot.writeMessageBegin('issueLiffView', TMessageType.REPLY, 0)
                result.write(oprot)
                oprot.writeMessageEnd()
                oprot.get_transport().flush()
            except TTransportException as e:
                # catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
                if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
                    raise _write_application_exception(ctx, oprot, "issueLiffView", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.args[0])
                else:
                    raise e
class _revokeToken(FProcessorFunction):
    """Server-side dispatcher for the (void) revokeToken RPC.

    Reads the args struct, invokes the middleware-wrapped handler, and
    writes an empty result struct or an exception frame back to the client.
    """
    def __init__(self, handler, lock):
        super(_revokeToken, self).__init__(handler, lock)
    def process(self, ctx, iprot, oprot):
        args = revokeToken_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = revokeToken_result()
        try:
            self._handler([ctx, args.request])
        except LiffException as e:
            # Declared service exception: serialized inside the result struct.
            result.e = e
        except TApplicationException as ex:
            with self._lock:
                _write_application_exception(ctx, oprot, "revokeToken", exception=ex)
            return
        except Exception as e:
            # BUG FIX: Exception.message was removed in Python 3; accessing it
            # raised AttributeError and hid the real error. Use str(e) instead.
            # (Generated file - mirror this fix in the IDL/compiler template.)
            with self._lock:
                _write_application_exception(ctx, oprot, "revokeToken", ex_code=TApplicationExceptionType.INTERNAL_ERROR, message=str(e))
            raise
        with self._lock:
            try:
                oprot.write_response_headers(ctx)
                oprot.writeMessageBegin('revokeToken', TMessageType.REPLY, 0)
                result.write(oprot)
                oprot.writeMessageEnd()
                oprot.get_transport().flush()
            except TTransportException as e:
                # catch a request too large error because the TMemoryOutputBuffer always throws that if too much data is written
                if e.type == TTransportExceptionType.REQUEST_TOO_LARGE:
                    raise _write_application_exception(ctx, oprot, "revokeToken", ex_code=TApplicationExceptionType.RESPONSE_TOO_LARGE, message=e.args[0])
                else:
                    raise e
def _write_application_exception(ctx, oprot, method, ex_code=None, message=None, exception=None):
    """Write a TApplicationException frame for *method* to the output protocol.

    Either pass an existing *exception*, or an *ex_code*/*message* pair from
    which one is constructed. Returns the exception so callers may re-raise it.
    The caller is responsible for holding the write lock.
    """
    if exception is not None:
        x = exception
    else:
        x = TApplicationException(type=ex_code, message=message)
    oprot.write_response_headers(ctx)
    oprot.writeMessageBegin(method, TMessageType.EXCEPTION, 0)
    x.write(oprot)
    oprot.writeMessageEnd()
    oprot.get_transport().flush()
    return x
class issueLiffView_args(object):
    """Thrift argument struct for issueLiffView.

    Attributes:
     - request: LiffViewRequest (field id 1)
    """
    def __init__(self, request=None):
        self.request = request
    def read(self, iprot):
        """Deserialize this struct; unknown fields are skipped for forward compatibility."""
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = LiffViewRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()
    def write(self, oprot):
        """Serialize this struct; unset (None) fields are omitted from the wire."""
        self.validate()
        oprot.writeStructBegin('issueLiffView_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required-field constraints declared in the IDL.
        return
    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(make_hashable(self.request))
        return value
    def __repr__(self):
        L = ['%s=%r' % (key, value)
            for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class issueLiffView_result(object):
    """Thrift result struct for issueLiffView.

    Attributes:
     - success: LiffViewResponse (field id 0)
     - e: LiffException raised by the handler, if any (field id 1)
    """
    def __init__(self, success=None, e=None):
        self.success = success
        self.e = e
    def read(self, iprot):
        """Deserialize this struct; unknown fields are skipped for forward compatibility."""
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = LiffViewResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = LiffException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()
    def write(self, oprot):
        """Serialize this struct; unset (None) fields are omitted from the wire."""
        self.validate()
        oprot.writeStructBegin('issueLiffView_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required-field constraints declared in the IDL.
        return
    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(make_hashable(self.success))
        value = (value * 31) ^ hash(make_hashable(self.e))
        return value
    def __repr__(self):
        L = ['%s=%r' % (key, value)
            for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class revokeToken_args(object):
    """Thrift argument struct for revokeToken.

    Attributes:
     - request: RevokeTokenRequest (field id 1)
    """
    def __init__(self, request=None):
        self.request = request
    def read(self, iprot):
        """Deserialize this struct; unknown fields are skipped for forward compatibility."""
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = RevokeTokenRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()
    def write(self, oprot):
        """Serialize this struct; unset (None) fields are omitted from the wire."""
        self.validate()
        oprot.writeStructBegin('revokeToken_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required-field constraints declared in the IDL.
        return
    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(make_hashable(self.request))
        return value
    def __repr__(self):
        L = ['%s=%r' % (key, value)
            for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class revokeToken_result(object):
    """Thrift result struct for the void revokeToken RPC.

    Attributes:
     - e: LiffException raised by the handler, if any (field id 1)
    """
    def __init__(self, e=None):
        self.e = e
    def read(self, iprot):
        """Deserialize this struct; unknown fields are skipped for forward compatibility."""
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = LiffException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()
    def write(self, oprot):
        """Serialize this struct; unset (None) fields are omitted from the wire."""
        self.validate()
        oprot.writeStructBegin('revokeToken_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required-field constraints declared in the IDL.
        return
    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(make_hashable(self.e))
        return value
    def __repr__(self):
        L = ['%s=%r' % (key, value)
            for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
| UTF-8 | Python | false | false | 16,065 | py | 22 | f_LiffService.py | 20 | 0.568005 | 0.56564 | 0 | 488 | 31.918033 | 156 |
arcchang1236/Umbo_human_pose | 16,063,177,712,369 | 43891c3ab2673bd6b9690d40aea42928e9a48c46 | ac36a799cb32c382bd6ca55d17f861379c17e6cf | /3Dmodel/net/loss.py | 4d6c17249b917886dd43d0792bd701f6bbbd5043 | [] | no_license | https://github.com/arcchang1236/Umbo_human_pose | 8d3b0d0b5be44fb49f3e13fc6ee118ae086e1f7e | 630547653961365f733906e91a0dd787cb38161e | refs/heads/master | 2020-04-23T06:09:32.943696 | 2019-04-04T12:21:10 | 2019-04-04T12:21:10 | 170,964,198 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import torch
import numpy as np
# class JointsMSELoss(torch.nn.Module):
# def __init__(self):
# super(JointsMSELoss, self).__init__()
# self.criterion = torch.nn.MSELoss().cuda()
# def forward(self, output, target):
# num_joints = output.size(0)
# loss = torch.Tensor([0.])
# for idx in range(num_joints):
# if output[idx] == 0:
# continue
# loss += self.criterion(output[idx], target[idx]/100.0)
# return loss / num_joints
def MPJPE(predicted, target):
    """
    Mean per-joint position error ("Protocol #1"): the Euclidean distance
    between predicted and ground-truth joint positions, averaged over every
    joint (and every leading batch/frame dimension).

    Both tensors must share the same shape; the last dimension holds the
    coordinates of a single joint.
    """
    assert predicted.shape == target.shape
    per_joint_error = torch.norm(predicted - target, dim=-1)
    return torch.mean(per_joint_error)
baagaard-usgs/geomodelgrids | 1,374,389,575,031 | cffab1bedea32addd6c64dd2e316f396200b5839 | fb5b204943101746daf897f6ff6e0a12985543c3 | /tests/experiment/test_fifo.py | f90a8f26c50571bade2b0866c62a849734c65b37 | [
"LicenseRef-scancode-warranty-disclaimer",
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] | permissive | https://github.com/baagaard-usgs/geomodelgrids | 911a31ba23ca374be44873fdeb1e36a70ff25256 | 7d0db3c4ca1a83fea69ceb88f6ceec258928251a | refs/heads/main | 2023-08-03T07:52:25.727039 | 2023-07-27T21:56:19 | 2023-07-27T21:56:19 | 97,262,677 | 5 | 3 | NOASSERTION | false | 2023-03-23T03:34:45 | 2017-07-14T18:34:38 | 2023-03-15T02:21:09 | 2023-03-23T03:34:43 | 3,525 | 5 | 3 | 6 | C++ | false | false | #!/usr/bin/env python3
import os
import numpy
import threading
FILENAME_IN = "test_in.txt"
FILENAME_OUT = "test_out.txt"
def writer(data):
print("Starting writing")
with open(FILENAME_IN, "w") as fout:
numpy.savetxt(fout, data)
print("Finished writing")
return
def processor():
print("Starting processing")
with open(FILENAME_IN, "r") as fin:
data = numpy.loadtxt(fin)
with open(FILENAME_OUT, "w") as fout:
numpy.savetxt(fout, data)
print("Finished processing")
return
def reader(result):
print("Starting reading")
with open(FILENAME_OUT, "r") as fin:
data = numpy.loadtxt(fin)
result["data"] = data
print("Finished reading")
return
def mkfifo(filename):
if os.path.exists(filename):
os.unlink(filename)
os.mkfifo(filename)
return
mkfifo(FILENAME_IN)
mkfifo(FILENAME_OUT)
twrite = threading.Thread(target=writer, args=[numpy.array([1,2,3], dtype=numpy.float)])
twrite.start()
tprocess = threading.Thread(target=processor)
tprocess.start()
result = {}
tread = threading.Thread(target=reader, args=[result])
tread.start()
twrite.join()
tprocess.join()
tread.join()
print(" Result {}".format(result["data"]))
os.unlink(FILENAME_IN)
os.unlink(FILENAME_OUT)
| UTF-8 | Python | false | false | 1,289 | py | 246 | test_fifo.py | 120 | 0.664081 | 0.660978 | 0 | 60 | 20.483333 | 88 |
AnkitAutwn/Helo | 19,378,892,462,959 | a53492264f73cacc37d155272123f8fac77ed5db | fe617fd2067e7e9b228b9d4d34c9ebd1e6ef7483 | /recognise/models.py | f9db2152867ef2c95860fe0248bfde278de0510c | [] | no_license | https://github.com/AnkitAutwn/Helo | a45132c5443b0dfa0d9f5b40d7712d29f22ff17d | 46b3a50869d119e0911a278201e7633d279f0363 | refs/heads/main | 2023-02-04T05:55:33.343757 | 2020-12-29T10:58:50 | 2020-12-29T10:58:50 | 323,868,647 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.db import models
from PIL import Image
class Images(models.Model):
    """Uploaded image with a display name; oversized images are thumbnailed on save."""
    # Human-readable label used by __str__; optional (defaults to '').
    name = models.CharField(max_length=50,default='')
    # The uploaded image file (written by the configured storage backend).
    file = models.ImageField()
    def __str__(self):
        return self.name
    def save(self,*args,**kwargs):
        """Persist the model, then shrink the stored image file in place.

        super().save() must run first so the file exists on disk before PIL
        opens it via ``self.file.path``. Images taller or wider than 480px
        are thumbnailed to fit within 480x480 (aspect ratio preserved).
        NOTE(review): relies on local-filesystem storage (``.path``) —
        remote storage backends would need a different approach; confirm.
        """
        super().save(*args, **kwargs)
        img = Image.open(self.file.path)
        if img.height > 480 or img.width > 480:
            output_size = (480,480)
            img.thumbnail(output_size)
            img.save(self.file.path)
Msksgm/atcoder_msksgm_practice | 15,109,694,949,473 | e610635c046d2abf1d97e34eff67904bf470b2f7 | a35b24c8c3c5bdf861f3cda9396f2fa6795ec929 | /arc/037/B.py | d2f29c02dedf822408289a080d8c88f0829860d2 | [] | no_license | https://github.com/Msksgm/atcoder_msksgm_practice | 92a19e2d6c034d95e1cfaf963aff5739edb4ab6e | 3ae2dcb7d235a480cdfdfcd6a079e183936979b4 | refs/heads/master | 2021-08-18T16:08:08.551718 | 2020-09-24T07:01:11 | 2020-09-24T07:01:11 | 224,743,360 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def dfs(vs):
check = []
edges = []
for i in vs:
print(i)
while vs:
h, w = vs.pop(0)
edges.append(h)
if w in check:
continue
else
for v in vs:
if v[0] == h:
def main():
n, m = map(int, input().split())
vs = [list(map(int, input().split())) for _ in range(m)]
print(n, m)
print(vs)
dfs(vs)
if __name__ == "__main__":
main()
| UTF-8 | Python | false | false | 470 | py | 234 | B.py | 231 | 0.393617 | 0.389362 | 0 | 28 | 15.75 | 60 |
liousAlready/NewDream_learning | 6,004,364,281,590 | 03d87b33e5ea933c84086608ae8c07689623498c | b5479a025524c1387d3ba14f4fba7173f1d2df31 | /老师の课堂代码/day10/day10_selenium3.py | 0c844650fe18b590f0cb27843b107218f1632fcb | [] | no_license | https://github.com/liousAlready/NewDream_learning | 063f5e225d62eec8285e69bb5ba3bff850096ca3 | 7b790f675419224bfdbe1542eddc5a638982e68a | refs/heads/master | 2023-06-30T08:28:34.949464 | 2021-08-01T00:42:17 | 2021-08-01T00:42:17 | 391,489,851 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | '''
@File : day10_selenium3.py
@Time : 2021/5/16 10:57
@Author: luoman
'''
import time
# import lib
from selenium import webdriver
# Selenium element-locating strategies --- 7 methods in total
def demo1():
    """Demo of the basic locator strategies: id, name, class, tag name, link text."""
    driver = webdriver.Chrome()
    #driver.get('http://www.xmxbbs.com')
    driver.get('http://www.baidu.com')
    # 1. Locate by id
    #driver.find_element_by_id('scbar_txt').send_keys('mysql') # send_keys('mysql') types the given text
    # 2. Locate by name
    #driver.find_element_by_name('srchtxt').send_keys('python')
    # 3. Locate by the class attribute: class_name
    #driver.find_element_by_class_name("pn").click() # click() performs a mouse click
    # 4. Locate by tag name; rarely used because tag names repeat heavily
    time.sleep(1)
    #driver.find_element_by_tag_name('map').click()
    # 5. Locate by link text -- must be the text of an <a> element
    driver.find_element_by_link_text('新闻').click()
    driver.find_element_by_partial_link_text('新').click() # match on a fragment of the link text
    time.sleep(3)
# demo1()
# Method 6: xpath
# There are 6 sub-techniques for finding elements with it
def demo2():
    """Demo of the six xpath locator sub-techniques."""
    driver = webdriver.Chrome()
    driver.get('http://www.baidu.com')
    # 1. Absolute path: must start from the html tag, begins with /
    #driver.find_element_by_xpath('/html/body/div/div/div[3]/a').click() # when siblings share a tag name an index is required; indexes start at 1
    # 2. Relative path: begins with //
    # driver.find_element_by_xpath('//div/div/div[3]/a').click()
    # 3. Index-based positioning, combined with a relative or absolute path
    # 4. Locate by attribute value: '//tag[@attr="value"]';
    #    "and" / "or" combine several attributes in one locator
    # driver.find_element_by_xpath('//input[@maxlength="255"]').send_keys('selenium')
    # driver.find_element_by_xpath('//input[@maxlength="255" and @id="kw"]').send_keys('selenium')
    # 5. Fuzzy matching -- for attribute values that change dynamically or are very long:
    #    prefix: starts-with()  suffix/offset: substring()  containment: contains()
    # driver.find_element_by_xpath('//a[starts-with(@name,"tj_bri")]').click()
    # driver.find_element_by_xpath('//a[substring(@name,7)="icon"]').click() # substring(@name,7): characters from position 7 to the end of the name value
    # driver.find_element_by_xpath('//a[contains(@name,"j_bri")]').click()
    # 6. Locate by element text via text(); not only <a> tags carry text
    driver.find_element_by_xpath('//span[text()="国家卫健委派出工作组赴辽宁"]').click()
    driver.find_element_by_xpath('//span[contains(text(),"国家卫健委")]').click()
    time.sleep(2)
# demo2()
# Method 7 for locating elements: CSS selector
def demo3():
    """Demo of the six CSS-selector locator sub-techniques."""
    driver = webdriver.Chrome()
    driver.get('http://www.baidu.com')
    # 1. Absolute path starting at html, tagging elements via their id or class
    # driver.find_element_by_css_selector('html body div#wrapper div#head div#s-top-left a.c-font-normal').click()
    # 2. Relative path
    # driver.find_element_by_css_selector('div#head div#s-top-left a.c-font-normal').click()
    # 3. Element attributes: tag[attr="value"]
    #driver.find_element_by_css_selector('a[name="tj_briicon"]').click()
    # driver.find_element_by_css_selector('a[name="tj_briicon"][class="s-bri"]').click()
    # 4. Fuzzy matching: when the attribute value is very long or changes dynamically
    #    $= ends with   ^= starts with   *= contains
    # driver.find_element_by_css_selector('a[name^="tj_bri"]').click()
    # driver.find_element_by_css_selector('a[name$="iicon"]').click()
    # driver.find_element_by_css_selector('a[name*="brii"]').click()
    # 5. Selecting child elements
    # driver.find_element_by_css_selector('form span input').send_keys('selenium')
    # driver.find_element_by_css_selector('form>span>input').send_keys('selenium')
    # driver.find_element_by_css_selector('div#s-top-left a:first-child').click()
    # driver.find_element_by_css_selector('div#s-top-left a:last-child').click()
    # driver.find_element_by_css_selector('div#s-top-left a:nth-child(3)').click()
    # 6. Selecting sibling elements
    # driver.find_element_by_css_selector('div#s-top-left a+a+a+a').click()
    from selenium.webdriver.common.by import By
    driver.find_element(By.CSS_SELECTOR, 'div#s-top-left a+a+a+a').click()
    time.sleep(2)
demo3()
drachens/Tarea1-Cripto | 7,696,581,406,602 | 47ff8d20eb8a630731d6e382142e037992dba8d0 | d98bc8f1be790e61a3a6c2d007385cb3cbe7cc41 | /Growbarato/singup_3.py | f4e1e1c68234eaac794aa4ef18d5f1aa5b190d3f | [] | no_license | https://github.com/drachens/Tarea1-Cripto | d62d26ea1c95f3baaa10494b647f4d4a539435c6 | f859e53ba9236fa90df10aa78bf4c289ec863b89 | refs/heads/main | 2023-08-26T09:06:28.957529 | 2021-10-11T02:22:33 | 2021-10-11T02:22:33 | 415,752,738 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
driver = webdriver.Firefox()
def pass2048(repeticiones, path="../generate.txt"):
    """Return the last line of the seed file repeated *repeticiones* times.

    Fixes over the original: the file handle is now closed (``with``), an
    empty file returns '' instead of raising NameError, and the path is a
    backward-compatible parameter (defaulting to the original hard-coded
    location) so the function is testable.

    Args:
        repeticiones: how many times to repeat the seed line.
        path: location of the seed file.

    Returns:
        The last line (newline stripped) repeated, or '' for an empty file.
    """
    passw = ""
    with open(path, "r") as seed_file:
        for linea in seed_file:
            passw = linea.strip("\n")
    return passw * repeticiones
# Build a long test password (seed line repeated 15 times) and attempt a
# sign-up on growbarato.net to probe the site's password-length handling.
passw = pass2048(15)
emailw="fabefefroko-9434@yopmail.com"
driver.get("https://www.growbarato.net/autenticacion?back=my-account")
time.sleep(10)
# Open the registration pane.
a = driver.find_element_by_xpath("//span[@class='cf1y60']")
a.click()
time.sleep(10)
# Fill in the e-mail and submit to reach the account-details form.
a = driver.find_element_by_xpath("//div[@class='form-group']/div/input[@name='email']")
a.send_keys(emailw)
a = driver.find_element_by_xpath("//footer[@class='form-footer clearfix']/input[@class='btn btn-primary']")
a.click()
# Fill in the profile fields and the password under test, then submit.
a = driver.find_element_by_xpath("//input[@name='firstname']")
a.send_keys("TestName")
a = driver.find_element_by_xpath("//input[@name='lastname']")
a.send_keys("TestLastName")
a = driver.find_element_by_xpath("//input[@name='password']")
a.send_keys(passw)
a = driver.find_element_by_xpath("//button[@class='btn btn-primary form-control-submit float-xs-right']")
a.click()
# If the password input is still present the form was rejected; otherwise
# the password of this length was accepted.
# NOTE(review): bare except hides every failure mode (timeouts, crashes),
# not just "element gone" -- consider NoSuchElementException instead.
try:
    a = driver.find_element_by_xpath("//input[@name='password']")
    print("No se pudo utilizar esa contraseña de largo: "+str(len(passw)))
except:
    print("Contraseña de largo: "+str(len(passw))+" funciona")
dennis-wei/korean_verb_lookup | 17,403,207,486,686 | a57eac21ed4653e361a0454806e57f631a2f1fa2 | 4382e64ada89bd24b6cc1135c731e0682146dac3 | /verb_lokup.py | 8408f1fabb33fe034ace3f31f6e27852053fbd47 | [] | no_license | https://github.com/dennis-wei/korean_verb_lookup | 522caf770237796129765f87f9496154cc66733c | 373233328dfc67697b87dbbd12e8a3fb2aee7f32 | refs/heads/master | 2020-06-16T07:13:12.440727 | 2016-11-30T23:23:32 | 2016-11-30T23:23:32 | 75,236,004 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
from urllib2 import urlopen
from bs4 import BeautifulSoup
import csv
FILE_NAME = "verb_and_tense.txt"
BASE_URL = "http://dongsa.net/?search="
with open(FILE_NAME, 'rb') as f:
row_list = []
reader = csv.reader(f, delimiter = ",")
for row in reader:
row_list.append(row)
verb_list = row_list[0]
tense_list = row_list[1]
for verb in verb_list:
response_list = []
html = urlopen(BASE_URL + verb).read()
soup = BeautifulSoup(html, "html.parser")
conjugation_dict = {}
for conjugation in soup.findAll("tr", {"class": "conjugation-row"}):
type, value = map(lambda x: x.get_text(), conjugation.findAll("td"))
conjugation_dict[type] = value
print verb
for tense in tense_list:
print tense, conjugation_dict[tense]
print
| UTF-8 | Python | false | false | 824 | py | 4 | verb_lokup.py | 1 | 0.627427 | 0.621359 | 0 | 29 | 27.37931 | 76 |
adsofron/python-examples | 17,197,049,082,547 | b787b41c6544b6a7f01ee88a6e8f608613573deb | b44bc437f7b4b94f4e9f8f885feb68fe0198f47e | /print_grades.py | 3699767f21316f13b4ea2c701a33a49ffe05d80f | [] | no_license | https://github.com/adsofron/python-examples | b7f3a7ce89a96b9fadbbf03b29b961954d68e8b9 | 8f0339824c13ce28eaa807a7fd8fa60deae20ba4 | refs/heads/master | 2021-01-24T00:02:56.509724 | 2018-02-25T16:20:33 | 2018-02-25T16:20:33 | 122,746,827 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | grades = [100, 100, 90, 40, 80, 100, 85, 70, 90, 65, 90, 85, 50.5]
def print_grades(grades_input):
for i in range(len(grades_input)-1):
print(grades_input[i])
print_grades(grades)
| UTF-8 | Python | false | false | 188 | py | 41 | print_grades.py | 38 | 0.643617 | 0.478723 | 0 | 7 | 25.857143 | 66 |
Idomass/red_alert_analyzer | 1,400,159,375,407 | e3012b1f3a31d53ba176c4227932a757a88ba6a7 | 998429df0afad707a7c4217f5bee8edb3b669727 | /analyzer.py | 456f2c9d70de9f7b31276bd15d89fcf85fe90fd8 | [] | no_license | https://github.com/Idomass/red_alert_analyzer | b2ad5ad81edf0e193ba9c845f23d8f15a30c0b99 | 3d4de19ec8b04813021f06adb9aaececd4f4d2d5 | refs/heads/master | 2023-04-25T07:24:03.220919 | 2021-05-14T15:15:13 | 2021-05-14T15:36:25 | 367,398,570 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #! /usr/bin/env python3
import argparse
import datetime
from oref_analyzer import OrefAnalyzer
def parse_args():
    """Build and evaluate the analyzer's command-line options.

    Options:
        --language    UI language, 'en' (default) or 'he'.
        --location    Location whose red-alert history should be shown.
        --start-date  History start, YYYY-MM-DD (default 2021-05-10).
    """
    def _iso_date(raw):
        # Parse YYYY-MM-DD; argparse reports a usage error on failure.
        return datetime.datetime.strptime(raw, '%Y-%m-%d')

    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--language', dest='lang', default='en', choices=['en', 'he'])
    arg_parser.add_argument('--location', help='Location to show history of red alerts')
    arg_parser.add_argument('--start-date', type=_iso_date,
                            default=datetime.datetime(2021, 5, 10))
    return arg_parser.parse_args()
if __name__ == '__main__':
    # CLI entry point: render the red-alert history for the requested
    # location from the given start date, in the chosen language.
    args = parse_args()
    OrefAnalyzer(language=args.lang).show_history(args.location, args.start_date)
| UTF-8 | Python | false | false | 656 | py | 11 | analyzer.py | 7 | 0.66311 | 0.650915 | 0 | 20 | 31.8 | 97 |
oleg-chubin/let_me_play | 14,680,198,229,779 | c8bdab8c57df5640cdde6ecb5432be8819a21b84 | 2b6cbe0718b1074a2c9097ed7d15810a0a93d0c7 | /let_me_auth/forms.py | cd338abd3dce8c4320ea03b83cbab0675c52672c | [
"Apache-2.0"
] | permissive | https://github.com/oleg-chubin/let_me_play | fcc297a65400a0863ccb91e146a5ed4b16151ce6 | f567607c0d5d38b7519104c355e3738e01f8c6c9 | refs/heads/develop | 2020-12-24T05:53:49.724646 | 2017-09-24T20:50:41 | 2017-09-24T20:50:41 | 30,723,993 | 2 | 0 | null | false | 2017-09-24T20:50:42 | 2015-02-12T21:09:06 | 2017-09-02T19:24:39 | 2017-09-24T20:50:42 | 1,632 | 1 | 0 | 40 | Python | null | null | '''
Created on Jul 4, 2015
@author: oleg
'''
import os
import time
from django import forms
from django.contrib.gis import forms as geo_forms
from django.utils.translation import ugettext_lazy as _
from leaflet.forms.widgets import LeafletWidget
from floppyforms import widgets as floppyforms_widgets
from let_me_auth import models
from django.forms.models import BaseInlineFormSet
from django.forms.formsets import DELETION_FIELD_NAME
from django.contrib.auth.forms import SetPasswordForm
from django.conf import settings
from django.utils.safestring import mark_safe
from django.utils.datastructures import MultiValueDictKeyError
from PIL import Image as pil_image
from django.core.files.uploadedfile import InMemoryUploadedFile
from six import BytesIO
from django.core.validators import RegexValidator
UPLOAD_IMG_ID="new-img-file"
class BootstrapChoiceWidget(forms.Select):
    """Select widget that appends Bootstrap's 'form-control' CSS class."""
    def build_attrs(self, *args, **kwargs):
        attrs = super(BootstrapChoiceWidget, self).build_attrs(*args, **kwargs)
        # Preserve any caller-supplied class attribute and append ours.
        attrs['class'] = attrs.get('class', '') + ' form-control'
        return attrs
class NotificationSettingsForm(forms.ModelForm):
    """Edit form for a user's NotificationSettings record.

    The boolean notification flags are rendered as <select> widgets so the
    model-defined choice labels are shown instead of plain checkboxes.
    """
    # UI language; the choice list comes from the project settings.
    lang = forms.ChoiceField(
        label=_('language'),
        choices=settings.LANGUAGES,
        widget=BootstrapChoiceWidget()
    )
    sms_notifications = forms.BooleanField(
        required=False,
        widget=floppyforms_widgets.Select(
            choices=models.NotificationSettings.SMS_NOTIFICATION_CHOICES))
    email_notifications = forms.BooleanField(
        required=False,
        widget=floppyforms_widgets.Select(
            choices=models.NotificationSettings.EMAIL_NOTIFICATION_CHOICES))
    class Meta:
        model = models.NotificationSettings
        fields = ('sms_notifications', 'email_notifications', 'lang')
class CustomSetPasswordForm(SetPasswordForm):
    """Password-set form that additionally activates the account on save.

    Carries a hidden verification code — presumably checked by the view that
    serves this form; confirm against the view code.
    """
    verification_code = forms.CharField(widget=forms.HiddenInput)
    def save(self, commit=True):
        # Successfully setting a password completes sign-up: activate the user.
        self.user.is_active = True
        return super(CustomSetPasswordForm, self).save(commit=commit)
class JcropWidget(floppyforms_widgets.FileInput):
    """File-input widget that renders the current image with a Jcrop cropper."""
    template_name = 'floppyforms/jcrop_image.html'
    # Keep the current value in the rendering context even for a file input.
    omit_value = False
    class Media:
        # form media, i.e. CSS and JavaScript needed for Jcrop.
        # You'll have to adopt these to your project's paths.
        css = {
            'all': ("css/jquery.Jcrop.min.css",)
        }
        js = (
            "js/jquery.Jcrop.min.js",
        )
    def __init__(self, *args, **kwargs):
        # Preview box dimensions are mandatory keyword arguments.
        self.preview_height = kwargs.pop('preview_height')
        self.preview_width = kwargs.pop('preview_width')
        super(JcropWidget, self).__init__(*args, **kwargs)
    # fixed Jcrop options; to pass options to Jcrop, use the jcrop_options
    # argument passed to the JcropForm constructor. See example above.
    jcrop_options = {
        "onSelect": "storeCoords",
        "onChange": "storeCoords",
    }
    # HTML template for the widget.
    #
    # The widget is constructed from the following parts:
    #
    # * HTML <img> - the actual image used for displaying and cropping
    # * HTML <label> and <input type="file> - used for uploading a new
    #   image
    # * HTML <input type="hidden"> - to remember image path and filename
    # * JS code - The JS code makes the image a Jcrop widget and
    #   registers an event handler for the <input type="file">
    #   widget. The event handler submits the form so the new
    #   image is sent to the server without the user having
    #   to press the submit button.
    #
    def get_context(self, name, value, attrs, **kwargs):
        """Expose the image URL and preview dimensions to the template."""
        context = super(JcropWidget, self).get_context(name, value, attrs)
        context['value'] = value and getattr(value, 'url', None)
        context['preview_height'] = self.preview_height
        context['preview_width'] = self.preview_width
        return context
    def build_attrs(self, attrs):
        """Attach the Jcrop options as data-* attributes plus the JS hook class."""
        attrs = super(JcropWidget, self). build_attrs(attrs)
        attrs.update({"data-{}".format(k): v for k, v in self.jcrop_options.items()})
        attrs['class'] = attrs.get('class', '') + ' jcropped'
        return attrs
class CustomPhoneNumberInput(floppyforms_widgets.PhoneNumberInput):
    """Phone-number input rendered with a project-specific template."""
    template_name = 'floppyforms/phone_number_input.html'
class BaseUserForm(forms.ModelForm):
    """Minimal user-profile form; at least one of the name fields is required."""
    class Meta:
        model = models.User
        fields = ('first_name', 'last_name')
        widgets = {
            'first_name': floppyforms_widgets.TextInput(),
            'last_name': floppyforms_widgets.TextInput(),
        }
    def clean(self):
        """Reject submissions where both name fields are blank."""
        cleaned_data = super(BaseUserForm, self).clean()
        first_name = cleaned_data.get("first_name")
        last_name = cleaned_data.get("last_name")
        if not (first_name or last_name):
            raise forms.ValidationError("Please fill first name or last name")
        return cleaned_data
class UserDetailsForm(forms.ModelForm):
    """Profile-editing form with a croppable avatar upload.

    The hidden x1/y1/x2/y2 fields carry the crop rectangle selected by the
    client-side Jcrop widget; ``clean`` crops the uploaded avatar to that
    rectangle and resizes it to ``Meta.avatar_width`` x ``Meta.avatar_height``.
    """

    # Crop rectangle (image pixels), filled in by the Jcrop JS widget.
    x1 = forms.DecimalField(widget=forms.HiddenInput(), required=False)
    y1 = forms.DecimalField(widget=forms.HiddenInput(), required=False)
    x2 = forms.DecimalField(widget=forms.HiddenInput(), required=False)
    y2 = forms.DecimalField(widget=forms.HiddenInput(), required=False)
    cell_phone = forms.CharField(
        validators=[
            RegexValidator(
                regex="375\\d{9}$",
                message=_("Phone number should have international "
                          "format without + sign"))],
        widget=CustomPhoneNumberInput())

    class Meta:
        avatar_height = 100
        avatar_width = 100
        model = models.User
        fields = ('first_name', 'last_name', 'sex', 'cell_phone', 'avatar')
        widgets = {
            'sex': floppyforms_widgets.Select(),
            'first_name': floppyforms_widgets.TextInput(),
            'last_name': floppyforms_widgets.TextInput(),
            'cell_phone': floppyforms_widgets.TextInput(),
            'avatar': JcropWidget(
                preview_width=avatar_width, preview_height=avatar_height),
        }

    class Media:
        js = ('js/update_profile.js',)
        css = {
            'all': ("css/update_profile.css",)
        }

    def clean(self):
        """Crop and resize a newly uploaded avatar.

        Raises ValidationError when the crop coordinates are missing or the
        uploaded file is not a readable image.
        """
        cleaned_data = super(UserDetailsForm, self).clean()
        if not self.files.get("avatar"):
            # No new upload: nothing to crop or resize.
            return cleaned_data
        if any(cleaned_data.get(k) is None for k in ['x1', 'y1', 'x2', 'y2']):
            raise forms.ValidationError("please Upload your image again and crop")
        try:
            img = pil_image.open(self.cleaned_data["avatar"])
        except IOError:
            raise forms.ValidationError("Invalid image file")
        img = self.crop(img)
        img = self.resize(img, (self.Meta.avatar_width, self.Meta.avatar_height))
        # Re-encode the processed image into an in-memory buffer; the format
        # name is derived from the uploaded content type (e.g. "image/png").
        thumb_io = BytesIO()
        img.save(thumb_io, self.files['avatar'].content_type.split('/')[-1].upper())
        # Unique name: "<user id>_avatar_<unix time><original extension>".
        new_file_name = (str(self.instance.id) + '_avatar_' +
                         str(int(time.time())) +
                         os.path.splitext(self.instance.avatar.name)[1])
        # Replace the raw upload with the cropped/resized in-memory file.
        self.cleaned_data["avatar"] = InMemoryUploadedFile(
            thumb_io, u"avatar", new_file_name,
            cleaned_data['avatar'].content_type, thumb_io.tell(), None)
        return cleaned_data

    def crop(self, img):
        """Crop *img* to the user-selected rectangle (x1, y1, x2, y2)."""
        # DecimalField values are cast to int: PIL crop boxes are pixel ints.
        box = (int(self.cleaned_data['x1']), int(self.cleaned_data['y1']),
               int(self.cleaned_data['x2']), int(self.cleaned_data['y2']))
        return img.crop(box)

    def resize(self, img, dimensions, maintain_ratio=False):
        """Return *img* resized to *dimensions* (a (width, height) tuple)."""
        if maintain_ratio:
            # BUG FIX: Image.thumbnail() resizes in place and returns None;
            # the original assigned its result back to img, returning None.
            img.thumbnail(dimensions, pil_image.ANTIALIAS)
        else:
            img = img.resize(dimensions, pil_image.ANTIALIAS)
        return img

    @staticmethod
    def prepare_uploaded_img(files, upload_to, profile, max_display_size=None):
        """Store an uploaded image at its final path, optionally downscaling.

        'upload_to' is a function (profile, filename) -> relative path, as
        expected by Django's FileField.  Returns the path relative to
        MEDIA_ROOT, or None when no image was uploaded.
        """
        try:
            upload_file = files[UPLOAD_IMG_ID]
        except MultiValueDictKeyError:
            # files dict does not contain a new image
            return None
        # Copy the upload's chunks to the final file; the context manager
        # guarantees the handle is closed even on error (original leaked it).
        fn = upload_to(profile, upload_file.name)
        pfn = settings.MEDIA_ROOT + fn
        with open(pfn, 'wb+') as destination:
            for chunk in upload_file.chunks():
                destination.write(chunk)
        if max_display_size:
            # Downscale if the stored image exceeds the allowed display size.
            # Integer division keeps the computed dimensions valid for PIL.
            im = pil_image.open(pfn)
            if im.size[0] > max_display_size[0]:
                # image is wider than allowed; resize it
                im = im.resize((max_display_size[0],
                                im.size[1] * max_display_size[0] // im.size[0]),
                               pil_image.ANTIALIAS)
            if im.size[1] > max_display_size[1]:
                # image is taller than allowed; resize it
                im = im.resize((im.size[0] * max_display_size[1] // im.size[1],
                                im.size[1]), pil_image.ANTIALIAS)
            im.save(pfn)
        return fn
| UTF-8 | Python | false | false | 10,672 | py | 197 | forms.py | 84 | 0.603823 | 0.599138 | 0 | 286 | 36.314685 | 95 |
bijanagahi/Advent-Of-Code-2019 | 3,805,341,042,793 | e3652f3c6f8885c154326a8442909110edd9741e | 72696bc2548cf394267d2c8dc6c9bb5c2d48cc99 | /Day4/day4_part1.py | 11d7235327cdaffaadb29d2c7dd24ca3892d4515 | [] | no_license | https://github.com/bijanagahi/Advent-Of-Code-2019 | 44d5f21e241b71a3be377099536237ea2d26829b | 96884a81988090177828901d037230d9f6b44ef3 | refs/heads/master | 2020-09-27T18:50:09.990238 | 2019-12-22T20:17:03 | 2019-12-22T20:17:03 | 226,584,225 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # https://adventofcode.com/2019/day/4
# --- Day 4: Secure Container ---
#
# Authors: Bijan Agahi
# Puzzle input: the candidate password range scanned below via
# range(range_low, range_high) (upper bound exclusive).
range_low = 347312
range_high = 805915
valid_passwords = 0  # running count of candidates that pass validateGuess
def validateGuess(guess):
    """Return True when guess satisfies the Day 4 password rules:
    digits never decrease left-to-right and at least one adjacent
    pair of digits is identical."""
    digits = str(guess)
    # Six (or more) unique digits means no digit repeats: reject early.
    if len(set(digits)) >= 6:
        return False
    has_adjacent_pair = False
    for left, right in zip(digits, digits[1:]):
        if left == right:
            has_adjacent_pair = True
        if int(left) > int(right):
            return False
    return has_adjacent_pair
# Brute-force scan: count every candidate in the range that passes the rules.
for guess in range(range_low,range_high):
    # Validate the guess
    if not validateGuess(guess):
        continue
    # If we got here it's a valid password
    valid_passwords += 1
print(valid_passwords)
| UTF-8 | Python | false | false | 660 | py | 13 | day4_part1.py | 13 | 0.665152 | 0.628788 | 0 | 39 | 15.897436 | 41 |
baluneboy/pims | 7,035,156,455,009 | 56ba407d83be67da5ccf5b4bfc04d146b582b566 | 00d7e9321d418a2d9a607fb9376b862119f2bd4e | /notable/note_config.py | 4025ed7d24a5f52679c87c29f3389413822254f8 | [
"MIT"
] | permissive | https://github.com/baluneboy/pims | 92b9b1f64ed658867186e44b92526867696e1923 | 5a07e02588b1b7c8ebf7458b10e81b8ecf84ad13 | refs/heads/master | 2021-11-16T01:55:39.223910 | 2021-08-13T15:19:48 | 2021-08-13T15:19:48 | 33,029,780 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
from ConfigParser import SafeConfigParser
from os.path import dirname, join, expanduser
# NOTE: the files in config.read are optional, but with each that exists
# subsequent one will override earlier one(s) on field-by-field basis
"""
GET THESE FILES IN PLACE TO SEE EXAMPLE OF LAYERED APPROACH
AND TRY VARIOUS COMBINATIONS OF THESE FILES EXISTING OR NOT
FIRST, defaults.ini in RELATIVE_SUBDIR of PROGRAM_DIR (where this file is):
--------------------------------------------------------
# tool/defaults.ini
[server]
# default host and port
host=localhost
port=8080
url=http://%(host)s:%(port)s/
NEXT, ~/.exampletool.ini for user's own prefs:
--------------------------------------------------------
# ~/.exampletool.ini
[server]
# user overrides default port
port=5000
FINALLY localconfig.ini for special config in $PWD
--------------------------------------------------------
# localconfig.ini
[server]
# override with special hostname
host=www.ninjarockstar.ken
"""
# Resolve paths relative to this script so it works from any working dir.
PROGRAM_DIR = dirname(__file__)
RELATIVE_SUBDIR = 'data'
config = SafeConfigParser()
# config.read() silently skips missing files and returns the list of files it
# actually parsed; later files override earlier ones on a field-by-field basis.
print 'config files being used:', config.read([
    join(PROGRAM_DIR, RELATIVE_SUBDIR, 'defaults.ini'),
    expanduser('~/.exampletool.ini'),
    'localconfig.ini'
])
print config.sections()
# 'host'/'port' may come from any layer; 'url' interpolates them (see above).
print 'host:', config.get('server', 'host')
print 'port:', config.get('server', 'port') | UTF-8 | Python | false | false | 1,368 | py | 725 | note_config.py | 626 | 0.644737 | 0.638889 | 0 | 48 | 27.520833 | 75 |
SAEONData/Open-Data-Platform | 4,398,046,556,755 | f928c104c79f29958ab0e81dc2d2d7a4937b433f | c44a3227d1c2b3a892a9a52438a324e675485ff7 | /odp/job/publish/main.py | f575155d7f2a5f4027563c9fe4487d407f349aa9 | [
"MIT"
] | permissive | https://github.com/SAEONData/Open-Data-Platform | 4b87aece6a83befd82a67f97d4ae330380c1f947 | 50c52bf476fd5c82afdf44379805f8790bb20319 | refs/heads/main | 2022-11-07T00:30:38.697706 | 2022-11-04T15:09:37 | 2022-11-04T15:09:37 | 251,641,495 | 2 | 1 | MIT | false | 2022-09-20T12:35:56 | 2020-03-31T15:12:19 | 2022-09-01T16:26:02 | 2022-09-20T12:35:17 | 2,678 | 2 | 0 | 7 | Python | false | false | #!/usr/bin/env python
import logging
import pathlib
import sys
from sqlalchemy import select
rootdir = pathlib.Path(__file__).parent.parent.parent.parent
sys.path.append(str(rootdir))
from odp.db import Session
from odp.db.models import Catalog
from odp.job.publish.datacite import DataCitePublisher
from odp.job.publish.saeon import SAEONPublisher
from odplib.const import ODPCatalog
from odplib.logging import init_logging
init_logging()
logger = logging.getLogger(__name__)
# Map each catalog id to the publisher class that handles it.
publishers = {
    ODPCatalog.SAEON: SAEONPublisher,
    ODPCatalog.DATACITE: DataCitePublisher,
}
def main():
    """Run the matching publisher for every catalog id found in the database."""
    logger.info('PUBLISHING STARTED')
    try:
        for catalog_id in Session.execute(select(Catalog.id)).scalars():
            publisher_class = publishers[catalog_id]
            publisher_class(catalog_id).run()
    except Exception as e:
        logger.critical(f'PUBLISHING ABORTED: {str(e)}')
    else:
        logger.info('PUBLISHING FINISHED')
if __name__ == '__main__':
    # Script entry point: run one full publishing pass.
    main()
| UTF-8 | Python | false | false | 973 | py | 209 | main.py | 124 | 0.71223 | 0.71223 | 0 | 43 | 21.627907 | 72 |
SergioAnd95/SimpleMessage | 876,173,337,370 | 7d040907a7bfb1949d912a7f3311a1991ecb5d9b | 71d0c489fa0a5464bdaa290b72188db325dd0cdd | /chat/forms.py | 6e425950af90e8ef8dd0354aab7d6b7de93891c9 | [] | no_license | https://github.com/SergioAnd95/SimpleMessage | edf35d8bcfbad35a4da47785dcf9dc098f23d8ff | b062989bcf45eef6d5c79617b077e26a11351475 | refs/heads/master | 2021-01-15T10:35:03.765370 | 2017-08-08T22:07:47 | 2017-08-08T22:07:47 | 99,588,250 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Message
class MessageForm(forms.ModelForm):
    """Form for posting a chat Message; only the text body is user-editable."""
    class Meta:
        model = Message
        fields = ('text', )
        widgets = {
            # placeholder is lazily translated ("Message text")
            'text': forms.Textarea(attrs={'placeholder': _('Текст сообщения')})
} | UTF-8 | Python | false | false | 339 | py | 12 | forms.py | 6 | 0.630769 | 0.630769 | 0 | 13 | 24.076923 | 79 |
recuraki/PythonJunkTest | 970,662,614,476 | 5d98d3cc411b0f77fd127c1e840cb8860daf3f4e | 4a8c1f7d9935609b780aff95c886ef7781967be0 | /atcoder/_codeforces/1203_a.py | b7730f5606b08663544fca268bcc56d0e9c0d21f | [] | no_license | https://github.com/recuraki/PythonJunkTest | d5e5f5957ac5dd0c539ef47759b1fe5ef7a2c52a | 2556c973d468a6988d307ce85c5f2f8ab15e759a | refs/heads/master | 2023-08-09T17:42:21.875768 | 2023-07-18T23:06:31 | 2023-07-18T23:06:31 | 13,790,016 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
from io import StringIO
import unittest
import logging
logging.basicConfig(level=logging.DEBUG)
def resolve():
    """For each query, print YES when the circle of students is a rotation
    of 1..n clockwise or counter-clockwise, otherwise NO.

    Reads: q, then for each query n followed by n space-separated values.
    """
    q = int(input())
    for _ in range(q):
        n = int(input())
        students = list(map(int, input().split()))
        # clockwise check: rotate [1..n] so that value 1 lands where it appears
        start = students.index(1)
        pattern = list(range(1, n + 1))
        shift = n - start
        clockwise = pattern[shift:] + pattern[:shift]
        # counter-clockwise check: rotate [n..1] so that value n lines up
        start = students.index(n)
        pattern = list(range(n, 0, -1))
        shift = n - start
        counter_clockwise = pattern[shift:] + pattern[:shift]
        ok = students == clockwise or students == counter_clockwise
        print("YES" if ok else "NO")
class TestClass(unittest.TestCase):
    """Redirects stdin/stdout around resolve() and compares captured output."""
    maxDiff = 100000
    def assertIO(self, input, output):
        # Swap in StringIO streams, run the solver, then restore the originals.
        stdout, stdin = sys.stdout, sys.stdin
        sys.stdout, sys.stdin = StringIO(), StringIO(input)
        resolve()
        sys.stdout.seek(0)
        out = sys.stdout.read()[:-1]  # drop the trailing newline
        sys.stdout, sys.stdin = stdout, stdin
        self.assertEqual(out, output)
    def test_input_1(self):
        print("test_input_1")
        input = """5
4
1 2 3 4
3
1 3 2
5
1 2 3 5 4
1
1
5
3 2 1 5 4"""
        output = """YES
YES
NO
YES
YES"""
        self.assertIO(input, output)
if __name__ == "__main__":
unittest.main() | UTF-8 | Python | false | false | 1,681 | py | 1,170 | 1203_a.py | 1,143 | 0.475821 | 0.444776 | 0 | 78 | 20.474359 | 59 |
enterpreneur369/holbertonschool-higher_level_programming | 9,191,230,046,819 | df5d9ecf45fdf6db0c462fec0b8b7f0ede5b8304 | c9ad6ad969de505b3c8471c6f46dfd782a0fb498 | /0x04-python-more_data_structures/102-complex_delete.py | d0afc9a2d6047458a9a6dfeef497d8b4565c075d | [] | no_license | https://github.com/enterpreneur369/holbertonschool-higher_level_programming | 002fd5a19b40c8b1db06b34c4344e307f24c17ac | dd7d3f14bf3bacb41e2116d732ced78998a4afcc | refs/heads/master | 2022-06-20T00:57:27.736122 | 2020-05-06T14:26:10 | 2020-05-06T14:26:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python3
"""complex_delete
Function to delete some keys of the dictionary
"""
def complex_delete(a_dictionary, value):
    """Delete, in place, every key of a_dictionary whose value equals value.

    Returns the same dictionary object.
    """
    # Collect keys first: a dict must not change size while being iterated.
    doomed = [key for key, val in a_dictionary.items() if val == value]
    for key in doomed:
        del a_dictionary[key]
    return a_dictionary
| UTF-8 | Python | false | false | 315 | py | 169 | 102-complex_delete.py | 136 | 0.603175 | 0.6 | 0 | 15 | 20 | 46 |
junnyhe/Code | 11,948,599,049,418 | da79eee371402f795ea1011704f3cb38394f5d75 | c89d50f7554f2eb551e5a493b7b9614c478a8ac2 | /deprecated/deprecated.src/step1_data_rollup_merge/step1d_merge_signal_threatmetrix.py | 7f5d634a391985c95c7a7bad5ac1cf4c3c55ebef | [] | no_license | https://github.com/junnyhe/Code | c741d5f6b559ef0d4fbb37f8b67f5dd59d89b3d4 | b698e4831bc240dbc1229c21c71ceb88733aabd2 | refs/heads/master | 2020-04-11T09:44:22.451742 | 2015-08-30T02:28:51 | 2015-08-30T02:28:51 | 31,276,029 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import csv
import gzip
import os
import sys
import time
import datetime
sys.path.append("/Users/junhe/Documents/workspace/fraud_model/src/model_tools")
sys.path.append("/Users/junhe/Documents/workspace/fraud_model/src/csv_operations")
import csv_ops
from csv_ops import *
day=datetime.date(2014,11,5) #start date
nDays=27 # number of days to process
# For each day, join the flattened signal file with the payer and payee
# ThreatMetrix files on payment_request_id, producing one merged csv.gz.
for iDay in range(nDays):
    file1="/Users/junhe/Documents/Data/Raw_Data/signals/fraud_signal_flat_"+str(day)+".csv.gz"
    file2="/Users/junhe/Documents/Data/Raw_Data/threatmetrix_payer/threatmetrix_payer_flat_"+str(day)+".csv.gz"
    file3="/Users/junhe/Documents/Data/Raw_Data/threatmetrix_payee/threatmetrix_payee_flat_"+str(day)+".csv.gz"
    file_out="/Users/junhe/Documents/Data/Raw_Data/merged_data/signals_threatmetrix_payer_payee_"+str(day)+".csv.gz"
    file_out_tmp= "/Users/junhe/Documents/Data/Raw_Data/merged_data/merge_tmp_"+str(day)+".csv.gz"
    key_list=['payment_request_id']
    # step 1: merge signal and payer threatmetrix into a temp file
    t0=time.time()
    print "Merging signal and payer threatmetrix for "+str(day)
    csv_merge(file1, file2, key_list, file_out_tmp)
    print "Merging signal and payer threatmetrix done; time lapsed: ",time.time()-t0,'sec'
    # step 2: merge the temp result with payee threatmetrix
    print "Merge all three data sources for "+str(day)
    csv_merge(file_out_tmp, file3, key_list, file_out)
    print "Merge all three data sources done ; total time lapsed: ",time.time()-t0,'sec'
    # delete the intermediate temp file
    cmdout=os.system('rm '+file_out_tmp)
    # increment day by one
day = day+datetime.timedelta(1) | UTF-8 | Python | false | false | 1,623 | py | 128 | step1d_merge_signal_threatmetrix.py | 88 | 0.716574 | 0.704868 | 0 | 39 | 40.641026 | 116 |
j-himanshu/Customs-Management-Project-Amigo- | 12,549,894,462,064 | bc51038907ddc1d3340bd9e2fcac7b2b304ad744 | 30961e982a8ad06000a85f54cc1bc5c14118602c | /cli.py | b11811570aa17da7bbb078484cef7eb9dc55f5b2 | [] | no_license | https://github.com/j-himanshu/Customs-Management-Project-Amigo- | 8fae16343eb72254d027f9f5968edc8361fb8f9b | 204c2197b836df7ef71a9e0ba91b74ea753b04fa | refs/heads/master | 2020-06-14T09:36:05.351647 | 2016-11-30T16:30:04 | 2016-11-30T16:30:04 | 75,204,786 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import Tkinter as tk
import MySQLdb
import traceback
import os
from server import *
from query_box import query_box
class CLI:
    """Tkinter admin console: a single-line SQL entry box with two output
    labels, executing queries against the local 'customs' MySQL database.

    NOTE(review): queries are executed verbatim (admin tool) — there is no
    SQL sanitisation by design, so this must never be exposed to end users.
    """
    def __init__(self, parent):
        # Build the query entry, two feedback labels and the helper button.
        self.parent=parent
        self.frame=tk.Frame(self.parent)
        self.entryvar=tk.StringVar()
        self.entry=tk.Entry(self.frame, textvariable=self.entryvar, width=100, fg="white", bg="black", font="Ariel 15")
        self.entry.bind("<Return>", self.query2)  # Enter key runs the query
        self.entry.pack(pady = 5)
        self.entryvar.set(">>>Enter Query")
        self.entry.config(insertbackground = "white")
        # flexlabel shows query results (or the error trace excerpt)
        self.flexlabel=tk.StringVar()
        self.label=tk.Label(self.frame, textvariable=self.flexlabel, width=137, fg="white", bg="black", anchor='w', font="Ariel 10")
        self.label.pack(pady = 5)
        self.flexlabel.set(">>>")
        # flexlabel2 shows the execution status line
        self.flexlabel2=tk.StringVar()
        self.label2=tk.Label(self.frame, textvariable=self.flexlabel2, width=137, fg="white", bg="black", anchor='w', font="Ariel 10")
        self.label2.pack(pady = 5)
        self.flexlabel2.set(u"Enter your Query Mr. Admin")
        self.query_button = tk.Button(self.frame, text = "Custom Select Query", command=self.query, bg= "red", fg = "white").pack(pady=10)
        self.frame.pack()
    def query2(self, event):
        # Connect to MySQL; on failure assume the server is down, start it
        # and retry once.  (Bare except kept: any connect error triggers it.)
        try:
            self.db= MySQLdb.connect("127.0.0.1", "root", "root", "customs")
        except:
            start_server()
            self.db= MySQLdb.connect("127.0.0.1", "root", "root", "customs")
        # Run the typed SQL; show rows on success, the error excerpt on failure.
        try:
            sql=self.entry.get()
            cursor=self.db.cursor()
            cursor.execute(sql)
            result=cursor.fetchall()
            if len(result)>0:
                ans=">>>"
                for res in result:
                    ans=ans+str(res)+"\n"
                self.flexlabel.set(ans)
            else:
                self.flexlabel.set(">>>Query returned 0 rows")
            self.flexlabel2.set(">>>Successfully Executed")
            self.db.commit()
        except:
            # Display only the part of the traceback from "Error: " onwards.
            self.flexlabel2.set(">>>")
            error=str(traceback.format_exc())
            error=">>>"+error[error.find("Error: "):len(error)]
            self.flexlabel.set(error)
            self.db.rollback()
        self.db.close()
    def query(self):
        # Open the auxiliary "custom select" query window.
        self.quer= tk.Toplevel(self.parent)
        self.app = query_box(self.quer)
if __name__=="__main__":
root=tk.Tk()
app=CLI(root)
root.mainloop()
os.system("python table.py") | UTF-8 | Python | false | false | 2,530 | py | 28 | cli.py | 25 | 0.550593 | 0.533597 | 0 | 69 | 34.695652 | 138 |
tidebam/learn_torch | 11,321,533,837,402 | 99deaf6dadd2589b483e75a224346bb4710bf18f | bd9b0dc2e5a6e285d693b8e32be46c3c4d039208 | /dataloader.py | ec1738c43b55c3b5dc9da8705fa359ff789d8fdd | [] | no_license | https://github.com/tidebam/learn_torch | 797bcb17ea12cb11cfb8bfa75a43963dac99baa5 | d031332a916fd1cd9eea200f4b000c562cdf6594 | refs/heads/master | 2023-07-24T14:25:15.036345 | 2021-09-13T11:33:17 | 2021-09-13T11:33:17 | 405,808,895 | 0 | 0 | null | false | 2021-09-13T11:33:18 | 2021-09-13T02:32:53 | 2021-09-13T11:22:07 | 2021-09-13T11:33:17 | 11 | 0 | 0 | 0 | Jupyter Notebook | false | false | import torchvision
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
# Convert PIL images to torch tensors when samples are fetched.
dataset_transform = torchvision.transforms.Compose([
    torchvision.transforms.ToTensor()
])
# Test Dataset
test_set = torchvision.datasets.CIFAR10(root="./dataset", train=False, transform=dataset_transform, download=True)
test_loader = DataLoader(dataset=test_set, batch_size=4, shuffle=True, num_workers=0, drop_last=False)
# First sample of the test set: tensor image and its class index
img, target = test_set[0]
print(img.shape)
print(target) | UTF-8 | Python | false | false | 530 | py | 11 | dataloader.py | 10 | 0.78937 | 0.779528 | 0 | 15 | 32.933333 | 114 |
Subaru-PFS/ics_iicActor | 137,438,988,498 | e6771060e0fb41517c48d0548d74ee2ea61c5433 | 994023311ccdc9725995c7dcc44bea7e57bb823c | /python/ics/iicActor/utils/exception.py | 76375e3ff1232a44741033c0a75e82450063ec6b | [] | no_license | https://github.com/Subaru-PFS/ics_iicActor | 1bc3b38c8a01fd511849e0b13477d559936d5077 | 4de50d621193346af0933e85ecdc6fe8b82eed3e | refs/heads/master | 2023-08-05T12:14:11.834484 | 2023-07-26T06:37:44 | 2023-07-26T06:37:44 | 204,379,110 | 0 | 0 | null | false | 2023-09-13T17:17:04 | 2019-08-26T02:20:20 | 2021-12-24T21:07:34 | 2023-09-13T17:17:00 | 670 | 0 | 0 | 0 | Python | false | false | from ics.iicActor.utils.lib import stripQuotes
class IicException(Exception):
    """Base exception for iicActor errors.

    str() renders as "ClassName(reason)"; when className is supplied it
    replaces the concrete class name in that rendering.
    """

    def __init__(self, reason="", className=""):
        super().__init__()
        self.reason = reason
        self.className = className

    def __str__(self):
        label = self.className or type(self).__name__
        return f'{label}({self.reason})'
class ResourceUnAvailable(IicException):
    """Raised when a required resource is not available."""
class ResourceIsBusy(IicException):
    """Raised when a required resource is already in use."""
class OpDBFailure(IicException):
    """Raised when an operation against opDB fails."""
class SequenceIdentificationFailure(IicException):
    """Raised when a sequence cannot be identified."""
class SequenceAborted(IicException):
    """Raised when a sequence is aborted before completion."""
class OpdbInsertFailed(IicException):
    """Raised when inserting a row into an opDB table fails."""

    def __init__(self, tableName, reason):
        # tableName: the opDB table the insert targeted.
        # reason: underlying error, stringified with quotes stripped.
        self.tableName = tableName
        self.reason = stripQuotes(str(reason))
        Exception.__init__(self)

    def __str__(self):
        # BUG FIX: the original message ended with a stray unbalanced ")".
        return f"{self.__class__.__name__}({self.tableName}) with {self.reason}"
class Failures(list):
    """Accumulates failure messages, de-duplicated on output."""

    def add(self, reason):
        # An abort is only the root cause when nothing else failed first;
        # ignore it if some other failure is already recorded.
        if 'SequenceAborted(' in reason and self.format():
            return
        self.append(reason)

    def format(self):
        """Return the unique failure messages joined with commas."""
        return ','.join(set(self))
| UTF-8 | Python | false | false | 1,678 | py | 48 | exception.py | 38 | 0.668653 | 0.668653 | 0 | 55 | 29.509091 | 85 |
EisenHao/Python_Learning | 12,592,844,114,529 | a1adfc5896f10a06ccc3421e932088bb510e595b | 45ca5dba64c2f2beb8b0a2bd3579c9029f4035d3 | /PythonApriori/ID3.py | 76d5fd3aa951e3a8a03c5816a03f41ec6506b0ea | [] | no_license | https://github.com/EisenHao/Python_Learning | ffd1e04aad7bef131dd80b3a8303803c2bc15125 | b10d42faa7df5e1cd9f7780cd0953a66079c79d3 | refs/heads/master | 2020-04-02T21:40:35.212240 | 2019-04-30T10:47:38 | 2019-04-30T10:47:38 | 154,807,867 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | # coding: utf-8
# 数据挖掘与知识发现第5次作业-黎豪-18101223769:
# 编程实现ID3决策树建立算法
# 天气因素有温度、湿度和刮风等,通过给出数据,使用决策树算法学习分类,输出一个人是运动和不运动与天气之间的规则树。
import matplotlib.pyplot as plt
from math import log #计算log2()
from pylab import *
# Shannon entropy of the sample labels.
def calcH(dataSet):
    """Return the information entropy of dataSet's class labels.

    Each row of dataSet ends with its decision label; entropy is
    -sum(p * log2(p)) over the label frequencies.
    """
    total = len(dataSet)
    label_counts = {}
    for row in dataSet:
        label = row[-1]  # decision attribute is the last column
        label_counts[label] = label_counts.get(label, 0) + 1
    entropy = 0.0
    for count in label_counts.values():
        p = float(count) / total
        entropy -= p * math.log(p, 2)
    return entropy
# Subset of rows whose column `axis` equals `value`, with that column removed.
def splitDataSet(dataSet, axis, value):
    """Return the rows of dataSet matching value at column axis.

    The matching column itself is dropped from each returned row.
    """
    subset = []
    for row in dataSet:
        if row[axis] == value:
            subset.append(row[:axis] + row[axis + 1:])
    return subset
# Column index of the condition attribute with the highest information gain.
def chooseBestFeatureToSplit(dataSet):
    """Return the best split column, or -1 when no split gains information.

    Gain(A) = H(dataSet) - sum over values v of A of
              P(v) * H(subset where A == v).
    """
    feature_count = len(dataSet[0]) - 1  # last column is the label
    base_entropy = calcH(dataSet)
    best_gain, best_feature = 0, -1
    for col in range(feature_count):
        values = {row[col] for row in dataSet}
        conditional_entropy = 0.0
        for value in values:
            subset = splitDataSet(dataSet, col, value)
            weight = float(len(subset)) / float(len(dataSet))
            conditional_entropy += weight * calcH(subset)
        gain = base_entropy - conditional_entropy
        if gain > best_gain:
            best_gain, best_feature = gain, col
    return best_feature
# Majority vote over a list of labels.
def majorityCnt(classList):
    """Return the most common label in classList.

    BUG FIX: the original called sorted(classCount.items, ...) without
    invoking .items() and used the invalid keyword reversed=True (sorted
    takes reverse=), so it raised TypeError whenever it was reached.
    """
    classCount = {}
    for vote in classList:
        classCount[vote] = classCount.get(vote, 0) + 1
    return max(classCount, key=classCount.get)
# ========== ID3 decision-tree construction ==========
def createTree(dataSet, labels):
    """Recursively build an ID3 decision tree as nested dicts.

    dataSet rows are [attr0, ..., attrN, label]; labels names the attribute
    columns.  NOTE: labels is consumed (the chosen entry is deleted) while
    building, so callers should pass a copy if they need it afterwards.

    Returns a label string for a leaf, or {attribute: {value: subtree}}.
    """
    classList = [example[-1] for example in dataSet]
    # All samples carry the same label: this branch is a pure leaf.
    if classList.count(classList[-1]) == len(classList):
        return classList[-1]
    # BUG FIX: stop when only the label column remains.  The original tested
    # len(classList[0]) — the length of a label *string* — instead of the
    # remaining row width, so single-character labels ended recursion early.
    if len(dataSet[0]) == 1:
        return majorityCnt(classList)
    # Split on the attribute with the highest information gain.
    bestFeat = chooseBestFeatureToSplit(dataSet)
    bestFeatLable = labels[bestFeat]
    myTree = {bestFeatLable: {}}
    del(labels[bestFeat])
    featValues = [example[bestFeat] for example in dataSet]
    for value in set(featValues):
        subLables = labels[:]  # copy so sibling branches see the same labels
        myTree[bestFeatLable][value] = createTree(
            splitDataSet(dataSet, bestFeat, value), subLables)
    return myTree
# ====== Decision-tree rendering with Matplotlib ======
# Node/edge styles shared by the plotting helpers below.
decisionNode = dict(boxstyle="square", ec='k', fc='yellow',)  # decision-node style
leafNode = dict(boxstyle="round", ec=(1., 0.5, 0.5), fc=(1., 0.8, 0.8),)  # leaf-node style
arrow_args = dict(arrowstyle='<-')  # edge arrow style
# Draw one annotated node with an arrow coming from its parent position.
def plotNode(nodeTxt, centerPt, parentPt, nodeType):
    createPlot.ax1.annotate(nodeTxt, xy=parentPt, xycoords='axes fraction',
                            xytext=centerPt, textcoords='axes fraction',
                            va='center', ha='center', bbox=nodeType, arrowprops=arrow_args)
# Demo plot of one decision node and one leaf.  NOTE: superseded by the
# createPlot(inTree) defined later in this module (same name, re-bound).
def createPlot():
    fig=plt.figure(1,facecolor = 'white')
    fig.clf()
    createPlot.ax1=plt.subplot(111, frameon=False)
    plotNode('决策节点', (0.5,0.1), (0.1,0.5), decisionNode)
    plotNode('叶节点', (0.8,0.1), (0.3,0.8), leafNode)
    plt.show()
# Number of leaf nodes (the tree's breadth).
def getNumLeafs(myTree):
    """Count the leaves of a nested-dict decision tree."""
    root_label = next(iter(myTree))  # the single top-level key
    children = myTree[root_label]
    leaf_total = 0
    for child in children.values():
        if type(child) is dict:
            leaf_total += getNumLeafs(child)  # internal node: recurse
        else:
            leaf_total += 1  # plain value: a leaf label
    return leaf_total
# Depth of the tree (number of decision levels).
def getTreeDepth(myTree):
    """Return the maximum number of decision nodes on any root-to-leaf path."""
    children = myTree[next(iter(myTree))]
    depth = 0
    for child in children.values():
        branch_depth = 1 + getTreeDepth(child) if type(child) is dict else 1
        depth = max(depth, branch_depth)
    return depth
# Write txtString halfway between a child node and its parent.
def plotMidText(cntrPt,parentPt,txtString):
    xMid=(parentPt[0]-cntrPt[0])/2+cntrPt[0]
    yMid=(parentPt[1]-cntrPt[1])/2+cntrPt[1]
    createPlot.ax1.text(xMid,yMid,txtString)
# Recursively draw the tree.  Uses function attributes (plotTree.xOff,
# plotTree.yOff, plotTree.totalW, plotTree.totalD — initialised by the
# createPlot(inTree) below) as shared cursor/layout state, so draw order matters.
def plotTree(myTree, parentPt, nodeTxt):
    numLeafs=getNumLeafs(myTree)
    depth=getTreeDepth(myTree)  # computed but unused
    firstStr=list(myTree.keys())[0]
    # centre this subtree above the leaves it spans
    cntrPt=(plotTree.xOff+(1.0+float(numLeafs))/2/plotTree.totalW,plotTree.yOff)
    plotMidText(cntrPt, parentPt, nodeTxt)
    plotNode(firstStr, cntrPt, parentPt, decisionNode)
    secondDict=myTree[firstStr]
    # step one level down before drawing children
    plotTree.yOff=plotTree.yOff -1/plotTree.totalD
    for key in secondDict.keys():
        if type(secondDict[key]).__name__=='dict':
            plotTree(secondDict[key], cntrPt, str(key))
        else:
            # leaf: advance the horizontal cursor and draw the label
            plotTree.xOff=plotTree.xOff+1.0/plotTree.totalW
            plotNode(secondDict[key],(plotTree.xOff,plotTree.yOff),cntrPt,leafNode)
            plotMidText((plotTree.xOff,plotTree.yOff),cntrPt,str(key))
    plotTree.yOff=plotTree.yOff+1/plotTree.totalD
# Render a full decision tree built by createTree().
def createPlot(inTree):
    fig = plt.figure(1, facecolor='white')
    fig.clf()
    axprops = dict(xticks=[], yticks=[])
    createPlot.ax1 = plt.subplot(111, frameon=False, **axprops)
    # layout constants consumed by plotTree(): total width/depth and cursor
    plotTree.totalW = float(getNumLeafs(inTree))
    plotTree.totalD = float(getTreeDepth(inTree))
    plotTree.xOff = -0.5/plotTree.totalW; plotTree.yOff = 1.0;
    plotTree(inTree, (0.5, 1.0), '')
    plt.show()
# Load a comma-separated data file.
def loadDataFile(filename):
    """Return the file's lines as lists of comma-separated fields.

    Each line is stripped of surrounding whitespace and split on ','.
    """
    print('加载', filename, '文件数据...')
    # BUG FIX: use a context manager so the handle is always closed
    # (the original left the file open).
    with open(filename) as fr:
        data = [inst.strip().split(',') for inst in fr.readlines()]
    return data
# Pre-process temperature/humidity: bucket each value down to its tens digit.
def dataWrangling(data, iColumn):
    """Floor column iColumn of every row to a multiple of 10 (kept as str).

    Mutates data in place and returns it.
    """
    for row in data:
        row[iColumn] = str((int(row[iColumn]) // 10) * 10)
    return data
# # main函数
# if __name__ == '__main__':
# dataLabels = ['weather', 'temperature', 'humidity', 'wind conditions', 'exercise'] #数据的属性名称
# data = loadDataFile('ID3dataEn.csv') #加载数据文件
# print('预处理前数据:', data)
# #预处理 温度和湿度 数据
# data = dataWrangling(data, 1) #整理 温度数据 取十位数
# data = dataWrangling(data, 2) #整理 湿度数据 取十位数
# print('处理后数据:', data)
# myTree = createTree(data, dataLabels) #构造决策树
# print('决策树构造函数测试', myTree)
# createPlot(myTree) #显示决策树
| UTF-8 | Python | false | false | 8,997 | py | 20 | ID3.py | 14 | 0.651204 | 0.6351 | 0 | 213 | 33.084507 | 97 |
raunak222/Water-Crisis | 4,131,758,570,400 | e8a41000b60069002c8d48d27696830b8aade88a | 95869472de2cd0b2fa4fed0461860349370a9d4e | /Website/cgi-bin/first.cgi | 3af556af7ae6f9fe62980af5efb18881dfbaf4cd | [] | no_license | https://github.com/raunak222/Water-Crisis | 74e1df569a5ede10ec5d18909781cea486c745c1 | cd7b5eaa96869c27a8c93e420523d48593478fea | refs/heads/master | 2020-07-03T00:18:02.068301 | 2019-08-11T05:41:20 | 2019-08-11T05:41:20 | 201,722,023 | 0 | 0 | null | true | 2019-08-11T05:31:16 | 2019-08-11T05:31:16 | 2019-08-11T04:14:58 | 2019-08-11T04:14:56 | 1,302 | 0 | 0 | 0 | null | false | false | #!/usr/bin/python3
import cgi
#import os
import cgitb # cgi trace back--->>To show common error in webbrowser
cgitb.enable()
import subprocess
#import pandas as pd
#import matplotlib.pyplot as plt
print("content-type:text/html")
print("")
web_data=cgi.FieldStorage() # This will collect all the html code with data
# Now extracting the value of x
data=web_data.getvalue('x')
if data==2004:
print("""<meta http-equiv="refresh" content="0;url=http://3.87.210.149/a.html">""")
if data==2019:
print("""<meta http-equiv="refresh" content="0;url=http://3.87.210.149/b.html">""")
| UTF-8 | Python | false | false | 584 | cgi | 2 | first.cgi | 1 | 0.705479 | 0.655822 | 0 | 23 | 24.391304 | 87 |
janathent/GMM | 18,073,222,383,127 | 3ec14033705fbd9e32de2a90ca9c6d35589b4216 | 5c91d522f97426ecba02cd6d9ec975b3783da141 | /GenerateVideo.py | 3aa1a81700ba4f0e17766ae810827fde3b3e609c | [] | no_license | https://github.com/janathent/GMM | 530480e3591dfa3ab67e9e63611d782be9e5a545 | 3d5c56a944fe6576d0f73bb707a5210188487a08 | refs/heads/main | 2023-01-01T11:05:38.929969 | 2020-10-26T03:00:57 | 2020-10-26T03:00:57 | 305,720,968 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import glob
import cv2
import numpy as np
def _write_side_by_side_video(mask_dir, out_name):
    """Write a video pairing each WavingTrees source frame (left) with the
    corresponding foreground-mask frame from mask_dir (right).

    Frames are matched by sorted filename order; zip() stops at the shorter
    list, so a missing frame no longer raises IndexError.  This helper
    replaces three copy-pasted functions that differed only in paths.
    """
    mask_frames = glob.glob(mask_dir + '/*.jpg')
    print(mask_frames)
    mask_frames = sorted(mask_frames)
    source_frames = sorted(glob.glob('WavingTrees/b*.bmp'))
    print(source_frames)
    # Report the mask frame geometry.
    img = cv2.imread(mask_dir + '/000.jpg')
    imgInfo = img.shape
    size = (imgInfo[1], imgInfo[0])
    print("图片的大小为:", imgInfo)
    print("定义的size:", size)
    # 10 fps on a 320x120 canvas (two 160x120 frames side by side).
    videowrite = cv2.VideoWriter(out_name, -1, 10, (320, 120))
    for mask_path, source_path in zip(mask_frames, source_frames):
        mask = cv2.imread(mask_path)
        source = cv2.imread(source_path)
        videowrite.write(np.concatenate((source, mask), axis=1))


def generate_video_mog2():
    """MOG2 results next to the original WavingTrees frames."""
    _write_side_by_side_video('MOG2_OUTPUT', 'With_MOG2.mp4')


def generate_video_gmm():
    """Custom GMM results next to the original WavingTrees frames."""
    _write_side_by_side_video('GMM_OUTPUT', 'With_GMM.mp4')


def generate_video_gmm_primordial():
    """Primordial GMM results next to the original WavingTrees frames."""
    _write_side_by_side_video('GMM_OUTPUT_Primordial', 'With_GMM_Primordial.mp4')
if __name__ == '__main__':
    # Render all three comparison videos.
    generate_video_gmm()
    generate_video_mog2()
    generate_video_gmm_primordial()
| UTF-8 | Python | false | false | 2,670 | py | 6 | GenerateVideo.py | 5 | 0.637136 | 0.605583 | 0 | 80 | 29.875 | 79 |
xhr-git/TestCase-DSL | 6,923,487,285,288 | 4c0fd5b688f60ee47a3e5e92a50f532dad3dd04b | c4f6a13c52ba5cdb6333f34cff4ffc0729eabeec | /auto.py | cafda1a388a24615201ff015744b674aff85e0c4 | [] | no_license | https://github.com/xhr-git/TestCase-DSL | 6d3ed613015ac9aa24cbed0367b5631a54f1addd | 3b6d54f2182b8d889af06b17589ad20eb4cb2a75 | refs/heads/main | 2023-03-03T09:51:43.079772 | 2021-02-10T08:39:24 | 2021-02-10T08:39:24 | 337,663,765 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from TestStep.core.tc_to_TestStep import get_test_step
import sys
def __main(file: str, show_desc: bool = False):
ret = 0
t = get_test_step(file, show_desc)
print()
print('-' * 50)
print()
if show_desc:
t.show_desc()
else:
ret = t.do_test()
if ret == 0:
res = 'pass'
else:
res = 'fail'
print()
print('test result = {} {}'.format(ret, res))
print()
print('-' * 50)
print()
exit(ret)
if __name__ == '__main__':
__argc = len(sys.argv)
__show_desc = False
if __argc == 2:
pass
elif __argc == 3 and sys.argv[2] == 'show':
__show_desc = True
else:
print()
print('Usage:')
print(' python3 auto.py <xxx.tc> [show]')
print()
exit(1)
__main(sys.argv[1], __show_desc)
| UTF-8 | Python | false | false | 859 | py | 21 | auto.py | 20 | 0.46915 | 0.45518 | 0 | 40 | 20.45 | 54 |
polyeffects/digit_ui | 6,236,292,516,263 | 7f0d9e4460e939281fe34139eee2a216aaad0c37 | 87d4fb8b77749dae1ae43e4d6055a712b8a974dd | /ingen.py | b06af0c83637be097cbd7c50780428f2580d7879 | [] | no_license | https://github.com/polyeffects/digit_ui | 558dfb8eb31a3354c5190b9c338679ecf61f0ad0 | 867e8d3ea386722a0a7bd6256db9ad8ec0798003 | refs/heads/master | 2023-08-28T18:51:30.254358 | 2023-07-28T03:42:00 | 2023-07-28T03:42:00 | 246,703,879 | 13 | 5 | null | false | 2022-12-14T14:43:15 | 2020-03-11T23:53:19 | 2022-12-12T21:14:30 | 2022-12-14T14:43:14 | 1,429 | 14 | 4 | 21 | QML | false | false | #!/usr/bin/env python
# Ingen Python Interface
# Copyright 2012-2015 David Robillard <http://drobilla.net>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import os
import re
import socket
import sys
try:
import StringIO.StringIO as StringIO
except ImportError:
from io import StringIO as StringIO
_FINISH = False
class Interface:
'The core Ingen interface'
def put(self, subject, body):
pass
def put_internal(self, subject, body):
pass
def patch(self, subject, remove, add):
pass
def get(self, subject):
pass
def set(self, subject, key, value):
pass
def connect(self, tail, head):
pass
def disconnect(self, tail, head):
pass
def disconnect_all(self, subject):
pass
def delete(self, subject):
pass
def copy(self, subject, destination):
pass
class Error(Exception):
def __init__(self, msg, cause):
Exception.__init__(self, '%s; cause: %s' % (msg, cause))
def lv2_path():
path = os.getenv('LV2_PATH')
if path:
return path
elif sys.platform == 'darwin':
return os.pathsep.join(['~/Library/Audio/Plug-Ins/LV2',
'~/.lv2',
'/usr/local/lib/lv2',
'/usr/lib/lv2',
'/Library/Audio/Plug-Ins/LV2'])
elif sys.platform == 'haiku':
return os.pathsep.join(['~/.lv2',
'/boot/common/add-ons/lv2'])
elif sys.platform == 'win32':
return os.pathsep.join([
os.path.join(os.getenv('APPDATA'), 'LV2'),
os.path.join(os.getenv('COMMONPROGRAMFILES'), 'LV2')])
else:
return os.pathsep.join(['~/.lv2',
'/usr/lib/lv2',
'/usr/local/lib/lv2'])
class Remote(Interface):
def __init__(self, uri='unix:///tmp/ingen.sock'):
self.msg_id = 1
self.server_base = uri + '/'
self.server_uri = uri
# self.model = rdflib.Graph()
# self.ns_manager = rdflib.namespace.NamespaceManager(self.model)
# self.ns_manager.bind('server', self.server_base)
def __del__(self):
self.sock.close()
def socket_connect(self):
connected = False
# for (k, v) in NS.__dict__.items():
# if not k.startswith("__"):
# self.ns_manager.bind(k, v)
while not connected:
try:
if self.server_uri.startswith('unix://'):
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.sock.connect(self.server_uri[len('unix://'):])
connected = True
elif self.server_uri.startswith('tcp://'):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
parsed = re.split('[:/]', self.server_uri[len('tcp://'):])
addr = (parsed[0], int(parsed[1]))
self.sock.connect(addr)
connected = True
else:
raise Exception('Unsupported server URI `%s' % self.server_uri)
except ConnectionError as e:
pass
def msgencode(self, msg):
if sys.version_info[0] == 3:
return bytes(msg, 'utf-8')
else:
return msg
def uri_to_path(self, uri):
path = uri
if uri.startswith(self.server_base):
return uri[len(self.server_base)-1:]
return uri
def recv(self):
"""Read from socket until a null terminator is received
or split on \n\n
"""
msg = u''
while not _FINISH:
chunk = self.sock.recv(1, 0)
if not chunk or chunk[0] == 0: # End of transmission
break
# print(chunk.decode('utf-8'), end="")
msg += chunk.decode('utf-8')
if msg[-2:] == '\n\n':
break
# print("msg is ", msg)
return msg
def send(self, msg):
if type(msg) == list:
msg = '\n'.join(msg)
# print("sending", msg)
# Send message to server
self.sock.sendall(self.msgencode(msg) + b'\0')
def get(self, subject):
return self.send('''
[]
a patch:Get ;
patch:subject <%s> .
''' % subject)
def put(self, subject, body):
return self.send('''
[]
a patch:Put ;
patch:subject <%s> ;
patch:body [
%s
] .
''' % (subject, body))
def put_internal(self, subject, body):
return self.send('''
[]
a patch:Put ;
patch:subject <%s> ;
patch:context ingen:internalContext ;
patch:body [
%s
] .
''' % (subject, body))
def patch(self, subject, remove, add):
return self.send('''
[]
a patch:Patch ;
patch:subject <%s> ;
patch:remove [
%s
] ;
patch:add [
%s
] .
''' % (subject, remove, add))
def set(self, subject, key, value):
return self.send('''
[]
a patch:Set ;
patch:subject <%s> ;
patch:property <%s> ;
patch:value %s .
''' % (subject, key, value))
def connect(self, tail, head):
return self.send('''
[]
a patch:Put ;
patch:subject <%s> ;
patch:body [
a ingen:Arc ;
ingen:tail <%s> ;
ingen:head <%s> ;
] .
''' % (os.path.commonprefix([tail, head]), tail, head))
def disconnect(self, tail, head):
return self.send('''
[]
a patch:Delete ;
patch:body [
a ingen:Arc ;
ingen:tail <%s> ;
ingen:head <%s> ;
] .
''' % (tail, head))
def disconnect_all(self, subject):
return self.send('''
[]
a patch:Delete ;
patch:subject </main> ;
patch:body [
a ingen:Arc ;
ingen:incidentTo <%s>
] .
''' % subject)
def delete(self, subject):
return self.send('''
[]
a patch:Delete ;
patch:subject <%s> .
''' % subject)
def copy(self, subject, destination):
return self.send('''
[]
a patch:Copy ;
patch:subject <%s> ;
patch:destination <%s> .
''' % (subject, destination))
| UTF-8 | Python | false | false | 6,728 | py | 80 | ingen.py | 23 | 0.548157 | 0.54236 | 0 | 260 | 24.876923 | 83 |
Akash-Ubale/Complete-Machine-Learning | 17,514,876,634,591 | dd99b0d1fb55608fb0d33e797f5577fc56ab9609 | 1a98ae030a7ba6f1678be68e1c1e386074f54942 | /11 Ensemble Techniques/EnsembleLearning_And_RandomForest/Random_forest_deployment/app.py | c92fdef66289ae67e18021d551695a459bdb567a | [] | no_license | https://github.com/Akash-Ubale/Complete-Machine-Learning | f1c2f59d085cd25ce88156c7e4118cd2bc91b530 | ae27b564f4f98ec2e58015594e43b036ebe3b15a | refs/heads/master | 2023-01-13T15:42:03.792871 | 2020-11-17T15:48:34 | 2020-11-17T15:48:34 | 297,038,736 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null | import pickle
from wsgiref import simple_server
from flask import Flask, request, app
from flask import Response
from flask_cors import CORS
import pandas as pd
app = Flask(__name__)
CORS(app)
app.config['DEBUG'] = True
@app.route("/predict", methods=['POST'])
def predictRoute():
try:
if request.json['data'] is not None:
data = request.json['data']
print('data is: ', data)
res = predict_log(data)
print('result is ',res)
return Response(res)
except ValueError:
return Response("Value not found")
except Exception as e:
print('exception is ',e)
return Response(e)
def predict_log(dict_pred):
with open("modelForPrediction.sav", 'rb') as f:
model = pickle.load(f)
data_df = pd.DataFrame(dict_pred,index=[1,])
predict = model.predict(data_df)
if predict[0] == 3:
result = 'Bad'
elif predict[0] == 4 :
result = 'Below Average'
elif predict[0]==5:
result = 'Average'
elif predict[0] == 6:
result = 'Good'
elif predict[0] == 7:
result = 'Very Good'
else :
result = 'Excellent'
return result
if __name__ == "__main__":
host = '0.0.0.0'
port = 5000
app.run(debug=True)
#httpd = simple_server.make_server(host, port, app)
# print("Serving on %s %d" % (host, port))
#httpd.serve_forever() | UTF-8 | Python | false | false | 1,481 | py | 50 | app.py | 5 | 0.55503 | 0.542201 | 0 | 56 | 24.482143 | 55 |
choiSUNJUNG/ai | 9,792,525,443,778 | 4d883c0f2f33dca627aa6606f5d230cacc8cad91 | a1965ac236a634861705adf4d97b8abbe2cf7650 | /10_ai_softmax.py | a536e5e19c022d9a255e11ec7e81b085a47eea9f | [] | no_license | https://github.com/choiSUNJUNG/ai | e6c2d8356bd5f89c64a092a128f0011ed8ec0a5b | 8d4b651680a38fc1c0c79aa7cd47e953795cfa61 | refs/heads/main | 2023-06-12T07:39:30.811088 | 2021-07-05T07:51:03 | 2021-07-05T07:51:03 | 376,518,686 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
import matplotlib.pyplot as plt
def softmax(x):
return np.exp(x) / np.sum(np.exp(x))
x = np.arange(-10, 10, 0.1)
print(x)
y = softmax(x)
print(y)
plt.plot(x, y)
plt.title('Softmax Function')
plt.show() | UTF-8 | Python | false | false | 238 | py | 9 | 10_ai_softmax.py | 9 | 0.634454 | 0.609244 | 0 | 13 | 16.461538 | 40 |
yureistar/graphic_exodus | 2,491,081,033,917 | 477bef10426a70eafa738a049f0dc8a6a6cfe35a | d8c8f036dabba93598451ca59744682f2d449eb4 | /draw.py | 53b8595e35f011c1a94eb30a42f46d0b4f5ca8e3 | [] | no_license | https://github.com/yureistar/graphic_exodus | d191376554337fd8361517bb7420067383ae2ebf | 81f312121f2a105ac3b57d50bec363fa8887c0c8 | refs/heads/master | 2021-01-23T00:48:27.923391 | 2017-06-12T15:47:17 | 2017-06-12T15:47:17 | 92,846,516 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from display import *
from matrix import *
from math import *
from gmath import *
def lighting(polygons, i, color, shadeType, ambient):
I = [0,0,0]
aC = 0.1
dC = 0.3
sC = 0.5
pointL = [[255,255,255], [0, 0, 0]]
normal = calculate_normal(polygons,i)
view = [250,250,0]
if shadeType == 'flat':
#ambient
aL = [0,0,0]
#diffuse
dL = [0,0,0]
pLvec = [polygons[i][0]-pointL[1][0],
polygons[i][1]-pointL[1][1],
polygons[i][2]-pointL[1][2]]
nn = normalize(normal)
nl = normalize(pLvec)
dnl = dot_prod(nn,nl)
#specular
sL = [0,0,0]
vSvec = [polygons[i][0]-view[0],
polygons[i][1]-view[1],
polygons[i][2]-view[2]]
nv = normalize(vSvec)
dvn = dot_prod(nv,nn)
for i in range(3):
aL[i] = ambient[i] * aC
dL[i] = pointL[0][i] * dC * dnl
sL[i] = (2*dvn*vSvec[i]) - view[i]
I[i] = int(aL[i] + dL[i] + sL[i])
if I[i] < 0:
I[i] = 0
if I[i] > 255:
I[i] = 255
return I
def dot_prod(vec1, vec2):
return (vec1[0]*vec2[0])+(vec1[1]*vec2[1])+(vec1[2]*vec2[2])
def magnitude(vec):
return sqrt(vec[0]**2 + vec[1]**2 + vec[2]**2)
def normalize(vec):
mag = magnitude(vec)
return [vec[0]/mag, vec[1]/mag, vec[2]/mag]
def scanline_convert(polygons, i, screen, zbuffer, color, shadeType, ambient):
color = lighting(polygons, i, color, shadeType, ambient)
p1 = [polygons[i][0],polygons[i][1],polygons[i][2]]
p2 = [polygons[i+1][0],polygons[i+1][1],polygons[i+1][2]]
p3 = [polygons[i+2][0],polygons[i+2][1],polygons[i+2][2]]
points = sorted([p1,p2,p3], key=lambda x: x[1])
botX = points[0][0]
botY = points[0][1]
botZ = points[0][2]
midX = points[1][0]
midY = points[1][1]
midZ = points[1][2]
topX = points[2][0]
topY = points[2][1]
topZ = points[2][2]
dx0 = float(topX - botX) / (topY - botY)
dz0 = float(topZ - botZ) / (topY - botY)
if botY != midY:
dx1 = float(midX - botX) / (midY - botY)
x1 = botX
dz1 = float(midZ - botZ) / (midY - botY)
z1 = botZ
else:
dx1 = float(topX - midX) / (topY - midY)
x1 = midX
dz1 = float(topZ - midZ) / (topY - midY)
z1 = midZ
x0 = botX
z0 = botZ
y = botY
while y <= topY:
#print int(x0), int(y), int(z0), int(x1), int(y), int(z1)
draw_line( int(x0), int(y), int(z0), int(x1), int(y), int(z1), screen, zbuffer, color )
if y > midY:
if topY != midY:
dx1 = float(topX - midX) / (topY - midY)
dz1 = float(topZ - midZ) / (topY - midY)
else:
dx1 = float(midX - botX) / (midY - botY)
dz1 = float(midZ - botZ) / (midY - botY)
x0 += dx0
x1 += dx1
y += 1
z0 += dz0
z1 += dz1
def add_polygon( polygons, x0, y0, z0, x1, y1, z1, x2, y2, z2 ):
add_point(polygons, x0, y0, z0);
add_point(polygons, x1, y1, z1);
add_point(polygons, x2, y2, z2);
def draw_polygons( matrix, screen, zbuffer, color, shadeType, ambient ):
line = [0,0,0]
if len(matrix) < 2:
print 'Need at least 3 points to draw'
return
point = 0
while point < len(matrix) - 2:
normal = calculate_normal(matrix, point)[:]
#print normal
if normal[2] > 0:
scanline_convert(matrix, point, screen, zbuffer, color, shadeType, ambient)
draw_line( int(matrix[point][0]),
int(matrix[point][1]),
matrix[point][2],
int(matrix[point+1][0]),
int(matrix[point+1][1]),
matrix[point+1][2],
screen, zbuffer, line)
draw_line( int(matrix[point+2][0]),
int(matrix[point+2][1]),
matrix[point+2][2],
int(matrix[point+1][0]),
int(matrix[point+1][1]),
matrix[point+1][2],
screen, zbuffer, line)
draw_line( int(matrix[point][0]),
int(matrix[point][1]),
matrix[point][2],
int(matrix[point+2][0]),
int(matrix[point+2][1]),
matrix[point+2][2],
screen, zbuffer, line)
point+= 3
def add_box( polygons, x, y, z, width, height, depth ):
x1 = x + width
y1 = y - height
z1 = z - depth
#front
add_polygon(polygons, x, y, z, x1, y1, z, x1, y, z);
add_polygon(polygons, x, y, z, x, y1, z, x1, y1, z);
#back
add_polygon(polygons, x1, y, z1, x, y1, z1, x, y, z1);
add_polygon(polygons, x1, y, z1, x1, y1, z1, x, y1, z1);
#right side
add_polygon(polygons, x1, y, z, x1, y1, z1, x1, y, z1);
add_polygon(polygons, x1, y, z, x1, y1, z, x1, y1, z1);
#left side
add_polygon(polygons, x, y, z1, x, y1, z, x, y, z);
add_polygon(polygons, x, y, z1, x, y1, z1, x, y1, z);
#top
add_polygon(polygons, x, y, z1, x1, y, z, x1, y, z1);
add_polygon(polygons, x, y, z1, x, y, z, x1, y, z);
#bottom
add_polygon(polygons, x, y1, z, x1, y1, z1, x1, y1, z);
add_polygon(polygons, x, y1, z, x, y1, z1, x1, y1, z1);
def add_sphere( edges, cx, cy, cz, r, step ):
points = generate_sphere(cx, cy, cz, r, step)
num_steps = int(1/step+0.1)
lat_start = 0
lat_stop = num_steps
longt_start = 0
longt_stop = num_steps
num_steps+= 1
for lat in range(lat_start, lat_stop):
for longt in range(longt_start, longt_stop):
p0 = lat * (num_steps) + longt
p1 = p0+1
p2 = (p1+num_steps) % (num_steps * (num_steps-1))
p3 = (p0+num_steps) % (num_steps * (num_steps-1))
if longt != num_steps - 2:
add_polygon( edges, points[p0][0],
points[p0][1],
points[p0][2],
points[p1][0],
points[p1][1],
points[p1][2],
points[p2][0],
points[p2][1],
points[p2][2])
if longt != 0:
add_polygon( edges, points[p0][0],
points[p0][1],
points[p0][2],
points[p2][0],
points[p2][1],
points[p2][2],
points[p3][0],
points[p3][1],
points[p3][2])
def generate_sphere( cx, cy, cz, r, step ):
points = []
num_steps = int(1/step+0.1)
rot_start = 0
rot_stop = num_steps
circ_start = 0
circ_stop = num_steps
for rotation in range(rot_start, rot_stop):
rot = step * rotation
for circle in range(circ_start, circ_stop+1):
circ = step * circle
x = r * math.cos(math.pi * circ) + cx
y = r * math.sin(math.pi * circ) * math.cos(2*math.pi * rot) + cy
z = r * math.sin(math.pi * circ) * math.sin(2*math.pi * rot) + cz
points.append([x, y, z])
#print 'rotation: %d\tcircle%d'%(rotation, circle)
return points
def add_torus( edges, cx, cy, cz, r0, r1, step ):
points = generate_torus(cx, cy, cz, r0, r1, step)
num_steps = int(1/step+0.1)
lat_start = 0
lat_stop = num_steps
longt_start = 0
longt_stop = num_steps
for lat in range(lat_start, lat_stop):
for longt in range(longt_start, longt_stop):
p0 = lat * (num_steps) + longt;
if (longt == num_steps - 1):
p1 = p0 - longt;
else:
p1 = p0 + 1;
p2 = (p1 + num_steps) % (num_steps * num_steps);
p3 = (p0 + num_steps) % (num_steps * num_steps);
add_polygon(edges,
points[p0][0],
points[p0][1],
points[p0][2],
points[p3][0],
points[p3][1],
points[p3][2],
points[p2][0],
points[p2][1],
points[p2][2] )
add_polygon(edges,
points[p0][0],
points[p0][1],
points[p0][2],
points[p2][0],
points[p2][1],
points[p2][2],
points[p1][0],
points[p1][1],
points[p1][2] )
def generate_torus( cx, cy, cz, r0, r1, step ):
points = []
num_steps = int(1/step+0.1)
rot_start = 0
rot_stop = num_steps
circ_start = 0
circ_stop = num_steps
for rotation in range(rot_start, rot_stop):
rot = step * rotation
for circle in range(circ_start, circ_stop):
circ = step * circle
x = math.cos(2*math.pi * rot) * (r0 * math.cos(2*math.pi * circ) + r1) + cx;
y = r0 * math.sin(2*math.pi * circ) + cy;
z = -1*math.sin(2*math.pi * rot) * (r0 * math.cos(2*math.pi * circ) + r1) + cz;
points.append([x, y, z])
return points
def add_circle( points, cx, cy, cz, r, step ):
x0 = r + cx
y0 = cy
t = step
while t <= 1.00001:
x1 = r * math.cos(2*math.pi * t) + cx;
y1 = r * math.sin(2*math.pi * t) + cy;
add_edge(points, x0, y0, cz, x1, y1, cz)
x0 = x1
y0 = y1
t+= step
def add_curve( points, x0, y0, x1, y1, x2, y2, x3, y3, step, curve_type ):
xcoefs = generate_curve_coefs(x0, x1, x2, x3, curve_type)[0]
ycoefs = generate_curve_coefs(y0, y1, y2, y3, curve_type)[0]
t = step
while t <= 1.00001:
x = xcoefs[0] * t*t*t + xcoefs[1] * t*t + xcoefs[2] * t + xcoefs[3]
y = ycoefs[0] * t*t*t + ycoefs[1] * t*t + ycoefs[2] * t + ycoefs[3]
add_edge(points, x0, y0, 0, x, y, 0)
x0 = x
y0 = y
t+= step
def draw_lines( matrix, screen, zbuffer, color ):
if len(matrix) < 2:
print 'Need at least 2 points to draw'
return
point = 0
while point < len(matrix) - 1:
draw_line( int(matrix[point][0]),
int(matrix[point][1]),
matrix[point][2],
int(matrix[point+1][0]),
int(matrix[point+1][1]),
matrix[point+1][2],
screen, zbuffer, color)
point+= 2
def add_edge( matrix, x0, y0, z0, x1, y1, z1 ):
add_point(matrix, x0, y0, z0)
add_point(matrix, x1, y1, z1)
def add_point( matrix, x, y, z=0 ):
matrix.append( [x, y, z, 1] )
def draw_line( x0, y0, z0, x1, y1, z1, screen, zbuffer, color ):
#swap points if going right -> left
if x0 > x1:
xt = x0
yt = y0
zt = z0
x0 = x1
y0 = y1
z0 = z1
x1 = xt
y1 = yt
z1 = zt
x = x0
y = y0
z = z0
A = 2 * (y1 - y0)
B = -2 * (x1 - x0)
wide = False
tall = False
if ( abs(x1-x0) >= abs(y1 - y0) ): #octants 1/8
wide = True
loop_start = x
loop_end = x1
dx_east = dx_northeast = 1
dy_east = 0
d_east = A
distance = x1 - x
if ( A > 0 ): #octant 1
d = A + B/2
dy_northeast = 1
d_northeast = A + B
else: #octant 8
d = A - B/2
dy_northeast = -1
d_northeast = A - B
else: #octants 2/7
tall = True
dx_east = 0
dx_northeast = 1
distance = abs(y1 - y)
if ( A > 0 ): #octant 2
d = A/2 + B
dy_east = dy_northeast = 1
d_northeast = A + B
d_east = B
loop_start = y
loop_end = y1
else: #octant 7
d = A/2 - B
dy_east = dy_northeast = -1
d_northeast = A - B
d_east = -1 * B
loop_start = y1
loop_end = y
while ( loop_start < loop_end ):
plot( screen, zbuffer, color, x, y, z )
if ( (wide and ((A > 0 and d > 0) or (A < 0 and d < 0))) or
(tall and ((A > 0 and d < 0) or (A < 0 and d > 0 )))):
x+= dx_northeast
y+= dy_northeast
d+= d_northeast
else:
x+= dx_east
y+= dy_east
d+= d_east
loop_start+= 1
plot( screen, zbuffer, color, x, y, z )
| UTF-8 | Python | false | false | 12,777 | py | 2 | draw.py | 1 | 0.447836 | 0.403538 | 0 | 427 | 28.908665 | 99 |
HongyuHe/leetcode-new-round | 17,746,804,888,419 | 407df985075f4463b8d3da9380e8cb92d1e8aae9 | 2b44fc688cef32f96bfff7c97917788bd219788e | /trees/226_invert_bst.py | fa58af3931f8f6752402f8af6a6cf5d8b91bb633 | [] | no_license | https://github.com/HongyuHe/leetcode-new-round | 536725f77c764f7e32f062a20d8f6eed601e6b57 | 59c4af3b94771deb4f3c4a4fabbdc96c918312c8 | refs/heads/main | 2023-08-26T17:55:29.568825 | 2023-08-25T20:28:30 | 2023-08-25T20:28:30 | 231,279,414 | 7 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Definition for a binary tree node.
class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
class Solution:
def invertTree(self, root: Optional[TreeNode]) -> Optional[TreeNode]:
queue = [root]
while queue:
## Pop the next node
node = queue[0]
queue = queue[1:]
if not node: continue
queue.append(node.left)
queue.append(node.right)
## Swape
node.left, node.right = node.right, node.left
return root
# if len(root)==0: return root
# level = 1
# start = 1
# while start < len(root):
# num_nodes = 2**level
# end = min(len(root), start+num_nodes)
# level_set = root[start: end]
# root = root[: start] + reversed(level_set) + root[end: ]
# start = end
# return root | UTF-8 | Python | false | false | 1,044 | py | 108 | 226_invert_bst.py | 107 | 0.476054 | 0.469349 | 0 | 34 | 29.735294 | 73 |
kel89/Schedule-Optimizer | 10,746,008,198,788 | 6bb9a1facd3d55ba2112f42e21119bd0a3317985 | 905e8f033dad1dfd32eed18694b1893a3d6d88d1 | /Requirement.py | 1878c2d307d0258384d08dc7f0a4d9a1d6d277cf | [] | no_license | https://github.com/kel89/Schedule-Optimizer | 1ea076427546ce266475ef117295b7405b7e9a2b | e684c3c893261e3003a33db951ecb3d25a97badc | refs/heads/master | 2020-03-17T23:54:30.027554 | 2018-05-19T14:12:44 | 2018-05-19T14:12:44 | 134,067,277 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Kenneth Lipke
# 3/27/2018
# Requirement.py
"""
Containts Requirement class, which stores grade level requirements
"""
import tkinter as tk
class MiniRequirement():
"""
Small version of Requirement, just has not tkinter relation so that
it can be pickled
"""
def __init__(self, Req):
"""
Takes in an instance of a Requirement and pulls
the relevant information
"""
self.grade = Req.grade
self.course1 = Req.course1
self.course2 = Req.course2
# Continue to fill in with other Requirement fileds
def create_full(self):
"""
Creates an instance of Requirement, can be used when
no pickling is needed
"""
r = Requirement(self.grade, self.course1, self.course2)
return r
class Requirement():
"""
Class that reprseents a requirement, containts grade, and the one or two classes
that must be taken by students in this grade
"""
def __init__(self, grade, course1, course2=None):
"""
Parameters
----------
grade - grade leve (int) that requirement applies to
course1 - the course name (string) that student must take
course2 - the second 'OR' course the students must take
(None if not included)
"""
self.grade = grade
self.course1 = course1
self.course2 = course2
self.labels = [] # list of all labels for this requirement
def create_label(self, parent, **args):
"""
Creates a tkinter Label attached to the parent widget
that describes the requirement in text
"""
s = "Students in grade " + str(self.grade) + " must take " + \
str(self.course1)
if self.course2 != None:
s += " or " + self.course2
l = tk.Label(parent, text=s, justify = 'left')
l.pack(**args)
self.labels.append(l)
#return self.label
def remove_label(self):
"""
Removes the label (all labels) from wherever it was put
"""
for l in self.labels:
l.destroy()
def describe(self):
"""
Returns string description
"""
s = "Students in grade " + str(self.grade) + " must take " + \
str(self.course1)
if self.course2 != None:
s += " or " + self.course2
return s
| UTF-8 | Python | false | false | 2,058 | py | 12 | Requirement.py | 9 | 0.667153 | 0.654033 | 0 | 93 | 21.107527 | 81 |
rduplain/programming-contests | 10,316,511,467,860 | 29f4cc6900362023533e80c9a3317de3029c5db9 | 662f30b53e92771f411e7a79a9e6cb7647fcbdb8 | /filip/main.py | 2eeac41bf5b917c253791915318082b699a455cf | [] | no_license | https://github.com/rduplain/programming-contests | e4581ce87e703324fac2dddeeac67af2fa62882e | 903fa4722d25831a194abdc97936ca329b2ef410 | refs/heads/master | 2021-10-08T21:50:59.765305 | 2018-12-18T06:29:11 | 2018-12-18T06:29:11 | 108,608,895 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import sys
def flip(s):
return ''.join(reversed(s))
def filip(fd):
x, y = fd.readline().strip().split(' ')
x, y = int(flip(x)), int(flip(y))
return max((x, y))
def main(fd):
print(filip(fd))
if __name__ == '__main__':
main(sys.stdin)
| UTF-8 | Python | false | false | 266 | py | 45 | main.py | 19 | 0.526316 | 0.526316 | 0 | 19 | 13 | 43 |
bissim/FLY-graph | 10,471,130,314,213 | 9455cdd37abe5d8381407f1c07d7a66740f0e7d0 | 508467e2cc9f3172e34f5b965e6f2495b21fa35d | /python/tests/fly/graph/test_graph_connectivity.py | ae6082f8b958d2da912e395d86c932e1e1bb6bf2 | [
"MIT"
] | permissive | https://github.com/bissim/FLY-graph | cfdf9fc25427842f99c4b76a2146315b1f370ca6 | 8bcc3226a525e6abafe64ac6c7b63584a7f8206e | refs/heads/master | 2021-06-23T07:39:05.036245 | 2021-03-13T18:20:03 | 2021-03-13T18:20:03 | 205,849,337 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #! python3
'''
Graph connectivity test
'''
import unittest
from unittest import main
from fly.graph.graph import Graph
from test_graph import GraphTestCase
class GraphConnectivityTest(GraphTestCase):
"""
"""
_TEST_TITLE = "GRAPH CONNECTIVITY TEST"
def test_isConnected(self):
"""
"""
pass
if __name__ == '__main__':
main(verbosity=2)
| UTF-8 | Python | false | false | 384 | py | 61 | test_graph_connectivity.py | 35 | 0.632813 | 0.627604 | 0 | 24 | 15 | 43 |
vanhoefm/krackattacks-scripts | 6,760,278,561,152 | 8cfb863a635a6d27bced7d57ef8abf345b47da56 | 4674b8088ffdf55905d44995f08a0792a3e4cd5c | /tests/hwsim/test_wpas_mesh.py | c3cfb7b0b46e0b10c2feb67c2339e1cd10c27127 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | https://github.com/vanhoefm/krackattacks-scripts | 41daca791638a92aa4cfa68a582e46119037560e | 4b78669686f74efe664c6543b1b5b1616b22f902 | refs/heads/research | 2022-10-29T20:21:11.512335 | 2022-10-16T18:44:41 | 2022-10-16T18:44:41 | 107,408,514 | 2,184 | 577 | NOASSERTION | false | 2021-07-06T12:43:49 | 2017-10-18T12:58:08 | 2021-07-04T15:45:56 | 2021-07-06T12:43:48 | 17,680 | 3,008 | 753 | 2 | C | false | false | # wpa_supplicant mesh mode tests
# Copyright (c) 2014, cozybit Inc.
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import logging
logger = logging.getLogger()
import os
import struct
import subprocess
import time
import hwsim_utils
import hostapd
from wpasupplicant import WpaSupplicant
from utils import HwsimSkip, alloc_fail, fail_test, wait_fail_trigger
from tshark import run_tshark
from test_ap_ht import set_world_reg
from hwsim_utils import set_group_map
def check_mesh_support(dev, secure=False):
    """Skip the test unless the driver supports mesh (and SAE, if secure)."""
    modes = dev.get_capability("modes")
    if "MESH" not in modes:
        raise HwsimSkip("Driver does not support mesh")
    if secure:
        # Secure mesh additionally needs SAE authentication support
        if "SAE" not in dev.get_capability("auth_alg"):
            raise HwsimSkip("SAE not supported")
def check_mesh_scan(dev, params, other_started=False, beacon_int=0):
    """Run a scan on dev and verify that a mesh network shows up.

    params: arguments appended to the SCAN control interface command
        (callers pass use_id=1 so the command returns a scan id that the
        scan events can be matched against).
    other_started: when True, another scan is assumed to already be in
        progress; its start/results events are consumed first and checked
        not to carry our own scan id.
    beacon_int: when non-zero, the expected beacon_int value in the BSS
        entry of the discovered mesh network.

    Raises Exception on any mismatch.
    """
    if not other_started:
        dev.dump_monitor()
    id = dev.request("SCAN " + params)
    if "FAIL" in id:
        raise Exception("Failed to start scan")
    # SCAN returned the numeric scan id as text
    id = int(id)

    if other_started:
        # Consume the events belonging to the scan that was already
        # running; they must not claim our scan id.
        ev = dev.wait_event(["CTRL-EVENT-SCAN-STARTED"])
        if ev is None:
            raise Exception("Other scan did not start")
        if "id=" + str(id) in ev:
            raise Exception("Own scan id unexpectedly included in start event")

        ev = dev.wait_event(["CTRL-EVENT-SCAN-RESULTS"])
        if ev is None:
            raise Exception("Other scan did not complete")
        if "id=" + str(id) in ev:
            raise Exception(
                "Own scan id unexpectedly included in completed event")

    # Now the events for our own scan; these must include our scan id.
    ev = dev.wait_event(["CTRL-EVENT-SCAN-STARTED"])
    if ev is None:
        raise Exception("Scan did not start")
    if "id=" + str(id) not in ev:
        raise Exception("Scan id not included in start event")
    ev = dev.wait_event(["CTRL-EVENT-SCAN-RESULTS"])
    if ev is None:
        raise Exception("Scan did not complete")
    if "id=" + str(id) not in ev:
        raise Exception("Scan id not included in completed event")

    res = dev.request("SCAN_RESULTS")

    if res.find("[MESH]") < 0:
        raise Exception("Scan did not contain a MESH network")

    # First line of SCAN_RESULTS output is a header; the BSSID is the
    # first space-separated field of the first result line.
    bssid = res.splitlines()[1].split(' ')[0]
    bss = dev.get_bss(bssid)
    if bss is None:
        raise Exception("Could not get BSS entry for mesh")
    if 'mesh_capability' not in bss:
        raise Exception("mesh_capability missing from BSS entry")
    if beacon_int:
        if 'beacon_int' not in bss:
            raise Exception("beacon_int missing from BSS entry")
        if str(beacon_int) != bss['beacon_int']:
            raise Exception("Unexpected beacon_int in BSS entry: " + bss['beacon_int'])
    if '[MESH]' not in bss['flags']:
        raise Exception("BSS output did not include MESH flag")
def check_mesh_group_added(dev):
    """Fail unless dev reports having joined a mesh group."""
    if dev.wait_event(["MESH-GROUP-STARTED"]) is None:
        raise Exception("Test exception: Couldn't join mesh")
def check_mesh_group_removed(dev):
    """Fail unless dev reports having left its mesh group."""
    if dev.wait_event(["MESH-GROUP-REMOVED"]) is None:
        raise Exception("Test exception: Couldn't leave mesh")
def check_mesh_peer_connected(dev, timeout=10):
    """Fail unless a mesh peer connects within timeout seconds."""
    if dev.wait_event(["MESH-PEER-CONNECTED"], timeout=timeout) is None:
        raise Exception("Test exception: Remote peer did not connect.")
def check_mesh_peer_disconnected(dev):
    """Fail unless a mesh peer disconnection event is seen."""
    if dev.wait_event(["MESH-PEER-DISCONNECTED"]) is None:
        raise Exception("Test exception: Peer disconnect event not detected.")
def test_wpas_add_set_remove_support(dev):
    """wpa_supplicant MESH add/set/remove network support"""
    check_mesh_support(dev[0])
    # Add a network, switch it to mesh mode (5), and remove it again
    net_id = dev[0].add_network()
    dev[0].set_network(net_id, "mode", "5")
    dev[0].remove_network(net_id)
def add_open_mesh_network(dev, freq="2412", start=True, beacon_int=0,
                          basic_rates=None, chwidth=0, disable_vht=False,
                          disable_ht40=False):
    """Configure an open mesh network profile on dev.

    The network is started with MESH_GROUP_ADD unless start is False.
    Returns the network id of the added profile.
    """
    net_id = dev.add_network()
    # Mandatory parameters for an open mesh (mode 5, no key management)
    dev.set_network(net_id, "mode", "5")
    dev.set_network_quoted(net_id, "ssid", "wpas-mesh-open")
    dev.set_network(net_id, "key_mgmt", "NONE")
    # Optional parameters, applied only when enabled by the caller
    optional = [("frequency", freq, freq),
                ("max_oper_chwidth", str(chwidth), chwidth > 0),
                ("beacon_int", str(beacon_int), beacon_int),
                ("mesh_basic_rates", basic_rates, basic_rates),
                ("disable_vht", "1", disable_vht),
                ("disable_ht40", "1", disable_ht40)]
    for field, value, enabled in optional:
        if enabled:
            dev.set_network(net_id, field, value)
    if start:
        dev.mesh_group_add(net_id)
    return net_id
def test_wpas_mesh_group_added(dev):
    """wpa_supplicant MESH group add"""
    node = dev[0]
    check_mesh_support(node)
    add_open_mesh_network(node)
    # Joining is confirmed by the MESH-GROUP-STARTED event
    check_mesh_group_added(node)
def test_wpas_mesh_group_remove(dev):
    """wpa_supplicant MESH group remove"""
    node = dev[0]
    check_mesh_support(node)
    add_open_mesh_network(node)
    # Joining is confirmed by the MESH-GROUP-STARTED event
    check_mesh_group_added(node)
    node.mesh_group_remove()
    # Leaving is confirmed by the MESH-GROUP-REMOVED event
    check_mesh_group_removed(node)
    # A second removal with no active group must not misbehave
    node.mesh_group_remove()
def test_wpas_mesh_peer_connected(dev):
    """wpa_supplicant MESH peer connected"""
    check_mesh_support(dev[0])
    for node in dev[:2]:
        add_open_mesh_network(node, beacon_int=160)
    # Both devices must join the mesh...
    for node in dev[:2]:
        check_mesh_group_added(node)
    # ...and see each other as connected peers
    for node in dev[:2]:
        check_mesh_peer_connected(node)
def test_wpas_mesh_peer_disconnected(dev):
    """wpa_supplicant MESH peer disconnected"""
    check_mesh_support(dev[0])
    for node in dev[:2]:
        add_open_mesh_network(node)
    # Both devices must join the mesh...
    for node in dev[:2]:
        check_mesh_group_added(node)
    # ...and see each other as connected peers
    for node in dev[:2]:
        check_mesh_peer_connected(node)
    # Tearing down the group on dev1 must show up as a peer loss on dev0
    dev[1].mesh_group_remove()
    check_mesh_peer_disconnected(dev[0])
def test_wpas_mesh_mode_scan(dev):
    """wpa_supplicant MESH scan support"""
    check_mesh_support(dev[0])
    add_open_mesh_network(dev[0])
    add_open_mesh_network(dev[1], beacon_int=175)
    # Both devices must join the mesh
    for node in dev[:2]:
        check_mesh_group_added(node)
    # A scan from dev0 must find the mesh and dev1's non-default beacon_int
    check_mesh_scan(dev[0], "use_id=1 freq=2412", beacon_int=175)
def test_wpas_mesh_open(dev, apdev):
    """wpa_supplicant open MESH network connectivity"""
    check_mesh_support(dev[0])
    # Non-default channel and basic rate set exercise those config paths
    add_open_mesh_network(dev[0], freq="2462", basic_rates="60 120 240")
    add_open_mesh_network(dev[1], freq="2462", basic_rates="60 120 240")
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    # Test connectivity 0->1 and 1->0
    hwsim_utils.test_connectivity(dev[0], dev[1])
    # Both nodes must report a fully established connection in mesh mode
    state = dev[0].get_status_field("wpa_state")
    if state != "COMPLETED":
        raise Exception("Unexpected wpa_state on dev0: " + state)
    state = dev[1].get_status_field("wpa_state")
    if state != "COMPLETED":
        raise Exception("Unexpected wpa_state on dev1: " + state)
    mode = dev[0].get_status_field("mode")
    if mode != "mesh":
        raise Exception("Unexpected mode: " + mode)
def test_wpas_mesh_open_no_auto(dev, apdev):
    """wpa_supplicant open MESH network connectivity"""
    check_mesh_support(dev[0])
    id = add_open_mesh_network(dev[0], start=False)
    # Tune plink retry behavior before starting the group
    dev[0].set_network(id, "dot11MeshMaxRetries", "16")
    dev[0].set_network(id, "dot11MeshRetryTimeout", "255")
    dev[0].mesh_group_add(id)
    id = add_open_mesh_network(dev[1], start=False)
    # dev[1] will not initiate peering on its own; dev[0] must initiate
    dev[1].set_network(id, "no_auto_peer", "1")
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    check_mesh_peer_connected(dev[0], timeout=30)
    check_mesh_peer_connected(dev[1])
    # Test connectivity 0->1 and 1->0
    hwsim_utils.test_connectivity(dev[0], dev[1])
def test_mesh_open_no_auto2(dev, apdev):
    """Open mesh network connectivity, no_auto on both peers"""
    check_mesh_support(dev[0])
    id = add_open_mesh_network(dev[0], start=False)
    dev[0].set_network(id, "no_auto_peer", "1")
    dev[0].mesh_group_add(id)
    id = add_open_mesh_network(dev[1], start=False)
    dev[1].set_network(id, "no_auto_peer", "1")
    dev[1].mesh_group_add(id)
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # With no_auto_peer on both sides, neither node starts peering by itself
    ev = dev[0].wait_event(["will not initiate new peer link"], timeout=10)
    if ev is None:
        raise Exception("Missing no-initiate message")
    addr1 = dev[1].own_addr()
    # MESH_PEER_ADD triggers peering manually; unknown STA must be rejected
    if "OK" not in dev[0].request("MESH_PEER_ADD " + addr1):
        raise Exception("MESH_PEER_ADD failed")
    if "FAIL" not in dev[0].request("MESH_PEER_ADD ff:ff:ff:ff:ff:ff"):
        raise Exception("MESH_PEER_ADD with unknown STA succeeded")
    check_mesh_peer_connected(dev[0], timeout=30)
    check_mesh_peer_connected(dev[1])
    # Adding an already-connected peer must fail
    if "FAIL" not in dev[0].request("MESH_PEER_ADD " + addr1):
        raise Exception("MESH_PEER_ADD succeeded for connected STA")
    hwsim_utils.test_connectivity(dev[0], dev[1])
def test_mesh_open_rssi_threshold(dev, apdev):
    """Open mesh network with RSSI threshold"""
    check_mesh_support(dev[0])
    # The test expects negative values to be passed through and
    # non-negative values to be reported back as 0 (threshold disabled)
    _test_mesh_open_rssi_threshold(dev, apdev, -255, -255)
    _test_mesh_open_rssi_threshold(dev, apdev, 0, 0)
    _test_mesh_open_rssi_threshold(dev, apdev, 1, 0)
def _test_mesh_open_rssi_threshold(dev, apdev, value, expected):
    """Start a mesh group with the given mesh_rssi_threshold value and
    verify the value the kernel reports via iw matches 'expected'."""
    net_id = add_open_mesh_network(dev[0], start=False)
    dev[0].set_network(net_id, "mesh_rssi_threshold", str(value))
    dev[0].mesh_group_add(net_id)
    check_mesh_group_added(dev[0])
    # Read the effective threshold back from the driver via iw
    proc = subprocess.Popen([ "iw", "dev", dev[0].ifname, "get", "mesh_param",
                              "mesh_rssi_threshold" ], stdout=subprocess.PIPE)
    output = proc.stdout.read()
    mesh_rssi_threshold = int(output.split(" ")[0])
    # Tear the group down before checking so failures leave no stale group
    dev[0].mesh_group_remove()
    check_mesh_group_removed(dev[0])
    if mesh_rssi_threshold != expected:
        raise Exception("mesh_rssi_threshold should be " + str(expected) +
                        ": " + str(mesh_rssi_threshold))
def add_mesh_secure_net(dev, psk=True, pmf=False, pairwise=None, group=None):
    """Configure a secure (SAE) mesh network profile on dev.

    Returns the new network id; the group is not started here."""
    net_id = dev.add_network()
    # mode 5 = mesh; fixed SSID/frequency shared by all secure-mesh tests
    dev.set_network(net_id, "mode", "5")
    dev.set_network_quoted(net_id, "ssid", "wpas-mesh-sec")
    dev.set_network(net_id, "key_mgmt", "SAE")
    dev.set_network(net_id, "frequency", "2412")
    if psk:
        dev.set_network_quoted(net_id, "psk", "thisismypassphrase!")
    if pmf:
        # ieee80211w=2: management frame protection required
        dev.set_network(net_id, "ieee80211w", "2")
    if pairwise:
        dev.set_network(net_id, "pairwise", pairwise)
    if group:
        dev.set_network(net_id, "group", group)
    return net_id
def test_wpas_mesh_secure(dev, apdev):
    """wpa_supplicant secure MESH network connectivity"""
    check_mesh_support(dev[0], secure=True)
    # Empty value resets sae_groups to the default group list
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0])
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1])
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    # Test connectivity 0->1 and 1->0
    hwsim_utils.test_connectivity(dev[0], dev[1])
    # Both ends must report a completed connection
    state = dev[0].get_status_field("wpa_state")
    if state != "COMPLETED":
        raise Exception("Unexpected wpa_state on dev0: " + state)
    state = dev[1].get_status_field("wpa_state")
    if state != "COMPLETED":
        raise Exception("Unexpected wpa_state on dev1: " + state)
def test_mesh_secure_pmf(dev, apdev):
    """Secure mesh network connectivity with PMF enabled"""
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    # pmf=True sets ieee80211w=2 (management frame protection required)
    id = add_mesh_secure_net(dev[0], pmf=True)
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1], pmf=True)
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    # Test connectivity 0->1 and 1->0
    hwsim_utils.test_connectivity(dev[0], dev[1])
def run_mesh_secure(dev, cipher):
    """Helper: form a two-node secure mesh using 'cipher' for both the
    pairwise and group cipher, then verify data connectivity."""
    # Skip rather than fail when the driver does not support the cipher
    if cipher not in dev[0].get_capability("pairwise"):
        raise HwsimSkip("Cipher %s not supported" % cipher)
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0], pairwise=cipher, group=cipher)
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1], pairwise=cipher, group=cipher)
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    # Test connectivity 0->1 and 1->0
    hwsim_utils.test_connectivity(dev[0], dev[1])
def test_mesh_secure_ccmp(dev, apdev):
    """Secure mesh with CCMP"""
    # Shared helper performs the full join-and-connectivity sequence
    run_mesh_secure(dev, "CCMP")
def test_mesh_secure_gcmp(dev, apdev):
    """Secure mesh with GCMP"""
    # Shared helper performs the full join-and-connectivity sequence
    run_mesh_secure(dev, "GCMP")
def test_mesh_secure_gcmp_256(dev, apdev):
    """Secure mesh with GCMP-256"""
    # Shared helper performs the full join-and-connectivity sequence
    run_mesh_secure(dev, "GCMP-256")
def test_mesh_secure_ccmp_256(dev, apdev):
    """Secure mesh with CCMP-256"""
    # Shared helper performs the full join-and-connectivity sequence
    run_mesh_secure(dev, "CCMP-256")
def test_mesh_secure_invalid_pairwise_cipher(dev, apdev):
    """Secure mesh and invalid pairwise cipher"""
    # NOTE: the docstring previously said "invalid group cipher" — a
    # copy-paste from the sibling test; this function rejects an invalid
    # *pairwise* cipher (TKIP is not allowed for mesh pairwise).
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0], pairwise="TKIP", group="CCMP")
    # Group add must be rejected locally with the matching error event
    if dev[0].mesh_group_add(id) != None:
        raise Exception("Unexpected group add success")
    ev = dev[0].wait_event(["mesh: Invalid pairwise cipher"], timeout=1)
    if ev is None:
        raise Exception("Invalid pairwise cipher not reported")
def test_mesh_secure_invalid_group_cipher(dev, apdev):
    """Secure mesh and invalid group cipher"""
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    # TKIP is not a valid mesh group cipher; group add must fail locally
    id = add_mesh_secure_net(dev[0], pairwise="CCMP", group="TKIP")
    if dev[0].mesh_group_add(id) != None:
        raise Exception("Unexpected group add success")
    ev = dev[0].wait_event(["mesh: Invalid group cipher"], timeout=1)
    if ev is None:
        raise Exception("Invalid group cipher not reported")
def test_wpas_mesh_secure_sae_group_mismatch(dev, apdev):
    """wpa_supplicant secure MESH and SAE group mismatch"""
    # dev0 (groups 19,25) and dev1 (group 19) share group 19 and can
    # connect; dev2 offers only group 26 and must stay disconnected.
    check_mesh_support(dev[0], secure=True)
    addr0 = dev[0].p2p_interface_addr()
    addr1 = dev[1].p2p_interface_addr()
    dev[0].request("SET sae_groups 19 25")
    id = add_mesh_secure_net(dev[0])
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups 19")
    id = add_mesh_secure_net(dev[1])
    dev[1].mesh_group_add(id)
    dev[2].request("SET sae_groups 26")
    id = add_mesh_secure_net(dev[2])
    dev[2].mesh_group_add(id)
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    check_mesh_group_added(dev[2])
    # dev0 and dev1 must connect to each other (and only to each other)
    ev = dev[0].wait_event(["MESH-PEER-CONNECTED"])
    if ev is None:
        raise Exception("Remote peer did not connect")
    if addr1 not in ev:
        raise Exception("Unexpected peer connected: " + ev)
    ev = dev[1].wait_event(["MESH-PEER-CONNECTED"])
    if ev is None:
        raise Exception("Remote peer did not connect")
    if addr0 not in ev:
        raise Exception("Unexpected peer connected: " + ev)
    # dev2 has no SAE group in common and must not connect to anyone
    ev = dev[2].wait_event(["MESH-PEER-CONNECTED"], timeout=1)
    if ev is not None:
        raise Exception("Unexpected peer connection at dev[2]: " + ev)
    ev = dev[0].wait_event(["MESH-PEER-CONNECTED"], timeout=0.1)
    if ev is not None:
        raise Exception("Unexpected peer connection: " + ev)
    ev = dev[1].wait_event(["MESH-PEER-CONNECTED"], timeout=0.1)
    if ev is not None:
        raise Exception("Unexpected peer connection: " + ev)
    # Restore default SAE groups for subsequent test cases
    dev[0].request("SET sae_groups ")
    dev[1].request("SET sae_groups ")
    dev[2].request("SET sae_groups ")
def test_wpas_mesh_secure_sae_group_negotiation(dev, apdev):
    """wpa_supplicant secure MESH and SAE group negotiation"""
    check_mesh_support(dev[0], secure=True)
    addr0 = dev[0].own_addr()
    addr1 = dev[1].own_addr()
    # dev1 prefers group 19 but must fall back to 26 to match dev0
    #dev[0].request("SET sae_groups 21 20 25 26")
    dev[0].request("SET sae_groups 26")
    id = add_mesh_secure_net(dev[0])
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups 19 26")
    id = add_mesh_secure_net(dev[1])
    dev[1].mesh_group_add(id)
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    # Restore default SAE groups for subsequent test cases
    dev[0].request("SET sae_groups ")
    dev[1].request("SET sae_groups ")
def test_wpas_mesh_secure_sae_missing_password(dev, apdev):
    """wpa_supplicant secure MESH and missing SAE password"""
    check_mesh_support(dev[0], secure=True)
    # Raw PSK (hex) instead of a passphrase: SAE needs the passphrase,
    # so the mesh join must fail
    id = add_mesh_secure_net(dev[0], psk=False)
    dev[0].set_network(id, "psk", "8f20b381f9b84371d61b5080ad85cac3c61ab3ca9525be5b2d0f4da3d979187a")
    dev[0].mesh_group_add(id)
    ev = dev[0].wait_event(["MESH-GROUP-STARTED", "Could not join mesh"],
                           timeout=5)
    if ev is None:
        raise Exception("Timeout on mesh start event")
    if "MESH-GROUP-STARTED" in ev:
        raise Exception("Unexpected mesh group start")
    # Make sure no delayed group start shows up either
    ev = dev[0].wait_event(["MESH-GROUP-STARTED"], timeout=0.1)
    if ev is not None:
        raise Exception("Unexpected mesh group start")
def test_wpas_mesh_secure_no_auto(dev, apdev):
    """wpa_supplicant secure MESH network connectivity"""
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups 19")
    id = add_mesh_secure_net(dev[0])
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups 19")
    id = add_mesh_secure_net(dev[1])
    # dev[1] does not initiate peering; dev[0] has to drive the exchange
    dev[1].set_network(id, "no_auto_peer", "1")
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    check_mesh_peer_connected(dev[0], timeout=30)
    check_mesh_peer_connected(dev[1])
    # Test connectivity 0->1 and 1->0
    hwsim_utils.test_connectivity(dev[0], dev[1])
    # Restore default SAE groups for subsequent test cases
    dev[0].request("SET sae_groups ")
    dev[1].request("SET sae_groups ")
def test_wpas_mesh_secure_dropped_frame(dev, apdev):
    """Secure mesh network connectivity when the first plink Open is dropped"""
    check_mesh_support(dev[0], secure=True)
    # Take over management frame processing so a frame can be dropped
    dev[0].request("SET ext_mgmt_frame_handling 1")
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0])
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1])
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Drop the first Action frame (plink Open) to test unexpected order of
    # Confirm/Open messages.
    count = 0
    while True:
        count += 1
        if count > 10:
            raise Exception("Did not see Action frames")
        rx_msg = dev[0].mgmt_rx()
        if rx_msg is None:
            raise Exception("MGMT-RX timeout")
        # subtype 13 = Action frame; drop it, re-inject everything else
        if rx_msg['subtype'] == 13:
            logger.info("Drop the first Action frame")
            break
        if "OK" not in dev[0].request("MGMT_RX_PROCESS freq={} datarate={} ssi_signal={} frame={}".format(rx_msg['freq'], rx_msg['datarate'], rx_msg['ssi_signal'], rx_msg['frame'].encode('hex'))):
            raise Exception("MGMT_RX_PROCESS failed")
    # Return frame processing to wpa_supplicant; retransmissions should
    # recover the peering
    dev[0].request("SET ext_mgmt_frame_handling 0")
    # Check for peer connected
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    # Test connectivity 0->1 and 1->0
    hwsim_utils.test_connectivity(dev[0], dev[1])
def test_mesh_secure_fail(dev, apdev):
    """Secure mesh network connectivity failure"""
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0], pmf=True)
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1], pmf=True)
    # Inject a single sta_add failure in the auth path; peering is still
    # expected to complete on retry
    with fail_test(dev[0], 1, "wpa_driver_nl80211_sta_add;mesh_mpm_auth_peer"):
        dev[1].mesh_group_add(id)
        check_mesh_group_added(dev[0])
        check_mesh_group_added(dev[1])
        check_mesh_peer_connected(dev[0])
        check_mesh_peer_connected(dev[1])
def test_wpas_mesh_ctrl(dev):
    """wpa_supplicant ctrl_iface mesh command error cases"""
    check_mesh_support(dev[0])
    # Unknown network id must be rejected
    if "FAIL" not in dev[0].request("MESH_GROUP_ADD 123"):
        raise Exception("Unexpected MESH_GROUP_ADD success")
    # Network exists but is not in mesh mode yet
    id = dev[0].add_network()
    if "FAIL" not in dev[0].request("MESH_GROUP_ADD %d" % id):
        raise Exception("Unexpected MESH_GROUP_ADD success")
    # Mesh mode with a non-mesh key_mgmt must also be rejected
    dev[0].set_network(id, "mode", "5")
    dev[0].set_network(id, "key_mgmt", "WPA-PSK")
    if "FAIL" not in dev[0].request("MESH_GROUP_ADD %d" % id):
        raise Exception("Unexpected MESH_GROUP_ADD success")
    # Removal of a non-existing interface must fail
    if "FAIL" not in dev[0].request("MESH_GROUP_REMOVE foo"):
        raise Exception("Unexpected MESH_GROUP_REMOVE success")
def test_wpas_mesh_dynamic_interface(dev):
    """wpa_supplicant mesh with dynamic interface"""
    check_mesh_support(dev[0])
    mesh0 = None
    mesh1 = None
    try:
        # Create dedicated mesh interfaces (one with an explicit name,
        # one auto-named) and run the open-mesh flow over them
        mesh0 = dev[0].request("MESH_INTERFACE_ADD ifname=mesh0")
        if "FAIL" in mesh0:
            raise Exception("MESH_INTERFACE_ADD failed")
        mesh1 = dev[1].request("MESH_INTERFACE_ADD")
        if "FAIL" in mesh1:
            raise Exception("MESH_INTERFACE_ADD failed")
        wpas0 = WpaSupplicant(ifname=mesh0)
        wpas1 = WpaSupplicant(ifname=mesh1)
        logger.info(mesh0 + " address " + wpas0.get_status_field("address"))
        logger.info(mesh1 + " address " + wpas1.get_status_field("address"))
        add_open_mesh_network(wpas0)
        add_open_mesh_network(wpas1)
        check_mesh_group_added(wpas0)
        check_mesh_group_added(wpas1)
        check_mesh_peer_connected(wpas0)
        check_mesh_peer_connected(wpas1)
        hwsim_utils.test_connectivity(wpas0, wpas1)
        # Must not allow MESH_GROUP_REMOVE on dynamic interface
        if "FAIL" not in wpas0.request("MESH_GROUP_REMOVE " + mesh0):
            raise Exception("Invalid MESH_GROUP_REMOVE accepted")
        if "FAIL" not in wpas1.request("MESH_GROUP_REMOVE " + mesh1):
            raise Exception("Invalid MESH_GROUP_REMOVE accepted")
        # Must not allow MESH_GROUP_REMOVE on another radio interface
        if "FAIL" not in wpas0.request("MESH_GROUP_REMOVE " + mesh1):
            raise Exception("Invalid MESH_GROUP_REMOVE accepted")
        if "FAIL" not in wpas1.request("MESH_GROUP_REMOVE " + mesh0):
            raise Exception("Invalid MESH_GROUP_REMOVE accepted")
        wpas0.remove_ifname()
        wpas1.remove_ifname()
        # Removal must go through the parent interface and succeed only once
        if "OK" not in dev[0].request("MESH_GROUP_REMOVE " + mesh0):
            raise Exception("MESH_GROUP_REMOVE failed")
        if "OK" not in dev[1].request("MESH_GROUP_REMOVE " + mesh1):
            raise Exception("MESH_GROUP_REMOVE failed")
        if "FAIL" not in dev[0].request("MESH_GROUP_REMOVE " + mesh0):
            raise Exception("Invalid MESH_GROUP_REMOVE accepted")
        if "FAIL" not in dev[1].request("MESH_GROUP_REMOVE " + mesh1):
            raise Exception("Invalid MESH_GROUP_REMOVE accepted")
        logger.info("Make sure another dynamic group can be added")
        mesh0 = dev[0].request("MESH_INTERFACE_ADD ifname=mesh0")
        if "FAIL" in mesh0:
            raise Exception("MESH_INTERFACE_ADD failed")
        mesh1 = dev[1].request("MESH_INTERFACE_ADD")
        if "FAIL" in mesh1:
            raise Exception("MESH_INTERFACE_ADD failed")
        wpas0 = WpaSupplicant(ifname=mesh0)
        wpas1 = WpaSupplicant(ifname=mesh1)
        logger.info(mesh0 + " address " + wpas0.get_status_field("address"))
        logger.info(mesh1 + " address " + wpas1.get_status_field("address"))
        add_open_mesh_network(wpas0)
        add_open_mesh_network(wpas1)
        check_mesh_group_added(wpas0)
        check_mesh_group_added(wpas1)
        check_mesh_peer_connected(wpas0)
        check_mesh_peer_connected(wpas1)
        hwsim_utils.test_connectivity(wpas0, wpas1)
    finally:
        # Best-effort cleanup of any interface that was created
        if mesh0:
            dev[0].request("MESH_GROUP_REMOVE " + mesh0)
        if mesh1:
            dev[1].request("MESH_GROUP_REMOVE " + mesh1)
def test_wpas_mesh_dynamic_interface_remove(dev):
    """wpa_supplicant mesh with dynamic interface and removal"""
    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5")
    check_mesh_support(wpas)
    mesh5 = wpas.request("MESH_INTERFACE_ADD ifname=mesh5")
    if "FAIL" in mesh5:
        raise Exception("MESH_INTERFACE_ADD failed")
    wpas5 = WpaSupplicant(ifname=mesh5)
    logger.info(mesh5 + " address " + wpas5.get_status_field("address"))
    add_open_mesh_network(wpas5)
    add_open_mesh_network(dev[0])
    check_mesh_group_added(wpas5)
    check_mesh_group_added(dev[0])
    check_mesh_peer_connected(wpas5)
    check_mesh_peer_connected(dev[0])
    hwsim_utils.test_connectivity(wpas5, dev[0])
    # Remove the main interface while mesh interface is in use
    wpas.interface_remove("wlan5")
def test_wpas_mesh_max_peering(dev, apdev, params):
    """Mesh max peering limit"""
    check_mesh_support(dev[0])
    try:
        # Restrict dev[0] to a single peer link
        dev[0].request("SET max_peer_links 1")
        # first, connect dev[0] and dev[1]
        add_open_mesh_network(dev[0])
        add_open_mesh_network(dev[1])
        for i in range(2):
            ev = dev[i].wait_event(["MESH-PEER-CONNECTED"])
            if ev is None:
                raise Exception("dev%d did not connect with any peer" % i)
        # add dev[2] which will try to connect with both dev[0] and dev[1],
        # but can complete connection only with dev[1]
        add_open_mesh_network(dev[2])
        for i in range(1, 3):
            ev = dev[i].wait_event(["MESH-PEER-CONNECTED"])
            if ev is None:
                raise Exception("dev%d did not connect the second peer" % i)
        ev = dev[0].wait_event(["MESH-PEER-CONNECTED"], timeout=1)
        if ev is not None:
            raise Exception("dev0 connection beyond max peering limit")
        ev = dev[2].wait_event(["MESH-PEER-CONNECTED"], timeout=0.1)
        if ev is not None:
            raise Exception("dev2 reported unexpected peering: " + ev)
        for i in range(3):
            dev[i].mesh_group_remove()
            check_mesh_group_removed(dev[i])
    finally:
        dev[0].request("SET max_peer_links 99")
    # Verify the "Accepting Additional Mesh Peerings" capability bit in
    # the captured Beacon frames: dev[0] must have cleared it after
    # reaching its limit, while dev[1]/dev[2] must keep it set
    addr0 = dev[0].own_addr()
    addr1 = dev[1].own_addr()
    addr2 = dev[2].own_addr()
    capfile = os.path.join(params['logdir'], "hwsim0.pcapng")
    filt = "wlan.fc.type_subtype == 8"
    out = run_tshark(capfile, filt, [ "wlan.sa", "wlan.mesh.config.cap" ])
    pkts = out.splitlines()
    one = [ 0, 0, 0 ]
    zero = [ 0, 0, 0 ]
    for pkt in pkts:
        addr, cap = pkt.split('\t')
        cap = int(cap, 16)
        if addr == addr0:
            idx = 0
        elif addr == addr1:
            idx = 1
        elif addr == addr2:
            idx = 2
        else:
            continue
        # Bit 0 of the mesh capability field = Accepting Additional Peerings
        if cap & 0x01:
            one[idx] += 1
        else:
            zero[idx] += 1
    logger.info("one: " + str(one))
    logger.info("zero: " + str(zero))
    if zero[0] == 0:
        raise Exception("Accepting Additional Mesh Peerings not cleared")
    if one[0] == 0:
        raise Exception("Accepting Additional Mesh Peerings was not set in the first Beacon frame")
    if zero[1] > 0 or zero[2] > 0 or one[1] == 0 or one[2] == 0:
        raise Exception("Unexpected value in Accepting Additional Mesh Peerings from other STAs")
def test_wpas_mesh_open_5ghz(dev, apdev):
    """wpa_supplicant open MESH network on 5 GHz band"""
    try:
        _test_wpas_mesh_open_5ghz(dev, apdev)
    finally:
        # Always tear down groups and restore world regulatory domain
        dev[0].request("MESH_GROUP_REMOVE " + dev[0].ifname)
        dev[1].request("MESH_GROUP_REMOVE " + dev[1].ifname)
        subprocess.call(['iw', 'reg', 'set', '00'])
        dev[0].flush_scan_cache()
        dev[1].flush_scan_cache()
def _test_wpas_mesh_open_5ghz(dev, apdev):
    check_mesh_support(dev[0])
    # 5 GHz channels require a suitable regulatory domain
    subprocess.call(['iw', 'reg', 'set', 'US'])
    for i in range(2):
        # Wait until the US regdom change has actually taken effect
        for j in range(5):
            ev = dev[i].wait_event(["CTRL-EVENT-REGDOM-CHANGE"], timeout=5)
            if ev is None:
                raise Exception("No regdom change event")
            if "alpha2=US" in ev:
                break
        add_open_mesh_network(dev[i], freq="5180")
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    # Test connectivity 0->1 and 1->0
    hwsim_utils.test_connectivity(dev[0], dev[1])
    dev[0].mesh_group_remove()
    dev[1].mesh_group_remove()
    check_mesh_group_removed(dev[0])
    check_mesh_group_removed(dev[1])
    dev[0].dump_monitor()
    dev[1].dump_monitor()
def test_wpas_mesh_open_5ghz_coex(dev, apdev):
    """Mesh network on 5 GHz band and 20/40 coex change"""
    try:
        _test_wpas_mesh_open_5ghz_coex(dev, apdev)
    finally:
        # Always tear down groups and restore world regulatory domain
        dev[0].request("MESH_GROUP_REMOVE " + dev[0].ifname)
        dev[1].request("MESH_GROUP_REMOVE " + dev[1].ifname)
        set_world_reg(apdev0=apdev[0], dev0=dev[0])
        dev[0].flush_scan_cache()
        dev[1].flush_scan_cache()
def _test_wpas_mesh_open_5ghz_coex(dev, apdev):
    check_mesh_support(dev[0])
    subprocess.call(['iw', 'reg', 'set', 'US'])
    # Start a 20 MHz BSS on channel 40 that would be the secondary channel of
    # HT40+ mesh on channel 36.
    params = { "ssid": "test-ht40",
               "hw_mode": "a",
               "channel": "40",
               "country_code": "US" }
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()
    for i in range(2):
        # Wait until the US regdom change has actually taken effect
        for j in range(5):
            ev = dev[i].wait_event(["CTRL-EVENT-REGDOM-CHANGE"], timeout=5)
            if ev is None:
                raise Exception("No regdom change event")
            if "alpha2=US" in ev:
                break
        dev[i].scan_for_bss(bssid, freq=5200)
        add_open_mesh_network(dev[i], freq="5180")
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    # 20/40 coex must have moved the mesh to the AP's channel (5200 MHz)
    freq = dev[0].get_status_field("freq")
    if freq != "5200":
        raise Exception("Unexpected STATUS freq=" + freq)
    sig = dev[0].request("SIGNAL_POLL").splitlines()
    if "FREQUENCY=5200" not in sig:
        raise Exception("Unexpected SIGNAL_POLL output: " + str(sig))
    hapd.disable()
    dev[0].mesh_group_remove()
    dev[1].mesh_group_remove()
    check_mesh_group_removed(dev[0])
    check_mesh_group_removed(dev[1])
    dev[0].dump_monitor()
    dev[1].dump_monitor()
def test_wpas_mesh_open_ht40(dev, apdev):
    """Mesh and HT40 support difference"""
    try:
        _test_wpas_mesh_open_ht40(dev, apdev)
    finally:
        # Always tear down groups and restore world regulatory domain
        dev[0].request("MESH_GROUP_REMOVE " + dev[0].ifname)
        dev[1].request("MESH_GROUP_REMOVE " + dev[1].ifname)
        dev[2].request("MESH_GROUP_REMOVE " + dev[2].ifname)
        subprocess.call(['iw', 'reg', 'set', '00'])
        dev[0].flush_scan_cache()
        dev[1].flush_scan_cache()
        dev[2].flush_scan_cache()
def _test_wpas_mesh_open_ht40(dev, apdev):
    check_mesh_support(dev[0])
    subprocess.call(['iw', 'reg', 'set', 'US'])
    for i in range(3):
        # Wait until the US regdom change has actually taken effect
        for j in range(5):
            ev = dev[i].wait_event(["CTRL-EVENT-REGDOM-CHANGE"], timeout=5)
            if ev is None:
                raise Exception("No regdom change event")
            if "alpha2=US" in ev:
                break
        # dev[2] joins with HT40 disabled; mixed HT40 capability in the MBSS
        add_open_mesh_network(dev[i], freq="5180", disable_vht=True,
                              disable_ht40=(i == 2))
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    check_mesh_group_added(dev[2])
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    check_mesh_peer_connected(dev[2])
    # All node pairs must still be able to exchange data
    hwsim_utils.test_connectivity(dev[0], dev[1])
    hwsim_utils.test_connectivity(dev[0], dev[2])
    hwsim_utils.test_connectivity(dev[1], dev[2])
    dev[0].mesh_group_remove()
    dev[1].mesh_group_remove()
    dev[2].mesh_group_remove()
    check_mesh_group_removed(dev[0])
    check_mesh_group_removed(dev[1])
    check_mesh_group_removed(dev[2])
    dev[0].dump_monitor()
    dev[1].dump_monitor()
    dev[2].dump_monitor()
def test_wpas_mesh_open_vht_80p80(dev, apdev):
    """wpa_supplicant open MESH network on VHT 80+80 MHz channel"""
    try:
        _test_wpas_mesh_open_vht_80p80(dev, apdev)
    finally:
        # Always tear down groups and restore world regulatory domain
        dev[0].request("MESH_GROUP_REMOVE " + dev[0].ifname)
        dev[1].request("MESH_GROUP_REMOVE " + dev[1].ifname)
        subprocess.call(['iw', 'reg', 'set', '00'])
        dev[0].flush_scan_cache()
        dev[1].flush_scan_cache()
def _test_wpas_mesh_open_vht_80p80(dev, apdev):
    check_mesh_support(dev[0])
    subprocess.call(['iw', 'reg', 'set', 'US'])
    for i in range(2):
        # Wait until the US regdom change has actually taken effect
        for j in range(5):
            ev = dev[i].wait_event(["CTRL-EVENT-REGDOM-CHANGE"], timeout=5)
            if ev is None:
                raise Exception("No regdom change event")
            if "alpha2=US" in ev:
                break
        # chwidth=3 selects VHT 80+80 MHz operation
        add_open_mesh_network(dev[i], freq="5180", chwidth=3)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    # Test connectivity 0->1 and 1->0
    hwsim_utils.test_connectivity(dev[0], dev[1])
    # Both nodes must report 80+80 MHz with the expected segment centers
    sig = dev[0].request("SIGNAL_POLL").splitlines()
    if "WIDTH=80+80 MHz" not in sig:
        raise Exception("Unexpected SIGNAL_POLL value(2): " + str(sig))
    if "CENTER_FRQ1=5210" not in sig:
        raise Exception("Unexpected SIGNAL_POLL value(3): " + str(sig))
    if "CENTER_FRQ2=5775" not in sig:
        raise Exception("Unexpected SIGNAL_POLL value(4): " + str(sig))
    sig = dev[1].request("SIGNAL_POLL").splitlines()
    if "WIDTH=80+80 MHz" not in sig:
        raise Exception("Unexpected SIGNAL_POLL value(2b): " + str(sig))
    if "CENTER_FRQ1=5210" not in sig:
        raise Exception("Unexpected SIGNAL_POLL value(3b): " + str(sig))
    if "CENTER_FRQ2=5775" not in sig:
        raise Exception("Unexpected SIGNAL_POLL value(4b): " + str(sig))
    dev[0].mesh_group_remove()
    dev[1].mesh_group_remove()
    check_mesh_group_removed(dev[0])
    check_mesh_group_removed(dev[1])
    dev[0].dump_monitor()
    dev[1].dump_monitor()
def test_mesh_open_vht_160(dev, apdev):
    """Open mesh network on VHT 160 MHz channel"""
    try:
        _test_mesh_open_vht_160(dev, apdev)
    finally:
        # Always tear down groups and restore world regulatory domain
        dev[0].request("MESH_GROUP_REMOVE " + dev[0].ifname)
        dev[1].request("MESH_GROUP_REMOVE " + dev[1].ifname)
        subprocess.call(['iw', 'reg', 'set', '00'])
        dev[0].flush_scan_cache()
        dev[1].flush_scan_cache()
def _test_mesh_open_vht_160(dev, apdev):
    check_mesh_support(dev[0])
    # ZA regdom is used here; the test needs a non-DFS 160 MHz channel
    subprocess.call(['iw', 'reg', 'set', 'ZA'])
    for i in range(2):
        # Wait until the ZA regdom change has actually taken effect
        for j in range(5):
            ev = dev[i].wait_event(["CTRL-EVENT-REGDOM-CHANGE"], timeout=5)
            if ev is None:
                raise Exception("No regdom change event")
            if "alpha2=ZA" in ev:
                break
        # Skip unless the regulatory data offers 160 MHz without DFS
        cmd = subprocess.Popen(["iw", "reg", "get"], stdout=subprocess.PIPE)
        reg = cmd.stdout.read()
        found = False
        for entry in reg.splitlines():
            if "@ 160)" in entry and "DFS" not in entry:
                found = True
                break
        if not found:
            raise HwsimSkip("160 MHz channel without DFS not supported in regulatory information")
        # chwidth=2 selects VHT 160 MHz operation
        add_open_mesh_network(dev[i], freq="5520", chwidth=2)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    dev[0].dump_monitor()
    dev[1].dump_monitor()
    # Test connectivity 0->1 and 1->0
    hwsim_utils.test_connectivity(dev[0], dev[1])
    # Both nodes must report 160 MHz on the expected frequency
    sig = dev[0].request("SIGNAL_POLL").splitlines()
    if "WIDTH=160 MHz" not in sig:
        raise Exception("Unexpected SIGNAL_POLL value(2): " + str(sig))
    if "FREQUENCY=5520" not in sig:
        raise Exception("Unexpected SIGNAL_POLL value(3): " + str(sig))
    sig = dev[1].request("SIGNAL_POLL").splitlines()
    if "WIDTH=160 MHz" not in sig:
        raise Exception("Unexpected SIGNAL_POLL value(2b): " + str(sig))
    if "FREQUENCY=5520" not in sig:
        raise Exception("Unexpected SIGNAL_POLL value(3b): " + str(sig))
    dev[0].mesh_group_remove()
    dev[1].mesh_group_remove()
    check_mesh_group_removed(dev[0])
    check_mesh_group_removed(dev[1])
    dev[0].dump_monitor()
    dev[1].dump_monitor()
def test_wpas_mesh_password_mismatch(dev, apdev):
    """Mesh network and one device with mismatching password"""
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0])
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1])
    dev[1].mesh_group_add(id)
    dev[2].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[2])
    # dev[2] intentionally uses an incorrect passphrase
    dev[2].set_network_quoted(id, "psk", "wrong password")
    dev[2].mesh_group_add(id)
    # The two peers with matching password need to be able to connect
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    # dev[2] must report SAE authentication failures (it retries, so at
    # least two events are expected)
    ev = dev[2].wait_event(["MESH-SAE-AUTH-FAILURE"], timeout=20)
    if ev is None:
        raise Exception("dev2 did not report auth failure (1)")
    ev = dev[2].wait_event(["MESH-SAE-AUTH-FAILURE"], timeout=20)
    if ev is None:
        raise Exception("dev2 did not report auth failure (2)")
    dev[2].dump_monitor()
    # At least one of dev0/dev1 must report the failure against dev2's
    # address; either may legitimately miss it
    count = 0
    ev = dev[0].wait_event(["MESH-SAE-AUTH-FAILURE"], timeout=5)
    if ev is None:
        logger.info("dev0 did not report auth failure")
    else:
        if "addr=" + dev[2].own_addr() not in ev:
            raise Exception("Unexpected peer address in dev0 event: " + ev)
        count += 1
    dev[0].dump_monitor()
    ev = dev[1].wait_event(["MESH-SAE-AUTH-FAILURE"], timeout=5)
    if ev is None:
        logger.info("dev1 did not report auth failure")
    else:
        if "addr=" + dev[2].own_addr() not in ev:
            raise Exception("Unexpected peer address in dev1 event: " + ev)
        count += 1
    dev[1].dump_monitor()
    hwsim_utils.test_connectivity(dev[0], dev[1])
    # Data delivery to/from dev2 must fail since it never joined
    for i in range(2):
        try:
            hwsim_utils.test_connectivity(dev[i], dev[2], timeout=1)
            raise Exception("Data connectivity test passed unexpectedly")
        except Exception as e:
            # Fixed Python 2-only "except Exception, e" syntax; "as" form
            # is valid on Python 2.6+ and required on Python 3
            if "data delivery failed" not in str(e):
                raise
    if count == 0:
        raise Exception("Neither dev0 nor dev1 reported auth failure")
def test_wpas_mesh_password_mismatch_retry(dev, apdev, params):
    """Mesh password mismatch and retry [long]"""
    if not params['long']:
        raise HwsimSkip("Skip test case with long duration due to --long not specified")
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0])
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1])
    # Mismatching passphrase on dev[1] so every SAE attempt fails
    dev[1].set_network_quoted(id, "psk", "wrong password")
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Both ends must report a failure for each of the retry attempts,
    # followed by a blocked event once retries are exhausted
    for i in range(4):
        ev = dev[0].wait_event(["MESH-SAE-AUTH-FAILURE"], timeout=20)
        if ev is None:
            raise Exception("dev0 did not report auth failure (%d)" % i)
        ev = dev[1].wait_event(["MESH-SAE-AUTH-FAILURE"], timeout=20)
        if ev is None:
            raise Exception("dev1 did not report auth failure (%d)" % i)
    ev = dev[0].wait_event(["MESH-SAE-AUTH-BLOCKED"], timeout=10)
    if ev is None:
        raise Exception("dev0 did not report auth blocked")
    ev = dev[1].wait_event(["MESH-SAE-AUTH-BLOCKED"], timeout=10)
    if ev is None:
        raise Exception("dev1 did not report auth blocked")
def test_mesh_wpa_auth_init_oom(dev, apdev):
    """Secure mesh network setup failing due to wpa_init() OOM"""
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    # Force the first wpa_init() allocation to fail; the group must not start
    with alloc_fail(dev[0], 1, "wpa_init"):
        id = add_mesh_secure_net(dev[0])
        dev[0].mesh_group_add(id)
        ev = dev[0].wait_event(["MESH-GROUP-STARTED"], timeout=0.2)
        if ev is not None:
            raise Exception("Unexpected mesh group start during OOM")
def test_mesh_wpa_init_fail(dev, apdev):
    """Secure mesh network setup local failure"""
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    # Failure injected in os_get_random() during mesh RSN auth init
    with fail_test(dev[0], 1, "os_get_random;=__mesh_rsn_auth_init"):
        id = add_mesh_secure_net(dev[0])
        dev[0].mesh_group_add(id)
        wait_fail_trigger(dev[0], "GET_FAIL")
    dev[0].dump_monitor()
    # Allocation failure in mesh_rsn_auth_init()
    with alloc_fail(dev[0], 1, "mesh_rsn_auth_init"):
        id = add_mesh_secure_net(dev[0])
        dev[0].mesh_group_add(id)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
    dev[0].dump_monitor()
    # Failure in per-STA AMPE initialization once a peer shows up
    with fail_test(dev[0], 1, "os_get_random;mesh_rsn_init_ampe_sta"):
        id = add_mesh_secure_net(dev[0])
        dev[0].mesh_group_add(id)
        dev[1].request("SET sae_groups ")
        id = add_mesh_secure_net(dev[1])
        dev[1].mesh_group_add(id)
        wait_fail_trigger(dev[0], "GET_FAIL")
def test_wpas_mesh_reconnect(dev, apdev):
    """Secure mesh network plink counting during reconnection"""
    check_mesh_support(dev[0])
    try:
        _test_wpas_mesh_reconnect(dev)
    finally:
        # Restore the default peer link limit changed by the helper
        dev[0].request("SET max_peer_links 99")
def _test_wpas_mesh_reconnect(dev):
    # Low peer link limit so leaked plink counting would exhaust it quickly
    dev[0].request("SET max_peer_links 2")
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0])
    dev[0].set_network(id, "beacon_int", "100")
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1])
    dev[1].mesh_group_add(id)
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    # Repeated reconnects must succeed, i.e. dev[0] must free the plink
    # slot each time even though it never saw a link close frame
    for i in range(3):
        # Drop incoming management frames to avoid handling link close
        dev[0].request("SET ext_mgmt_frame_handling 1")
        dev[1].mesh_group_remove()
        check_mesh_group_removed(dev[1])
        dev[1].request("FLUSH")
        dev[0].request("SET ext_mgmt_frame_handling 0")
        id = add_mesh_secure_net(dev[1])
        dev[1].mesh_group_add(id)
        check_mesh_group_added(dev[1])
        check_mesh_peer_connected(dev[1])
        dev[0].dump_monitor()
        dev[1].dump_monitor()
def test_wpas_mesh_gate_forwarding(dev, apdev, p):
    """Mesh forwards traffic to unknown sta to mesh gates"""
    addr0 = dev[0].own_addr()
    addr1 = dev[1].own_addr()
    addr2 = dev[2].own_addr()
    # Destination that is not a member of the mesh
    external_sta = '02:11:22:33:44:55'
    # start 3 node connected mesh
    check_mesh_support(dev[0])
    for i in range(3):
        add_open_mesh_network(dev[i])
        check_mesh_group_added(dev[i])
    for i in range(3):
        check_mesh_peer_connected(dev[i])
    hwsim_utils.test_connectivity(dev[0], dev[1])
    hwsim_utils.test_connectivity(dev[1], dev[2])
    hwsim_utils.test_connectivity(dev[0], dev[2])
    # dev0 and dev1 are mesh gates
    subprocess.call(['iw', 'dev', dev[0].ifname, 'set', 'mesh_param',
                     'mesh_gate_announcements=1'])
    subprocess.call(['iw', 'dev', dev[1].ifname, 'set', 'mesh_param',
                     'mesh_gate_announcements=1'])
    # wait for gate announcement frames
    time.sleep(1)
    # data frame from dev2 -> external sta should be sent to both gates
    dev[2].request("DATA_TEST_CONFIG 1")
    dev[2].request("DATA_TEST_TX {} {} 0".format(external_sta, addr2))
    dev[2].request("DATA_TEST_CONFIG 0")
    capfile = os.path.join(p['logdir'], "hwsim0.pcapng")
    # Match frames sent by dev2 whose mesh address extension carries the
    # external destination address
    filt = "wlan.sa==%s && wlan_mgt.fixed.mesh_addr5==%s" % (addr2,
                                                             external_sta)
    # Poll the capture file since tshark may see the frames with some delay
    for i in range(15):
        da = run_tshark(capfile, filt, [ "wlan.da" ])
        if addr0 in da and addr1 in da:
            logger.debug("Frames seen in tshark iteration %d" % i)
            break
        time.sleep(0.3)
    if addr0 not in da:
        raise Exception("Frame to gate %s not observed" % addr0)
    if addr1 not in da:
        raise Exception("Frame to gate %s not observed" % addr1)
def test_wpas_mesh_pmksa_caching(dev, apdev):
    """Secure mesh network and PMKSA caching"""
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0])
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1])
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    # Both peers must have created PMKSA cache entries with matching PMKID
    addr0 = dev[0].own_addr()
    addr1 = dev[1].own_addr()
    pmksa0 = dev[0].get_pmksa(addr1)
    pmksa1 = dev[1].get_pmksa(addr0)
    if pmksa0 is None or pmksa1 is None:
        raise Exception("No PMKSA cache entry created")
    if pmksa0['pmkid'] != pmksa1['pmkid']:
        raise Exception("PMKID mismatch in PMKSA cache entries")
    # Removing the peer must not drop the PMKSA cache entry
    if "OK" not in dev[0].request("MESH_PEER_REMOVE " + addr1):
        raise Exception("Failed to remove peer")
    pmksa0b = dev[0].get_pmksa(addr1)
    if pmksa0b is None:
        raise Exception("PMKSA cache entry not maintained")
    time.sleep(0.1)
    # With default no_auto_peer=0, manual MESH_PEER_ADD is expected to be
    # rejected since peering is initiated automatically
    if "FAIL" not in dev[0].request("MESH_PEER_ADD " + addr1):
        raise Exception("MESH_PEER_ADD unexpectedly succeeded in no_auto_peer=0 case")
def test_wpas_mesh_pmksa_caching2(dev, apdev):
    """Secure mesh network and PMKSA caching with no_auto_peer=1"""
    check_mesh_support(dev[0], secure=True)
    addr0 = dev[0].own_addr()
    addr1 = dev[1].own_addr()
    # no_auto_peer=1: peering is only started on explicit MESH_PEER_ADD
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0])
    dev[0].set_network(id, "no_auto_peer", "1")
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1])
    dev[1].set_network(id, "no_auto_peer", "1")
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    ev = dev[0].wait_event(["will not initiate new peer link"], timeout=10)
    if ev is None:
        raise Exception("Missing no-initiate message")
    if "OK" not in dev[0].request("MESH_PEER_ADD " + addr1):
        raise Exception("MESH_PEER_ADD failed")
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    pmksa0 = dev[0].get_pmksa(addr1)
    pmksa1 = dev[1].get_pmksa(addr0)
    if pmksa0 is None or pmksa1 is None:
        raise Exception("No PMKSA cache entry created")
    if pmksa0['pmkid'] != pmksa1['pmkid']:
        raise Exception("PMKID mismatch in PMKSA cache entries")
    # The PMKSA cache entry must survive removal of the peer
    if "OK" not in dev[0].request("MESH_PEER_REMOVE " + addr1):
        raise Exception("Failed to remove peer")
    pmksa0b = dev[0].get_pmksa(addr1)
    if pmksa0b is None:
        raise Exception("PMKSA cache entry not maintained")
    ev = dev[0].wait_event(["will not initiate new peer link"], timeout=10)
    if ev is None:
        raise Exception("Missing no-initiate message (2)")
    # Reconnect; the cached entry should be used, i.e., the PMKID must not
    # change across the reconnection
    if "OK" not in dev[0].request("MESH_PEER_ADD " + addr1):
        raise Exception("MESH_PEER_ADD failed (2)")
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    pmksa0c = dev[0].get_pmksa(addr1)
    pmksa1c = dev[1].get_pmksa(addr0)
    if pmksa0c is None or pmksa1c is None:
        raise Exception("No PMKSA cache entry created (2)")
    if pmksa0c['pmkid'] != pmksa1c['pmkid']:
        raise Exception("PMKID mismatch in PMKSA cache entries")
    if pmksa0['pmkid'] != pmksa0c['pmkid']:
        raise Exception("PMKID changed")
    hwsim_utils.test_connectivity(dev[0], dev[1])
def test_wpas_mesh_pmksa_caching_no_match(dev, apdev):
    """Secure mesh network and PMKSA caching with no PMKID match"""
    check_mesh_support(dev[0], secure=True)
    addr0 = dev[0].own_addr()
    addr1 = dev[1].own_addr()
    # no_auto_peer=1: peering is only started on explicit MESH_PEER_ADD
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0])
    dev[0].set_network(id, "no_auto_peer", "1")
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1])
    dev[1].set_network(id, "no_auto_peer", "1")
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    ev = dev[0].wait_event(["will not initiate new peer link"], timeout=10)
    if ev is None:
        raise Exception("Missing no-initiate message")
    if "OK" not in dev[0].request("MESH_PEER_ADD " + addr1):
        raise Exception("MESH_PEER_ADD failed")
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    pmksa0 = dev[0].get_pmksa(addr1)
    pmksa1 = dev[1].get_pmksa(addr0)
    if pmksa0 is None or pmksa1 is None:
        raise Exception("No PMKSA cache entry created")
    if pmksa0['pmkid'] != pmksa1['pmkid']:
        raise Exception("PMKID mismatch in PMKSA cache entries")
    if "OK" not in dev[0].request("MESH_PEER_REMOVE " + addr1):
        raise Exception("Failed to remove peer")
    # Flush dev[1]'s cache so the reconnection cannot use PMKSA caching and
    # a fresh PMKID must be negotiated
    if "OK" not in dev[1].request("PMKSA_FLUSH"):
        raise Exception("Failed to flush PMKSA cache")
    ev = dev[0].wait_event(["will not initiate new peer link"], timeout=10)
    if ev is None:
        raise Exception("Missing no-initiate message (2)")
    if "OK" not in dev[0].request("MESH_PEER_ADD " + addr1):
        raise Exception("MESH_PEER_ADD failed (2)")
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    pmksa0c = dev[0].get_pmksa(addr1)
    pmksa1c = dev[1].get_pmksa(addr0)
    if pmksa0c is None or pmksa1c is None:
        raise Exception("No PMKSA cache entry created (2)")
    if pmksa0c['pmkid'] != pmksa1c['pmkid']:
        raise Exception("PMKID mismatch in PMKSA cache entries")
    # Unlike the caching case, the PMKID is expected to change here
    if pmksa0['pmkid'] == pmksa0c['pmkid']:
        raise Exception("PMKID did not change")
    hwsim_utils.test_connectivity(dev[0], dev[1])
def test_mesh_pmksa_caching_oom(dev, apdev):
    """Secure mesh network and PMKSA caching failing due to OOM"""
    check_mesh_support(dev[0], secure=True)
    addr0 = dev[0].own_addr()
    addr1 = dev[1].own_addr()
    # no_auto_peer=1: peering is only started on explicit MESH_PEER_ADD
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0])
    dev[0].set_network(id, "no_auto_peer", "1")
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1])
    dev[1].set_network(id, "no_auto_peer", "1")
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    ev = dev[0].wait_event(["will not initiate new peer link"], timeout=10)
    if ev is None:
        raise Exception("Missing no-initiate message")
    if "OK" not in dev[0].request("MESH_PEER_ADD " + addr1):
        raise Exception("MESH_PEER_ADD failed")
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    if "OK" not in dev[0].request("MESH_PEER_REMOVE " + addr1):
        raise Exception("Failed to remove peer")
    pmksa0b = dev[0].get_pmksa(addr1)
    if pmksa0b is None:
        raise Exception("PMKSA cache entry not maintained")
    ev = dev[0].wait_event(["will not initiate new peer link"], timeout=10)
    if ev is None:
        raise Exception("Missing no-initiate message (2)")
    # Inject an allocation failure into the SAE STA authentication path that
    # would otherwise use the cached PMKSA entry
    with alloc_fail(dev[0], 1, "wpa_auth_sta_init;mesh_rsn_auth_sae_sta"):
        if "OK" not in dev[0].request("MESH_PEER_ADD " + addr1):
            raise Exception("MESH_PEER_ADD failed (2)")
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
def test_wpas_mesh_pmksa_caching_ext(dev, apdev):
    """Secure mesh network and PMKSA caching and external storage"""
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0])
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1])
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    dev[0].dump_monitor()
    dev[1].dump_monitor()
    addr0 = dev[0].own_addr()
    addr1 = dev[1].own_addr()
    pmksa0 = dev[0].get_pmksa(addr1)
    pmksa1 = dev[1].get_pmksa(addr0)
    if pmksa0 is None or pmksa1 is None:
        raise Exception("No PMKSA cache entry created")
    if pmksa0['pmkid'] != pmksa1['pmkid']:
        raise Exception("PMKID mismatch in PMKSA cache entries")
    # Export the cache entry ("any" and an explicit peer address must return
    # the same data when there is a single entry)
    res1 = dev[1].request("MESH_PMKSA_GET any")
    res2 = dev[1].request("MESH_PMKSA_GET " + addr0)
    logger.info("MESH_PMKSA_GET: " + res1)
    if "UNKNOWN COMMAND" in res1:
        raise HwsimSkip("MESH_PMKSA_GET not supported in the build")
    logger.info("MESH_PMKSA_GET: " + res2)
    if pmksa0['pmkid'] not in res1:
        raise Exception("PMKID not included in PMKSA entry")
    if res1 != res2:
        raise Exception("Unexpected difference in MESH_PMKSA_GET output")
    # Leaving the mesh drops the internal cache entry; restore it from the
    # externally stored copy with MESH_PMKSA_ADD and verify it gets used
    dev[1].mesh_group_remove()
    check_mesh_group_removed(dev[1])
    dev[0].dump_monitor()
    dev[1].dump_monitor()
    res = dev[1].get_pmksa(addr0)
    if res is not None:
        raise Exception("Unexpected PMKSA cache entry remaining")
    if "OK" not in dev[1].request("MESH_PMKSA_ADD " + res2):
        raise Exception("MESH_PMKSA_ADD failed")
    dev[1].mesh_group_add(id)
    check_mesh_group_added(dev[1])
    check_mesh_peer_connected(dev[1])
    dev[0].dump_monitor()
    dev[1].dump_monitor()
    pmksa1b = dev[1].get_pmksa(addr0)
    if pmksa1b is None:
        raise Exception("No PMKSA cache entry created after external storage restore")
    if pmksa1['pmkid'] != pmksa1b['pmkid']:
        raise Exception("PMKID mismatch in PMKSA cache entries after external storage restore")
    hwsim_utils.test_connectivity(dev[0], dev[1])
    # Invalid peer address argument
    res = dev[1].request("MESH_PMKSA_GET foo")
    if "FAIL" not in res:
        raise Exception("Invalid MESH_PMKSA_GET accepted")
    dev[1].mesh_group_remove()
    check_mesh_group_removed(dev[1])
    dev[0].dump_monitor()
    dev[1].dump_monitor()
    dev[1].request("REMOVE_NETWORK all")
    # MESH_PMKSA_GET is only valid while in a mesh group
    res = dev[1].request("MESH_PMKSA_GET any")
    if "FAIL" not in res:
        raise Exception("MESH_PMKSA_GET accepted when not in mesh")
    # Malformed MESH_PMKSA_ADD arguments (bad address, missing or truncated
    # PMKID/PMK fields, trailing garbage) must all be rejected
    tests = [ "foo",
              "02:02:02:02:02:02",
              "02:02:02:02:02:02 q",
              "02:02:02:02:02:02 c3d51a7ccfca0c6d5287291a7169d79b",
              "02:02:02:02:02:02 c3d51a7ccfca0c6d5287291a7169d79b q",
              "02:02:02:02:02:02 c3d51a7ccfca0c6d5287291a7169d79b 1bed4fa22ece7997ca1bdc8b829019fe63acac91cba3405522c24c91f7cfb49f",
              "02:02:02:02:02:02 c3d51a7ccfca0c6d5287291a7169d79b 1bed4fa22ece7997ca1bdc8b829019fe63acac91cba3405522c24c91f7cfb49f q" ]
    for t in tests:
        if "FAIL" not in dev[1].request("MESH_PMKSA_ADD " + t):
            raise Exception("Invalid MESH_PMKSA_ADD accepted")
def test_mesh_oom(dev, apdev):
    """Mesh network setup failing due to OOM"""
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    # Allocation failure while creating the mesh configuration
    with alloc_fail(dev[0], 1, "mesh_config_create"):
        add_open_mesh_network(dev[0])
        ev = dev[0].wait_event(["Failed to init mesh"])
        if ev is None:
            raise Exception("Init failure not reported")
    # Allocation failure in wpa_supplicant_mesh_init() (basic_rates copy)
    with alloc_fail(dev[0], 3, "=wpa_supplicant_mesh_init"):
        add_open_mesh_network(dev[0], basic_rates="60 120 240")
        ev = dev[0].wait_event(["Failed to init mesh"])
        if ev is None:
            raise Exception("Init failure not reported")
    # Sweep through the allocation points within wpa_supplicant_mesh_init();
    # later iterations may complete without hitting the injected failure, so
    # a missing failure report is tolerated for i >= 15.
    for i in range(1, 66):
        dev[0].dump_monitor()
        logger.info("Test instance %d" % i)
        try:
            with alloc_fail(dev[0], i, "wpa_supplicant_mesh_init"):
                add_open_mesh_network(dev[0])
                wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
                ev = dev[0].wait_event(["Failed to init mesh",
                                        "MESH-GROUP-STARTED"])
                if ev is None:
                    raise Exception("Init failure not reported")
        # Note: the exception instance is not needed; avoid the Python-2-only
        # "except Exception, e" syntax and the unused binding.
        except Exception:
            if i < 15:
                raise
            logger.info("Ignore no-oom for i=%d" % i)
    # Same entry point but with a secure (SAE) mesh network
    with alloc_fail(dev[0], 4, "=wpa_supplicant_mesh_init"):
        id = add_mesh_secure_net(dev[0])
        dev[0].mesh_group_add(id)
        ev = dev[0].wait_event(["Failed to init mesh"])
        if ev is None:
            raise Exception("Init failure not reported")
def test_mesh_add_interface_oom(dev):
    """wpa_supplicant mesh with dynamic interface addition failing"""
    check_mesh_support(dev[0])
    # Inject allocation failures at the first two allocation points inside
    # wpas_mesh_add_interface() and verify that no half-created interface is
    # left behind.
    for count in range(1, 3):
        ifname = None
        try:
            with alloc_fail(dev[0], count, "wpas_mesh_add_interface"):
                ifname = dev[0].request("MESH_INTERFACE_ADD").strip()
        finally:
            # Clean up in case the interface was created despite the
            # injected allocation failure
            if ifname and ifname != "FAIL":
                dev[0].request("MESH_GROUP_REMOVE " + ifname)
def test_mesh_scan_oom(dev):
    """wpa_supplicant mesh scan results and OOM"""
    check_mesh_support(dev[0])
    add_open_mesh_network(dev[0])
    check_mesh_group_added(dev[0])
    # Scan with dev[1] until the mesh BSS shows up in the results
    for i in range(5):
        dev[1].scan(freq="2412")
        res = dev[1].request("SCAN_RESULTS")
        if "[MESH]" in res:
            break
    # Pick the scan result line flagged as a mesh BSS and extract its BSSID
    # (first tab-separated field)
    for r in res.splitlines():
        if "[MESH]" in r:
            break
    bssid = r.split('\t')[0]
    bss = dev[1].get_bss(bssid)
    if bss is None:
        raise Exception("Could not get BSS entry for mesh")
    # Allocation failure in mesh_attr_text() must not yield a BSS entry that
    # still claims to contain mesh information
    for i in range(1, 3):
        with alloc_fail(dev[1], i, "mesh_attr_text"):
            bss = dev[1].get_bss(bssid)
            if bss and "mesh_id" in bss:
                raise Exception("Unexpected BSS result during OOM")
def test_mesh_drv_fail(dev, apdev):
    """Mesh network setup failing due to driver command failure"""
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    # Driver join-mesh command failure
    with fail_test(dev[0], 1, "nl80211_join_mesh"):
        add_open_mesh_network(dev[0])
        ev = dev[0].wait_event(["mesh join error"])
        if ev is None:
            raise Exception("Join failure not reported")
    dev[0].dump_monitor()
    # Driver interface addition failure
    with fail_test(dev[0], 1, "wpa_driver_nl80211_if_add"):
        if "FAIL" not in dev[0].request("MESH_INTERFACE_ADD").strip():
            raise Exception("Interface added unexpectedly")
    dev[0].dump_monitor()
    # Driver mesh initialization failure
    with fail_test(dev[0], 1, "wpa_driver_nl80211_init_mesh"):
        add_open_mesh_network(dev[0])
        ev = dev[0].wait_event(["Could not join mesh"])
        if ev is None:
            raise Exception("Join failure not reported")
def test_mesh_sae_groups_invalid(dev, apdev):
    """Mesh with invalid SAE group configuration"""
    check_mesh_support(dev[0], secure=True)
    # dev[0] and dev[1] are configured with non-overlapping/unsupported SAE
    # group lists, so they should see each other but never complete peering
    dev[0].request("SET sae_groups 26")
    id = add_mesh_secure_net(dev[0])
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups 123 122 121")
    id = add_mesh_secure_net(dev[1])
    dev[1].mesh_group_add(id)
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    ev = dev[0].wait_event(["new peer notification"], timeout=10)
    if ev is None:
        raise Exception("dev[0] did not see peer")
    ev = dev[1].wait_event(["new peer notification"], timeout=10)
    if ev is None:
        raise Exception("dev[1] did not see peer")
    # No connection must result from the mismatching group configuration
    ev = dev[0].wait_event(["MESH-PEER-CONNECTED"], timeout=0.1)
    if ev is not None:
        raise Exception("Unexpected connection(0)")
    ev = dev[1].wait_event(["MESH-PEER-CONNECTED"], timeout=0.01)
    if ev is not None:
        raise Exception("Unexpected connection(1)")
    # Additional coverage in mesh_rsn_sae_group() with non-zero
    # wpa_s->mesh_rsn->sae_group_index.
    dev[0].dump_monitor()
    dev[1].dump_monitor()
    # dev[2] keeps the default SAE group configuration and is expected to
    # connect with dev[0]
    id = add_mesh_secure_net(dev[2])
    dev[2].mesh_group_add(id)
    check_mesh_group_added(dev[2])
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[2])
    ev = dev[1].wait_event(["new peer notification"], timeout=10)
    if ev is None:
        raise Exception("dev[1] did not see peer(2)")
    dev[0].dump_monitor()
    dev[1].dump_monitor()
    dev[2].dump_monitor()
    # Restore default SAE group configuration
    dev[0].request("SET sae_groups ")
    dev[1].request("SET sae_groups ")
def test_mesh_sae_failure(dev, apdev):
    """Mesh and local SAE failures"""
    check_mesh_support(dev[0], secure=True)
    dev[0].request("SET sae_groups ")
    dev[1].request("SET sae_groups ")
    # Each tuple is (alloc failure count, failing function, success):
    # success=True means a retry is expected to complete the connection
    # despite the injected failure; success=False means the failure is fatal
    # and only the fail trigger is waited for.
    funcs = [ (1, "=mesh_rsn_auth_sae_sta", True),
              (1, "mesh_rsn_build_sae_commit;mesh_rsn_auth_sae_sta", False),
              (1, "auth_sae_init_committed;mesh_rsn_auth_sae_sta", True),
              (1, "=mesh_rsn_protect_frame", True),
              (2, "=mesh_rsn_protect_frame", True),
              (1, "aes_siv_encrypt;mesh_rsn_protect_frame", True),
              (1, "=mesh_rsn_process_ampe", True),
              (1, "aes_siv_decrypt;mesh_rsn_process_ampe", True) ]
    for count, func, success in funcs:
        id = add_mesh_secure_net(dev[0])
        dev[0].mesh_group_add(id)
        with alloc_fail(dev[1], count, func):
            id = add_mesh_secure_net(dev[1])
            dev[1].mesh_group_add(id)
            check_mesh_group_added(dev[0])
            check_mesh_group_added(dev[1])
            if success:
                # retry is expected to work
                check_mesh_peer_connected(dev[0])
                check_mesh_peer_connected(dev[1])
            else:
                wait_fail_trigger(dev[1], "GET_ALLOC_FAIL")
        # Tear down both groups before the next iteration
        dev[0].mesh_group_remove()
        dev[1].mesh_group_remove()
        check_mesh_group_removed(dev[0])
        check_mesh_group_removed(dev[1])
def test_mesh_failure(dev, apdev):
    """Mesh and local failures"""
    check_mesh_support(dev[0])
    # Memory allocation failures: (count, failing function, success) where
    # success=True means a retry is expected to recover
    funcs = [ (1, "ap_sta_add;mesh_mpm_add_peer", True),
              (1, "wpabuf_alloc;mesh_mpm_send_plink_action", True) ]
    for count, func, success in funcs:
        add_open_mesh_network(dev[0])
        with alloc_fail(dev[1], count, func):
            add_open_mesh_network(dev[1])
            check_mesh_group_added(dev[0])
            check_mesh_group_added(dev[1])
            if success:
                # retry is expected to work
                check_mesh_peer_connected(dev[0])
                check_mesh_peer_connected(dev[1])
            else:
                wait_fail_trigger(dev[1], "GET_ALLOC_FAIL")
        dev[0].mesh_group_remove()
        dev[1].mesh_group_remove()
        check_mesh_group_removed(dev[0])
        check_mesh_group_removed(dev[1])
    # Non-allocation (return-value) failure injection
    funcs = [ (1, "mesh_mpm_init_link", True) ]
    for count, func, success in funcs:
        add_open_mesh_network(dev[0])
        with fail_test(dev[1], count, func):
            add_open_mesh_network(dev[1])
            check_mesh_group_added(dev[0])
            check_mesh_group_added(dev[1])
            if success:
                # retry is expected to work
                check_mesh_peer_connected(dev[0])
                check_mesh_peer_connected(dev[1])
            else:
                wait_fail_trigger(dev[1], "GET_FAIL")
        dev[0].mesh_group_remove()
        dev[1].mesh_group_remove()
        check_mesh_group_removed(dev[0])
        check_mesh_group_removed(dev[1])
def test_mesh_invalid_frequency(dev, apdev):
    """Mesh and invalid frequency configuration"""
    check_mesh_support(dev[0])
    # Missing frequency parameter
    add_open_mesh_network(dev[0], freq=None)
    ev = dev[0].wait_event(["MESH-GROUP-STARTED",
                            "Could not join mesh"])
    if ev is None or "Could not join mesh" not in ev:
        raise Exception("Mesh join failure not reported")
    dev[0].request("REMOVE_NETWORK all")
    # 2413 MHz does not correspond to any channel center frequency
    add_open_mesh_network(dev[0], freq="2413")
    ev = dev[0].wait_event(["MESH-GROUP-STARTED",
                            "Could not join mesh"])
    if ev is None or "Could not join mesh" not in ev:
        raise Exception("Mesh join failure not reported")
def test_mesh_default_beacon_int(dev, apdev):
    """Mesh and default beacon interval"""
    check_mesh_support(dev[0])
    try:
        # Set a global (non-network-specific) beacon interval and verify
        # that the mesh group can still be started
        dev[0].request("SET beacon_int 200")
        add_open_mesh_network(dev[0])
        check_mesh_group_added(dev[0])
    finally:
        # Restore the default value
        dev[0].request("SET beacon_int 0")
def test_mesh_scan_parse_error(dev, apdev):
    """Mesh scan element parse error"""
    check_mesh_support(dev[0])
    params = { "ssid": "open",
               "beacon_int": "2000" }
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = apdev[0]['bssid']
    # Vendor element claiming length 2 but carrying only one byte of
    # payload, i.e., a truncated IE
    hapd.set('vendor_elements', 'dd0201')
    for i in range(10):
        dev[0].scan(freq=2412)
        if bssid in dev[0].request("SCAN_RESULTS"):
            break
    # This will fail in IE parsing due to the truncated IE in the Probe
    # Response frame.
    bss = dev[0].request("BSS " + bssid)
def test_mesh_missing_mic(dev, apdev):
    """Secure mesh network and missing MIC"""
    check_mesh_support(dev[0], secure=True)
    # dev[0] processes management frames manually so that the MIC element
    # can be stripped from one Mesh Peering Open frame before processing
    dev[0].request("SET ext_mgmt_frame_handling 1")
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0])
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1])
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    count = 0
    remove_mic = True
    while True:
        count += 1
        if count > 15:
            raise Exception("Did not see Action frames")
        rx_msg = dev[0].mgmt_rx()
        if rx_msg is None:
            ev = dev[1].wait_event(["MESH-PEER-CONNECTED"], timeout=0.01)
            if ev:
                break
            raise Exception("MGMT-RX timeout")
        if rx_msg['subtype'] == 13:
            payload = rx_msg['payload']
            frame = rx_msg['frame']
            (categ, action) = struct.unpack('BB', payload[0:2])
            if categ == 15 and action == 1 and remove_mic:
                # Mesh Peering Open
                pos = frame.find('\x8c\x10')
                # str.find() returns -1 when the substring is absent; the
                # previous "if not pos" check missed that case (and would
                # have triggered incorrectly at offset 0)
                if pos < 0:
                    raise Exception("Could not find MIC element")
                logger.info("Found MIC at %d" % pos)
                # Remove MIC
                rx_msg['frame'] = frame[0:pos]
                remove_mic = False
        # Forward the (possibly modified) frame to dev[0] for processing
        if "OK" not in dev[0].request("MGMT_RX_PROCESS freq={} datarate={} ssi_signal={} frame={}".format(rx_msg['freq'], rx_msg['datarate'], rx_msg['ssi_signal'], rx_msg['frame'].encode('hex'))):
            raise Exception("MGMT_RX_PROCESS failed")
        # The connection must still complete after the MIC-less frame was
        # rejected and the peer retried
        ev = dev[1].wait_event(["MESH-PEER-CONNECTED"], timeout=0.01)
        if ev:
            break
def test_mesh_pmkid_mismatch(dev, apdev):
    """Secure mesh network and PMKID mismatch"""
    check_mesh_support(dev[0], secure=True)
    addr0 = dev[0].own_addr()
    addr1 = dev[1].own_addr()
    # no_auto_peer=1: peering is only started on explicit MESH_PEER_ADD
    dev[0].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[0])
    dev[0].set_network(id, "no_auto_peer", "1")
    dev[0].mesh_group_add(id)
    dev[1].request("SET sae_groups ")
    id = add_mesh_secure_net(dev[1])
    dev[1].set_network(id, "no_auto_peer", "1")
    dev[1].mesh_group_add(id)
    # Check for mesh joined
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    # Check for peer connected
    ev = dev[0].wait_event(["will not initiate new peer link"], timeout=10)
    if ev is None:
        raise Exception("Missing no-initiate message")
    if "OK" not in dev[0].request("MESH_PEER_ADD " + addr1):
        raise Exception("MESH_PEER_ADD failed")
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    if "OK" not in dev[0].request("MESH_PEER_REMOVE " + addr1):
        raise Exception("Failed to remove peer")
    ev = dev[0].wait_event(["will not initiate new peer link"], timeout=10)
    if ev is None:
        raise Exception("Missing no-initiate message (2)")
    dev[0].dump_monitor()
    dev[1].dump_monitor()
    # Reconnect with dev[0] corrupting the PMKID in the first Mesh Peering
    # Open frame it processes
    dev[0].request("SET ext_mgmt_frame_handling 1")
    if "OK" not in dev[0].request("MESH_PEER_ADD " + addr1):
        raise Exception("MESH_PEER_ADD failed (2)")
    count = 0
    break_pmkid = True
    while True:
        count += 1
        if count > 50:
            raise Exception("Did not see Action frames")
        rx_msg = dev[0].mgmt_rx()
        if rx_msg is None:
            ev = dev[1].wait_event(["MESH-PEER-CONNECTED"], timeout=0.1)
            if ev:
                break
            raise Exception("MGMT-RX timeout")
        if rx_msg['subtype'] == 13:
            payload = rx_msg['payload']
            frame = rx_msg['frame']
            (categ, action) = struct.unpack('BB', payload[0:2])
            if categ == 15 and action == 1 and break_pmkid:
                # Mesh Peering Open
                pos = frame.find('\x75\x14')
                # str.find() returns -1 when the substring is absent; the
                # previous "if not pos" check missed that case (and would
                # have triggered incorrectly at offset 0)
                if pos < 0:
                    raise Exception("Could not find Mesh Peering Management element")
                logger.info("Found Mesh Peering Management element at %d" % pos)
                # Break PMKID to hit "Mesh RSN: Invalid PMKID (Chosen PMK did
                # not match calculated PMKID)"
                rx_msg['frame'] = frame[0:pos + 6] + '\x00\x00\x00\x00' + frame[pos + 10:]
                break_pmkid = False
        if "OK" not in dev[0].request("MGMT_RX_PROCESS freq={} datarate={} ssi_signal={} frame={}".format(rx_msg['freq'], rx_msg['datarate'], rx_msg['ssi_signal'], rx_msg['frame'].encode('hex'))):
            raise Exception("MGMT_RX_PROCESS failed")
        # The connection must still complete once the broken frame has been
        # rejected and the handshake retried
        ev = dev[1].wait_event(["MESH-PEER-CONNECTED"], timeout=0.01)
        if ev:
            break
def test_mesh_peering_proto(dev, apdev):
    """Mesh peering management protocol testing"""
    check_mesh_support(dev[0])
    # dev[0] processes management frames manually; each Mesh Peering Open it
    # receives is modified in a different way (test counter 1..6) to drive
    # the MPM parser through its error paths. Note: str.find() returns -1
    # when the substring is absent, so the element checks below must use
    # "pos < 0" (the previous "if not pos" missed the not-found case and
    # would have triggered incorrectly at offset 0).
    dev[0].request("SET ext_mgmt_frame_handling 1")
    add_open_mesh_network(dev[0], beacon_int=160)
    add_open_mesh_network(dev[1], beacon_int=160)
    count = 0
    test = 1
    while True:
        count += 1
        if count > 50:
            raise Exception("Did not see Action frames")
        rx_msg = dev[0].mgmt_rx()
        if rx_msg is None:
            ev = dev[1].wait_event(["MESH-PEER-CONNECTED"], timeout=0.01)
            if ev:
                break
            raise Exception("MGMT-RX timeout")
        if rx_msg['subtype'] == 13:
            payload = rx_msg['payload']
            frame = rx_msg['frame']
            (categ, action) = struct.unpack('BB', payload[0:2])
            if categ == 15 and action == 1 and test == 1:
                # Mesh Peering Open
                pos = frame.find('\x75\x04')
                if pos < 0:
                    raise Exception("Could not find Mesh Peering Management element")
                logger.info("Found Mesh Peering Management element at %d" % pos)
                # Remove the element to hit
                # "MPM: No Mesh Peering Management element"
                rx_msg['frame'] = frame[0:pos]
                test += 1
            elif categ == 15 and action == 1 and test == 2:
                # Mesh Peering Open
                pos = frame.find('\x72\x0e')
                if pos < 0:
                    raise Exception("Could not find Mesh ID element")
                logger.info("Found Mesh ID element at %d" % pos)
                # Remove the element to hit
                # "MPM: No Mesh ID or Mesh Configuration element"
                rx_msg['frame'] = frame[0:pos] + frame[pos + 16:]
                test += 1
            elif categ == 15 and action == 1 and test == 3:
                # Mesh Peering Open
                pos = frame.find('\x72\x0e')
                if pos < 0:
                    raise Exception("Could not find Mesh ID element")
                logger.info("Found Mesh ID element at %d" % pos)
                # Replace Mesh ID to hit "MPM: Mesh ID or Mesh Configuration
                # element do not match local MBSS"
                rx_msg['frame'] = frame[0:pos] + '\x72\x0etest-test-test' + frame[pos + 16:]
                test += 1
            elif categ == 15 and action == 1 and test == 4:
                # Mesh Peering Open
                # Remove IEs to hit
                # "MPM: Ignore too short action frame 1 ie_len 0"
                rx_msg['frame'] = frame[0:26]
                test += 1
            elif categ == 15 and action == 1 and test == 5:
                # Mesh Peering Open
                # Truncate IEs to hit
                # "MPM: Failed to parse PLINK IEs"
                rx_msg['frame'] = frame[0:30]
                test += 1
            elif categ == 15 and action == 1 and test == 6:
                # Mesh Peering Open
                pos = frame.find('\x75\x04')
                if pos < 0:
                    raise Exception("Could not find Mesh Peering Management element")
                logger.info("Found Mesh Peering Management element at %d" % pos)
                # Truncate the element to hit
                # "MPM: Invalid peer mgmt ie" and
                # "MPM: Mesh parsing rejected frame"
                rx_msg['frame'] = frame[0:pos] + '\x75\x00\x00\x00' + frame[pos + 6:]
                test += 1
        if "OK" not in dev[0].request("MGMT_RX_PROCESS freq={} datarate={} ssi_signal={} frame={}".format(rx_msg['freq'], rx_msg['datarate'], rx_msg['ssi_signal'], rx_msg['frame'].encode('hex'))):
            raise Exception("MGMT_RX_PROCESS failed")
        ev = dev[1].wait_event(["MESH-PEER-CONNECTED"], timeout=0.01)
        if ev:
            break
    # All six modified frames must have been seen before the connection
    # finally completed
    if test != 7:
        raise Exception("Not all test frames completed")
def test_mesh_mpm_init_proto(dev, apdev):
    """Mesh peering management protocol testing for peer addition"""
    check_mesh_support(dev[0])
    add_open_mesh_network(dev[0])
    check_mesh_group_added(dev[0])
    dev[0].dump_monitor()
    # Inject hand-crafted Mesh Peering Open frames from synthetic peer
    # addresses; each NOTE below labels the error path being exercised
    dev[0].request("SET ext_mgmt_frame_handling 1")
    addr = "020000000100"
    # 802.11 management frame header (Action frame) followed by hex-encoded
    # information elements matching the variable names below
    hdr = "d000ac00020000000000" + addr + addr + "1000"
    fixed = "0f010000"
    supp_rates = "010802040b168c129824"
    ext_supp_rates = "3204b048606c"
    mesh_id = "720e777061732d6d6573682d6f70656e"
    mesh_conf = "710701010001000009"
    mpm = "75040000079d"
    ht_capab = "2d1a7c001bffff000000000000000000000100000000000000000000"
    ht_oper = "3d160b000000000000000000000000000000000000000000"
    dev[0].request("NOTE no supported rates")
    # Supported Rates element omitted entirely
    frame = hdr + fixed + ext_supp_rates + mesh_id + mesh_conf + mpm + ht_capab + ht_oper
    if "OK" not in dev[0].request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=%s" % frame):
        raise Exception("MGMT_RX_PROCESS failed")
    dev[0].request("NOTE Invalid supported rates element length 33+0")
    # Supported Rates element with over-long (33 octet) payload
    long_supp_rates = "012100112233445566778899aabbccddeeff00112233445566778899aabbccddeeff00"
    frame = hdr + fixed + long_supp_rates + mesh_id + mesh_conf + mpm + ht_capab + ht_oper
    if "OK" not in dev[0].request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=%s" % frame):
        raise Exception("MGMT_RX_PROCESS failed")
    dev[0].request("NOTE Too short mesh config")
    # Mesh Configuration element truncated to 4 octets
    short_mesh_conf = "710401010001"
    frame = hdr + fixed + supp_rates + mesh_id + short_mesh_conf + mpm + ht_capab + ht_oper
    if "OK" not in dev[0].request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=%s" % frame):
        raise Exception("MGMT_RX_PROCESS failed")
    dev[0].request("NOTE Add STA failure")
    # Well-formed frame, but driver STA addition fails
    frame = hdr + fixed + supp_rates + ext_supp_rates + mesh_id + mesh_conf + mpm + ht_capab + ht_oper
    with fail_test(dev[0], 1, "wpa_driver_nl80211_sta_add"):
        if "OK" not in dev[0].request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=%s" % frame):
            raise Exception("MGMT_RX_PROCESS failed")
    dev[0].request("NOTE Send Action failure")
    with fail_test(dev[0], 1, "driver_nl80211_send_action"):
        if "OK" not in dev[0].request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=%s" % frame):
            raise Exception("MGMT_RX_PROCESS failed")
    dev[0].request("NOTE Set STA failure")
    # Fresh peer address so a new STA entry is created; second sta_add fails
    addr = "020000000101"
    hdr = "d000ac00020000000000" + addr + addr + "1000"
    frame = hdr + fixed + supp_rates + ext_supp_rates + mesh_id + mesh_conf + mpm + ht_capab + ht_oper
    with fail_test(dev[0], 2, "wpa_driver_nl80211_sta_add"):
        if "OK" not in dev[0].request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=%s" % frame):
            raise Exception("MGMT_RX_PROCESS failed")
    dev[0].request("NOTE ap_sta_add OOM")
    addr = "020000000102"
    hdr = "d000ac00020000000000" + addr + addr + "1000"
    frame = hdr + fixed + supp_rates + ext_supp_rates + mesh_id + mesh_conf + mpm + ht_capab + ht_oper
    with alloc_fail(dev[0], 1, "ap_sta_add"):
        if "OK" not in dev[0].request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=%s" % frame):
            raise Exception("MGMT_RX_PROCESS failed")
    dev[0].request("NOTE hostapd_get_aid() failure")
    addr = "020000000103"
    hdr = "d000ac00020000000000" + addr + addr + "1000"
    frame = hdr + fixed + supp_rates + ext_supp_rates + mesh_id + mesh_conf + mpm + ht_capab + ht_oper
    with fail_test(dev[0], 1, "hostapd_get_aid"):
        if "OK" not in dev[0].request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=%s" % frame):
            raise Exception("MGMT_RX_PROCESS failed")
    # Only the first injected peer resulted in a STA entry; removal of the
    # failed ones (and removal/addition on a non-mesh dev[1]) must fail
    if "OK" not in dev[0].request("MESH_PEER_REMOVE 02:00:00:00:01:00"):
        raise Exception("Failed to remove peer")
    if "FAIL" not in dev[0].request("MESH_PEER_REMOVE 02:00:00:00:01:02"):
        raise Exception("Unexpected MESH_PEER_REMOVE success")
    if "FAIL" not in dev[1].request("MESH_PEER_REMOVE 02:00:00:00:01:02"):
        raise Exception("Unexpected MESH_PEER_REMOVE success(2)")
    if "FAIL" not in dev[1].request("MESH_PEER_ADD 02:00:00:00:01:02"):
        raise Exception("Unexpected MESH_PEER_ADD success")
def test_mesh_holding(dev, apdev):
    """Mesh MPM FSM and HOLDING state event OPN_ACPT"""
    check_mesh_support(dev[0])
    add_open_mesh_network(dev[0])
    add_open_mesh_network(dev[1])
    check_mesh_group_added(dev[0])
    check_mesh_group_added(dev[1])
    check_mesh_peer_connected(dev[0])
    check_mesh_peer_connected(dev[1])
    addr0 = dev[0].own_addr()
    addr1 = dev[1].own_addr()
    # Handle management frames manually so that the Mesh Peering Close
    # triggered by MESH_PEER_REMOVE can be intercepted
    dev[0].request("SET ext_mgmt_frame_handling 1")
    if "OK" not in dev[0].request("MESH_PEER_REMOVE " + addr1):
        raise Exception("Failed to remove peer")
    rx_msg = dev[0].mgmt_rx()
    if rx_msg is None:
        raise Exception("MGMT-RX timeout")
    if rx_msg['subtype'] != 13:
        raise Exception("Unexpected management frame")
    payload = rx_msg['payload']
    (categ, action) = struct.unpack('BB', payload[0:2])
    # Self Protected (15) / Mesh Peering Close (3)
    if categ != 0x0f or action != 0x03:
        raise Exception("Did not see Mesh Peering Close")
    # Local link IDs are carried at the end of the Close frame payload
    peer_lid = payload[-6:-4].encode("hex")
    my_lid = payload[-4:-2].encode("hex")
    # Drop Mesh Peering Close and instead, process an unexpected Mesh Peering
    # Open to trigger transmission of another Mesh Peering Close in the HOLDING
    # state based on an OPN_ACPT event.
    dst = addr0.replace(':', '')
    src = addr1.replace(':', '')
    # Hand-crafted Action frame header plus hex-encoded IEs named by the
    # variables below; the MPM element reuses the captured link IDs
    hdr = "d000ac00" + dst + src + src + "1000"
    fixed = "0f010000"
    supp_rates = "010802040b168c129824"
    ext_supp_rates = "3204b048606c"
    mesh_id = "720e777061732d6d6573682d6f70656e"
    mesh_conf = "710701010001000009"
    mpm = "7504" + my_lid + peer_lid
    ht_capab = "2d1a7c001bffff000000000000000000000100000000000000000000"
    ht_oper = "3d160b000000000000000000000000000000000000000000"
    frame = hdr + fixed + supp_rates + ext_supp_rates + mesh_id + mesh_conf + mpm + ht_capab + ht_oper
    if "OK" not in dev[0].request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=%s" % frame):
        raise Exception("MGMT_RX_PROCESS failed")
    # Give the MPM FSM a moment to act on the injected frame
    time.sleep(0.1)
def test_mesh_cnf_rcvd_event_cls_acpt(dev, apdev):
"""Mesh peering management protocol testing - CLS_ACPT event in CNF_RCVD"""
check_mesh_support(dev[0])
add_open_mesh_network(dev[0])
check_mesh_group_added(dev[0])
dev[0].dump_monitor()
dev[0].request("SET ext_mgmt_frame_handling 1")
add_open_mesh_network(dev[1])
check_mesh_group_added(dev[1])
addr0 = dev[0].own_addr()
addr1 = dev[1].own_addr()
rx_msg = dev[0].mgmt_rx()
# Drop Mesh Peering Open
rx_msg = dev[0].mgmt_rx()
# Allow Mesh Peering Confirm to go through
if "OK" not in dev[0].request("MGMT_RX_PROCESS freq={} datarate={} ssi_signal={} frame={}".format(rx_msg['freq'], rx_msg['datarate'], rx_msg['ssi_signal'], rx_msg['frame'].encode('hex'))):
raise Exception("MGMT_RX_PROCESS failed")
payload = rx_msg['payload']
peer_lid = payload[51:53].encode("hex")
my_lid = payload[53:55].encode("hex")
dst = addr0.replace(':', '')
src = addr1.replace(':', '')
hdr = "d000ac00" + dst + src + src + "1000"
fixed = "0f03"
mesh_id = "720e777061732d6d6573682d6f70656e"
mpm = "75080000" + peer_lid + my_lid + "3700"
frame = hdr + fixed + mesh_id + mpm
# Inject Mesh Peering Close to hit "state CNF_RCVD event CLS_ACPT" to
# HOLDING transition.
if "OK" not in dev[0].request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + frame):
raise Exception("MGMT_RX_PROCESS failed")
def test_mesh_opn_snt_event_cls_acpt(dev, apdev):
"""Mesh peering management protocol testing - CLS_ACPT event in OPN_SNT"""
check_mesh_support(dev[0])
add_open_mesh_network(dev[0])
check_mesh_group_added(dev[0])
dev[0].dump_monitor()
dev[0].request("SET ext_mgmt_frame_handling 1")
add_open_mesh_network(dev[1])
check_mesh_group_added(dev[1])
addr0 = dev[0].own_addr()
addr1 = dev[1].own_addr()
rx_msg = dev[0].mgmt_rx()
# Drop Mesh Peering Open
rx_msg = dev[0].mgmt_rx()
# Drop Mesh Peering Confirm
payload = rx_msg['payload']
peer_lid = "0000"
my_lid = payload[53:55].encode("hex")
dst = addr0.replace(':', '')
src = addr1.replace(':', '')
hdr = "d000ac00" + dst + src + src + "1000"
fixed = "0f03"
mesh_id = "720e777061732d6d6573682d6f70656e"
mpm = "75080000" + peer_lid + my_lid + "3700"
frame = hdr + fixed + mesh_id + mpm
# Inject Mesh Peering Close to hit "state OPN_SNTevent CLS_ACPT" to
# HOLDING transition.
if "OK" not in dev[0].request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + frame):
raise Exception("MGMT_RX_PROCESS failed")
def test_mesh_select_network(dev):
"""Mesh network and SELECT_NETWORK"""
check_mesh_support(dev[0])
id0 = add_open_mesh_network(dev[0], start=False)
id1 = add_open_mesh_network(dev[1], start=False)
dev[0].select_network(id0)
dev[1].select_network(id1)
check_mesh_group_added(dev[0])
check_mesh_group_added(dev[1])
check_mesh_peer_connected(dev[0])
check_mesh_peer_connected(dev[1])
hwsim_utils.test_connectivity(dev[0], dev[1])
def test_mesh_forwarding(dev):
"""Mesh with two stations that can't reach each other directly"""
try:
set_group_map(dev[0], 1)
set_group_map(dev[1], 3)
set_group_map(dev[2], 2)
check_mesh_support(dev[0])
for i in range(3):
add_open_mesh_network(dev[i])
check_mesh_group_added(dev[i])
for i in range(3):
check_mesh_peer_connected(dev[i])
hwsim_utils.test_connectivity(dev[0], dev[1])
hwsim_utils.test_connectivity(dev[1], dev[2])
hwsim_utils.test_connectivity(dev[0], dev[2])
finally:
# reset groups
set_group_map(dev[0], 1)
set_group_map(dev[1], 1)
set_group_map(dev[2], 1)
def test_mesh_forwarding_secure(dev):
"""Mesh with two stations that can't reach each other directly (RSN)"""
check_mesh_support(dev[0], secure=True)
try:
set_group_map(dev[0], 1)
set_group_map(dev[1], 3)
set_group_map(dev[2], 2)
for i in range(3):
dev[i].request("SET sae_groups ")
id = add_mesh_secure_net(dev[i])
dev[i].mesh_group_add(id)
check_mesh_group_added(dev[i])
for i in range(3):
check_mesh_peer_connected(dev[i])
hwsim_utils.test_connectivity(dev[0], dev[1])
hwsim_utils.test_connectivity(dev[1], dev[2])
hwsim_utils.test_connectivity(dev[0], dev[2])
finally:
# reset groups
set_group_map(dev[0], 1)
set_group_map(dev[1], 1)
set_group_map(dev[2], 1)
| UTF-8 | Python | false | false | 85,944 | py | 420 | test_wpas_mesh.py | 145 | 0.609839 | 0.57392 | 0.000256 | 2,308 | 36.237435 | 196 |
tanmaymane18/zabbix-data-visualisation | 5,677,946,770,278 | e35e495e5611cb3e986662a7fc8f1eb382e63a53 | ebc0a0905655c5850108d7e45c4602e8622fd52e | /app.py | 333a1452a3a678bcca0e49e0c71af78bf4ac7b98 | [] | no_license | https://github.com/tanmaymane18/zabbix-data-visualisation | bfa6ae500cfdce7d8796f53f319b1c8d104711ec | 0212401e9c3806d51a345fd2b7cf42c25ece8d2e | refs/heads/master | 2020-04-04T10:35:31.105925 | 2018-11-02T12:03:30 | 2018-11-02T12:03:30 | 155,860,256 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | '''
importing required modules
datetime for date data type
dash, dash_core_compnents, dash_html are required to design the website
dash.dependencies are required for managing input and output
pymysql for connecting to database
'''
import datetime as dt
import dash
from dash.dependencies import Input, Output, Event
import dash_core_components as dcc
import dash_html_components as html
import pymysql
# a dictionary type variable to save the server names w.r.t their hostid
server_name = {'10157':'rp1', '10158':'rp2'}
# initialisation
app = dash.Dash()
# mentioning the link of external css
app.css.append_css({'external_url':'https://stackpath.bootstrapcdn.com/bootstrap/4.1.1/css/bootstrap.min.css'})
# title of the page
app.title='Project'
# layout of the page
app.layout = html.Div(
html.Div(children = [
html.Div( children = [html.Label('Select server : '),
# a dropdown menu for selecting servers
dcc.Dropdown(id='servers',
options=[
{'label':'rp1', 'value':'10157'},
{'label':'rp2', 'value':'10158'}
],
multi = True
)], className='col-sm-4'
),
html.Div( children=[html.Label('Select the item : '),
# a dropdown menu for selecting items
dcc.Dropdown(id='items',
options=[
{'label': 'Free disk space in %', 'value': 'vfs.fs.size[/,pfree]'},
{'label': 'Free swap space in %', 'value': 'system.swap.size[,pfree]'},
{'label': 'Processor load(1)', 'value': 'system.cpu.load[percpu,avg1]'},
{'label': 'Processor load(5)', 'value': 'system.cpu.load[percpu,avg5]'},
{'label': 'Processor load(15)', 'value': 'system.cpu.load[percpu,avg15]'},
{'label':'CPU time(idle)', 'value':'system.cpu.util[,idle]'},
{'label':'CPU time(interrupt)', 'value':'system.cpu.util[,interrupt]'},
{'label':'CPU time(iowait)', 'value':'system.cpu.util[,iowait]'},
{'label':'CPU time(system)', 'value':'system.cpu.util[,system]'},
{'label':'CPU time(user)', 'value':'system.cpu.util[,user]'}
])], className='col-sm-4'
),
# two divisions of datepicker for choosing the FROM date andd TO date
html.Div( children = [html.Label('From : '),
html.Br(),
dcc.DatePickerSingle(id='from',
min_date_allowed = dt.datetime(2017, 3, 4),
max_date_allowed = dt.date.today(),
date = dt.date.today())]
, className='col-sm-2'
),
html.Div( children = [html.Label('To : '),
html.Br(),
dcc.DatePickerSingle(id='to',
min_date_allowed = dt.datetime(2017, 3, 4),
max_date_allowed = dt.date.today(),
date = dt.date.today())
], className='col-sm-2'
),
html.Div( children = [html.Label('Probability : '),
html.Br(),
# a dropdown menu to select the condtion for checking the probability
dcc.Dropdown(id='cond',
options=[
{'label':'>', 'value':'>'},
{'label':'<', 'value':'<'},
{'label':'=', 'value':'='}])
], className='col-sm-6'
),
html.Div( children=[html.Label('Enter a number : '),
html.Br(),
# an input div for entering the number
dcc.Input(id = 'num',
type='float',
placeholder = 'Enter a numberic value')
], className='col-sm-6'
),
# a html division for displaying the result of probability
# by default it shows 'Enter a value to check the probability !'
# probability is calculated using the cal_prob() function
html.Div(id='prob_output',className='col-sm-5'),
# a html division for displaying the desired graph
# queries are run in update_graph() function
html.Div(dcc.Graph(id='graph'), className='col-sm-12')
], className = 'row', style={'width':'100%'}),
className = 'container-fluid')
# callback to update the graph
@app.callback(
Output(component_id='graph', component_property='figure'),
[Input(component_id='servers', component_property='value'),
Input(component_id='items', component_property='value'),
Input(component_id='from', component_property='date'),
Input(component_id='to', component_property='date')])
# function for executing queries and returning the draph data
def update_graph(servers, items, From, to):
try :
if len(servers) == 1 :
conn = pymysql.connect(user='root', db='zabbix')
cur = conn.cursor()
cur.execute("SELECT value, clock FROM history WHERE itemid IN (SELECT itemid FROM items WHERE hostid = %s AND key_ = '%s') AND clock BETWEEN UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s')) AND UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s'));"% (servers[0], items, From, '%Y-%m-%d', to, '%Y-%m-%d'))
r=cur.fetchall()
values = [i[0] for i in r]
clock = [i[1] for i in r]
clock = list(map(dt.datetime.fromtimestamp, clock))
data = {'data':[{'x':clock, 'y':values, 'type':'lines', 'name':'values'}]}
cur.close()
return data
else :
conn = pymysql.connect(user='root', db='zabbix')
cur = conn.cursor()
cur.execute("SELECT value, clock FROM history WHERE itemid IN (SELECT itemid FROM items WHERE hostid = %s AND key_ = '%s') AND clock BETWEEN UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s')) AND UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s'));"% (servers[0], items, From, '%Y-%m-%d', to, '%Y-%m-%d'))
r=cur.fetchall()
values_1 = [i[0] for i in r]
clock_1 = [i[1] for i in r]
clock_1 = list(map(dt.datetime.fromtimestamp, clock_1))
cur.execute("SELECT value, clock FROM history WHERE itemid IN (SELECT itemid FROM items WHERE hostid = %s AND key_ = '%s') AND clock BETWEEN UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s')) AND UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s'));"% (servers[1], items, From, '%Y-%m-%d', to, '%Y-%m-%d'))
r=cur.fetchall()
values_2 = [i[0] for i in r]
clock_2 = [i[1] for i in r]
clock_2 = list(map(dt.datetime.fromtimestamp, clock_2))
data = {'data':[{'x':clock_1, 'y':values_1, 'type':'lines', 'name':'values-{}'.format(server_name[servers[0]])},
{'x':clock_2, 'y':values_2, 'type':'lines', 'name':'values-{}'.format(server_name[servers[1]])}]}
cur.close()
return data
except :
pass
# callback for calculating probability
@app.callback(
Output(component_id='prob_output', component_property='children'),
[Input(component_id='cond', component_property='value'),
Input(component_id='num', component_property='value'),
Input(component_id='servers', component_property='value'),
Input(component_id='items', component_property='value'),
Input(component_id='from', component_property='date'),
Input(component_id='to', component_property='date')])
# function that calculates probability
def cal_prob(cond, num, servers, items, From, to):
try :
if len(servers) == 1 :
conn = pymysql.connect(user='root', db='zabbix')
cur = conn.cursor()
cur.execute("SELECT value FROM history WHERE itemid IN (SELECT itemid FROM items WHERE hostid = %s AND key_ = '%s') AND clock BETWEEN UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s')) AND UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s'));"% (servers[0], items, From, '%Y-%m-%d', to, '%Y-%m-%d'))
r=cur.fetchall()
total = [i for i in r]
cur.execute("SELECT value FROM history WHERE value %c %s AND itemid IN (SELECT itemid FROM items WHERE hostid = %s AND key_ = '%s') AND clock BETWEEN UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s')) AND UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s'));"% (cond, num, servers[0], items, From, '%Y-%m-%d', to, '%Y-%m-%d'))
r=cur.fetchall()
cur.close()
fav = [i for i in r]
prob = round((len(fav)/len(total))*100, 2)
return "{} : {}%".format(server_name[servers[0]], prob)
else :
conn = pymysql.connect(user='root', db = 'zabbix')
cur = conn.cursor()
cur.execute("SELECT value FROM history WHERE itemid IN (SELECT itemid FROM items WHERE hostid = %s AND key_ = '%s') AND clock BETWEEN UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s')) AND UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s'));"% (servers[0], items, From, '%Y-%m-%d', to, '%Y-%m-%d'))
r = cur.fetchall()
total_1 = [i for i in r]
cur.execute("SELECT value FROM history WHERE value %c %s AND itemid IN (SELECT itemid FROM items WHERE hostid = %s AND key_ = '%s') AND clock BETWEEN UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s')) AND UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s'));"% (cond, num, servers[0], items, From, '%Y-%m-%d', to, '%Y-%m-%d'))
r = cur.fetchall()
fav_1 = [i for i in r]
cur.execute("SELECT value FROM history WHERE itemid IN (SELECT itemid FROM items WHERE hostid = %s AND key_ = '%s') AND clock BETWEEN UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s')) AND UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s'));"% (servers[1], items, From, '%Y-%m-%d', to, '%Y-%m-%d'))
r = cur.fetchall()
total_2 = [i for i in r]
cur.execute("SELECT value FROM history WHERE value %c %s AND itemid IN (SELECT itemid FROM items WHERE hostid = %s AND key_ = '%s') AND clock BETWEEN UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s')) AND UNIX_TIMESTAMP(STR_TO_DATE('%s', '%s'));"% (cond, num, servers[1], items, From, '%Y-%m-%d', to, '%Y-%m-%d'))
r = cur.fetchall()
fav_2 = [i for i in r]
prob_1 = round((len(fav_1)/len(total_1))*100, 2)
prob_2 = round((len(fav_2)/len(total_1))*100, 2)
return "{} : {}% {} : {}%".format(server_name[servers[0]], prob_1, server_name[servers[1]], prob_2)
except :
return "Enter a value to check the probability !"
if __name__ == '__main__':
app.run_server(debug=True)
| UTF-8 | Python | false | false | 11,223 | py | 2 | app.py | 1 | 0.516885 | 0.50646 | 0 | 204 | 54.004902 | 326 |
spkcspider/spkcspider | 5,093,831,237,184 | fa90a4672d2c88bee59a4a9c9b77063d7fa47d1d | 3dd165cb7dbda01992c7b65b96c76b9a9945b344 | /spkcspider/apps/spider_filets/migrations/0009_textfilet_push.py | db17467ad6a40fff16dc586312fc67d7ac84b6f6 | [
"MIT"
] | permissive | https://github.com/spkcspider/spkcspider | 41f6f4fc86e6a329c730883b4407c03b47c213ce | 97e448b4da412acebd66c4469c7fcdd07bf90ed2 | refs/heads/master | 2023-01-27T12:47:47.118970 | 2020-12-04T17:01:39 | 2020-12-04T17:02:01 | 108,724,496 | 5 | 0 | MIT | false | 2020-12-04T17:12:19 | 2017-10-29T10:27:27 | 2020-12-04T17:11:41 | 2020-12-04T17:11:34 | 3,901 | 5 | 0 | 0 | Python | false | false | # Generated by Django 2.1.5 on 2019-01-09 19:27
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('spider_filets', '0008_auto_20181121_0749'),
]
operations = [
migrations.AddField(
model_name='textfilet',
name='push',
field=models.BooleanField(blank=True, default=False, help_text='Improve ranking of this content.'),
),
]
| UTF-8 | Python | false | false | 452 | py | 262 | 0009_textfilet_push.py | 198 | 0.615044 | 0.54646 | 0 | 18 | 24.111111 | 111 |
cadeef/bear-exporter | 14,697,378,120,972 | 3e44032794fecc9a514ae8d24a95f156bce4f57c | c7d97f228f78579d87be5f41a43e6688039095f7 | /bear_exporter/note.py | 575a352b2d81cabe23339c5021ca3af91df9957e | [] | no_license | https://github.com/cadeef/bear-exporter | c7201f62bfe9f076c1beccd47c999ecb87078130 | ed3825c05c51776c2a091a9156e5ea4c76774bad | refs/heads/main | 2023-02-04T09:07:49.066802 | 2020-12-26T03:22:34 | 2020-12-26T03:22:34 | 296,979,565 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from peewee import AutoField, BlobField, BooleanField, CharField, IntegerField
from bear_exporter.base import AppleTimestampField, BaseModel
class Note(BaseModel):
archived = IntegerField(column_name="ZARCHIVED", null=True)
archived_date = AppleTimestampField(column_name="ZARCHIVEDDATE", null=True)
conflict_uuid = CharField(column_name="ZCONFLICTUNIQUEIDENTIFIER", null=True)
conflict_uuid_date = AppleTimestampField(
column_name="ZCONFLICTUNIQUEIDENTIFIERDATE", null=True
)
creation_date = AppleTimestampField(column_name="ZCREATIONDATE", null=True)
encrypted = IntegerField(column_name="ZENCRYPTED", null=True)
encrypted_data = BlobField(column_name="ZENCRYPTEDDATA", null=True)
encryption_uuid = CharField(column_name="ZENCRYPTIONUNIQUEIDENTIFIER", null=True)
folder = IntegerField(column_name="ZFOLDER", index=True, null=True)
# has_files = IntegerField(column_name='ZHASFILES', null=True)
has_files = BooleanField(column_name="ZHASFILES", null=True)
has_images = IntegerField(column_name="ZHASIMAGES", null=True)
has_source_code = IntegerField(column_name="ZHASSOURCECODE", null=True)
last_editing_device = CharField(column_name="ZLASTEDITINGDEVICE", null=True)
locked = IntegerField(column_name="ZLOCKED", null=True)
locked_date = AppleTimestampField(column_name="ZLOCKEDDATE", null=True)
modification_date = AppleTimestampField(column_name="ZMODIFICATIONDATE", null=True)
order = IntegerField(column_name="ZORDER", null=True)
order_date = AppleTimestampField(column_name="ZORDERDATE", null=True)
password = IntegerField(column_name="ZPASSWORD", index=True, null=True)
deleted = IntegerField(column_name="ZPERMANENTLYDELETED", null=True)
pinned = IntegerField(column_name="ZPINNED", null=True)
pinned_date = AppleTimestampField(column_name="ZPINNEDDATE", null=True)
server_data = IntegerField(column_name="ZSERVERDATA", index=True, null=True)
shown_in_today_widget = IntegerField(column_name="ZSHOWNINTODAYWIDGET", null=True)
skip_sync = IntegerField(column_name="ZSKIPSYNC", null=True)
subtitle = CharField(column_name="ZSUBTITLE", null=True)
text = CharField(column_name="ZTEXT", null=True)
title = CharField(column_name="ZTITLE", null=True)
todo_completed = IntegerField(column_name="ZTODOCOMPLETED", null=True)
todo_in_completed = IntegerField(column_name="ZTODOINCOMPLETED", null=True)
trashed = BooleanField(column_name="ZTRASHED", null=True)
trashed_date = AppleTimestampField(column_name="ZTRASHEDDATE", null=True)
uuid = CharField(column_name="ZUNIQUEIDENTIFIER", null=True)
vector_clock = BlobField(column_name="ZVECTORCLOCK", null=True)
z_ent = IntegerField(column_name="Z_ENT", null=True)
z_opt = IntegerField(column_name="Z_OPT", null=True)
id = AutoField(column_name="Z_PK", null=True)
class Meta:
table_name = "ZSFNOTE"
def has_includes(self) -> bool:
"""Does note have any included files (or images)?"""
return self.has_images or self.has_files
# def files(self) -> List[File]:
# """Returns a List of files associated with the note"""
# if not self.has_includes():
# return []
# fc = FileCollection(self.db)
# return fc.filter(by="note_id", match=self.id)
def export(self):
pass
| UTF-8 | Python | false | false | 3,367 | py | 14 | note.py | 12 | 0.71785 | 0.71785 | 0 | 64 | 51.609375 | 87 |
Starou/Python-Stub-Server | 15,418,932,639,885 | c18049149a77a8289bdc154f43bcb596ce1d7ae6 | dd33ba6002a0b9740a45a8260ea672479820f91c | /setup.py | d03a4bed3e8d159fe9b24ca58acc530d07724f3a | [
"BSD-2-Clause-Views"
] | permissive | https://github.com/Starou/Python-Stub-Server | ff870f016e7429760afc6f9d4e7735cc3e2351e1 | 7e5451d091aa8f8990a81a6c0d2326b1ce2fac7c | refs/heads/master | 2021-01-16T20:38:14.212499 | 2015-02-12T15:52:21 | 2015-02-12T15:52:21 | 30,700,494 | 0 | 0 | null | true | 2015-02-12T11:59:46 | 2015-02-12T11:59:46 | 2014-10-30T13:52:51 | 2013-06-11T20:50:23 | 137 | 0 | 0 | 0 | null | null | null | #!/usr/bin/env python
from distutils.core import setup
setup(name='stubserver',
version='0.3.1',
description='''A stub webserver used to enable blackbox testing of applications that call external web urls.
For example, an application that consumes data from an external REST api. The usage pattern is intended to be very
much like using a mock framework.''',
author='Chris Tarttelin and Point 2 inc',
author_email='chris@pyruby.co.uk',
url='http://www.pyruby.com/pythonstubserver',
packages=['stubserver'],
)
| UTF-8 | Python | false | false | 567 | py | 2 | setup.py | 2 | 0.691358 | 0.684303 | 0 | 14 | 39.5 | 121 |
lyy8762/rabbit | 15,650,860,829,309 | 582b5b54cb139c607588902de1723cd275b523ac | 6fed38f6a1b3fd7ac563b45b0afdaa27869966b4 | /demo/Demo/Lib/Project.py | 6f92ac00cf5e9ec1346d13c3afba860fa1deb6f2 | [] | no_license | https://github.com/lyy8762/rabbit | f1161002555c1906405a765a3656ed343a9b74d2 | 5bc95cb25feba70fb4f346b7c1e766cad25dd7fd | refs/heads/master | 2019-07-22T17:40:16.061643 | 2018-12-13T16:21:14 | 2018-12-13T16:21:14 | 36,235,987 | 0 | 1 | null | false | 2015-05-26T01:29:06 | 2015-05-25T14:34:18 | 2015-05-25T14:37:54 | 2015-05-26T01:29:06 | 0 | 0 | 1 | 0 | HTML | null | null | # -*- coding: utf-8 -*-
from Common import *
#*****************************************************
# 作者 : David
# 版本 : 1.0
# 功能说明: 取DB中的数据或固定值
# 参数说明:
#*****************************************************
def AppendCompareData(interfaceName, dbSheetData, i, num):
print interfaceName
cmd="sql="+interfaceName+"_GetXlsSql(dbSheetData, i)"
exec cmd
# print GetDBkeyInfos(sql, 0, num)
# print type(GetDBkeyInfos(sql, 0, num))
# print CodeChange(sql)
# return [CodeChange(GetDBkeyInfos(sql, 0, num)), CodeChange(sql)]
def ResultcodeCompare(code, xmlstring):
result_code=et.fromstring(xmlstring).find("result_code").text
if result_code!=code:
compareResult=CompairResult("FAIL")
return compareResult, "-1"
else:
compareResult=CompairResult("PASS")
return compareResult, "0"
#*****************************************************
# 作者 : David
# 版本 : 1.0
# 功能说明: 根据 sheet名,xml中取出的值与DB取出的值(或固定值)做比对
# 参数说明:
#*****************************************************
def SheetCompare(interfaceName, caseId, DataPath, sheetName, xmlobj,url,num):
global checkpointLosNum, checkpointSucNum
InterfaceDetailFlag=0
# xmlSheetData=ImportExcelData(xmlDataPath,sheetName)
SheetData=ImportDBXmlExcelData(DataPath,sheetName)
for i in range(len(SheetData[0])):
xmldata=AppendXmlData(SheetData[0], xmlobj, i, num)
dbdata=AppendCompareData(interfaceName, SheetData[1], i, num)
print xmldata[0], dbdata[0]
dbdata[0]=CodeChange(dbdata[0])
dbdata[1]=CodeChange(dbdata[1])
if xmldata[0] in ("None","", "0", CodeChange("无")) and dbdata[0] in ("None", "0", ""):
checkpointSucNum=checkpointSucNum+1
elif xmldata[0] != dbdata[0]:
checkpointLosNum=checkpointLosNum+1
InterfaceDetailFlag=-1
# WriteReportDetail(sheetName,xmldata[1], xmldata[0],dbdata[0],"FAILED",url)
print xmldata[0],dbdata[0]
WriteReportDetails(interfaceName, caseId, sheetName,xmldata[1], str(xmldata[0])[:240],str(dbdata[0])[:240],"FAILED",url, str(dbdata[1])[:240])
log.Output(sheetName+"的XML中 ["+xmldata[1]+"] 的值为: ["+xmldata[0]+"] ,数据库查询对应的值为: ["+dbdata[0]+"] ,不符合条件。对应的url为"+CodeChange(url)+"对应的sql为"+dbdata[1], "error")
else:
# WriteReportDetails(sheetName,xmldata[1], xmldata[0],dbdata[0],"PASS",url,dbdata[1])
checkpointSucNum=checkpointSucNum+1
# log.Output(sheetName+"的XML中 ["+xmldata[1]+"] 的值为: ["+xmldata[0]+"] ,数据库查询对应的值为: ["+dbdata[0]+"] ,符合条件。对应的url为"+url+"对应的sql为"+dbdata[1])
print checkpointLosNum, checkpointSucNum
return [checkpointLosNum, checkpointSucNum, InterfaceDetailFlag]
#*****************************************************
# 作者 : David
# 版本 : 1.0
# 功能说明: 特定比较后调用函数写统计数据
# 参数说明:
#*****************************************************
def CompairResult(result):
global checkpointLosNum, checkpointSucNum
InterfaceDetailFlag=0
if result=="FAIL":
checkpointLosNum=checkpointLosNum+1
InterfaceDetailFlag=-1
else:
checkpointSucNum=checkpointSucNum+1
print checkpointLosNum, checkpointSucNum
return [checkpointLosNum, checkpointSucNum, InterfaceDetailFlag]
#*****************************************************
# 作者 : David
# 版本 : 1.0
# 功能说明: content_book_list
# 参数说明:
#*****************************************************
def content_book_list(interfaceName, caseId,DataPath, xmlstring,url):
Runflag=0
try:
categoryNum=int(et.fromstring(xmlstring).find("size").text)
except:
categoryNum=0
if categoryNum==0:
compareResult=CompairResult("FAIL")
if compareResult[2]==-1:
Runflag=-1
else:
for tmpCategoryNum in range(categoryNum):
try:
xmlobj= et.fromstring(xmlstring).find("books").findall("book")[tmpCategoryNum]
except:
xmlobj=et.fromstring(xmlstring)
if myfilter=="3":
compareResult=SheetCompare(interfaceName, caseId, DataPath,"book", xmlobj,url,tmpCategoryNum)
elif myfilter=="9":
compareResult=SheetCompare(interfaceName, caseId, DataPath,"book1", xmlobj,url,tmpCategoryNum)
else:
compareResult=SheetCompare(interfaceName, caseId, DataPath, "book2", xmlobj,url,tmpCategoryNum)
if compareResult[2]==-1:
Runflag=-1
return compareResult, Runflag
def content_book_list_GetXlsSql(EecelData, i):
strsql=EecelData[i]
if "myid" in strsql:
strsql=strsql.replace("myid",str(myid))
if "myfrom" in strsql:
strsql=strsql.replace("myfrom",str(myfrom))
if "mysize" in strsql:
strsql=strsql.replace("mysize",str(mysize))
if "myid" in strsql:
strsql=strsql.replace("myfilter",str(myfilter))
return strsql
def content_book_list_GetXlsVa(TCRes, i):
global myid, myfilter, myfrom, mysize
if TCRes.Fields(i).Name=="listid":
myid=TCRes.Fields(i).value
if TCRes.Fields(i).Name=="from":
myfrom=TCRes.Fields(i).value
if TCRes.Fields(i).Name=="size":
mysize=TCRes.Fields(i).value
if TCRes.Fields(i).Name=="filter_by_progress":
myfilter=TCRes.Fields(i).value
#*****************************************************
# 作者 : David
# 版本 : 1.0
# 功能说明: content_category_list
# 参数说明:
#*****************************************************
def content_category_list(interfaceName,caseId, DataPath, xmlstring,url):
Runflag=0
# sql="select count(*) from book_classify"
# categoryNum=int(GetDBkeyInfo(sql))
# if categoryNum==0:
# compareResult=SheetCompare(interfaceName, caseId, DataPath, "content_category_list", xmlobj,url,0)
# else:
# for tmpCategoryNum in range(categoryNum):
# xmlobj= et.fromstring(xmlstring).find("categorys")
# compareResult=SheetCompare(interfaceName, caseId, DataPath, "content_category_list", xmlobj,url,tmpCategoryNum)
# if compareResult[2]==-1:
# Runflag=-1
# return compareResult, Runflag
def content_category_list_GetXlsSql(EecelData, i):
strsql=EecelData[i]
if "myid" in strsql:
strsql=strsql.replace("myid",str(myid))
return strsql
def content_category_list_GetXlsVa(TCRes, i):
global myid
if TCRes.Fields(i).Name=="listid":
myid=TCRes.Fields(i).value
#*****************************************************
# 作者 : David
# 版本 : 1.0
# 功能说明: content_sub_category_list
# 参数说明:
#*****************************************************
def content_sub_category_list(interfaceName,caseId, DataPath, xmlstring,url):
Runflag=0
sql="select count(*) from book_classify_sub where category_id ="+myid
categoryNum=int(GetDBkeyInfo(sql))
if categoryNum==0:
xmlobj= et.fromstring(xmlstring).find("subcategorys")
compareResult=SheetCompare(interfaceName, caseId, DataPath, "content_sub_category_list", xmlobj,url,0)
else:
for tmpCategoryNum in range(categoryNum):
xmlobj= et.fromstring(xmlstring).find("subcategorys")
compareResult=SheetCompare(interfaceName, caseId, DataPath, "content_sub_category_list", xmlobj,url,tmpCategoryNum)
if compareResult[2]==-1:
Runflag=-1
return compareResult, Runflag
def content_sub_category_list_GetXlsSql(EecelData, i):
strsql=EecelData[i]
if "myid" in strsql:
strsql=strsql.replace("myid",str(myid))
return strsql
def content_sub_category_list_GetXlsVa(TCRes, i):
global myid
if TCRes.Fields(i).Name=="category_id":
myid=TCRes.Fields(i).value
#*****************************************************
# 作者 : David
# 版本 : 1.0
# 功能说明: content_book_list
# 参数说明:
#*****************************************************
def content_category_book_list(interfaceName, caseId,DataPath, xmlstring,url):
Runflag=0
try:
categoryNum=int(et.fromstring(xmlstring).find("size").text)
except:
categoryNum=0
if categoryNum==0:
compareResult=CompairResult("FAIL")
if compareResult[2]==-1:
Runflag=-1
else:
for tmpCategoryNum in range(categoryNum):
try:
xmlobj= et.fromstring(xmlstring).find("books").findall("book")[tmpCategoryNum]
except:
xmlobj=et.fromstring(xmlstring)
if myfilter=="3":
compareResult=SheetCompare(interfaceName, caseId, DataPath,"book", xmlobj,url,tmpCategoryNum)
elif myfilter=="9":
compareResult=SheetCompare(interfaceName, caseId, DataPath,"book1", xmlobj,url,tmpCategoryNum)
else:
compareResult=SheetCompare(interfaceName, caseId, DataPath, "book2", xmlobj,url,tmpCategoryNum)
if compareResult[2]==-1:
Runflag=-1
return compareResult, Runflag
def content_category_book_list_GetXlsSql(EecelData, i):
strsql=EecelData[i]
if "myid" in strsql:
strsql=strsql.replace("myid",str(myid))
if "myfrom" in strsql:
strsql=strsql.replace("myfrom",str(myfrom))
if "mysize" in strsql:
strsql=strsql.replace("mysize",str(mysize))
if "myid" in strsql:
strsql=strsql.replace("myfilter",str(myfilter))
return strsql
def content_category_book_list_GetXlsVa(TCRes, i):
global myid, myfilter, myfrom, mysize
if TCRes.Fields(i).Name=="category_id":
myid=TCRes.Fields(i).value
if TCRes.Fields(i).Name=="from":
myfrom=TCRes.Fields(i).value
if TCRes.Fields(i).Name=="size":
mysize=TCRes.Fields(i).value
if TCRes.Fields(i).Name=="filter_by_progress":
myfilter=TCRes.Fields(i).value
#*****************************************************
# 作者 : David
# 版本 : 1.0
# 功能说明: content_bookinfo
# 参数说明:
#*****************************************************
def content_bookinfo(interfaceName,caseId, DataPath, xmlstring,url):
Runflag=0
xmlobj= et.fromstring(xmlstring)
compareResult=SheetCompare(interfaceName, caseId, DataPath, "book", xmlobj,url,0)
if compareResult[2]==-1:
Runflag=-1
return compareResult, Runflag
def content_bookinfo_GetXlsSql(EecelData, i):
strsql=EecelData[i]
if "myid" in strsql:
strsql=strsql.replace("myid",str(myid))
return strsql
def content_bookinfo_GetXlsVa(TCRes, i):
global myid
if TCRes.Fields(i).Name=="book_id":
myid=TCRes.Fields(i).value
#*****************************************************
# 作者 : David
# 版本 : 1.0
# 功能说明: content_bookinfo
# 参数说明:
#*****************************************************
def content_bookinfo_list(interfaceName,caseId, DataPath, xmlstring,url):
Runflag=0
# categoryNum=len(myid.split(","))
# if categoryNum==0:
# try:
# xmlobj= et.fromstring(xmlstring).find("books").findall("entry")[tmpCategoryNum]
# except:
# xmlobj=et.fromstring(xmlstring)
# compareResult=SheetCompare(interfaceName, caseId, DataPath, "book", xmlobj,url,0)
# if compareResult[2]==-1:
# Runflag=-1
# else:
# for tmpCategoryNum in range(categoryNum):
# try:
# xmlobj= et.fromstring(xmlstring).find("books").findall("entry")[tmpCategoryNum]
# except:
# xmlobj=et.fromstring(xmlstring)
# compareResult=SheetCompare(interfaceName, caseId, DataPath, "book", xmlobj,url,tmpCategoryNum)
# if compareResult[2]==-1:
# Runflag=-1
#
# return compareResult, Runflag
def content_bookinfo_list_GetXlsSql(EecelData, i):
strsql=EecelData[i]
if "myid" in strsql:
strsql=strsql.replace("myid",str(myid))
return strsql
def content_bookinfo_list_GetXlsVa(TCRes, i):
global myid
if TCRes.Fields(i).Name=="bookIdList":
myid=TCRes.Fields(i).value
#*****************************************************
# 作者 : David
# 版本 : 1.0
# 功能说明: content_bookinfo
# 参数说明:
#*****************************************************
def content_volume_list(interfaceName,caseId, DataPath, xmlstring,url):
Runflag=0
try:
categoryNum=int(et.fromstring(xmlstring).find("size").text)
except:
categoryNum=0
if categoryNum==0:
try:
xmlobj= et.fromstring(xmlstring).find("volumes")
except:
xmlobj=et.fromstring(xmlstring)
compareResult=SheetCompare(interfaceName, caseId, DataPath, "volumes", xmlobj,url,0)
if compareResult[2]==-1:
Runflag=-1
else:
for tmpCategoryNum in range(categoryNum):
try:
xmlobj= et.fromstring(xmlstring).find("volumes")
except:
xmlobj=et.fromstring(xmlstring)
compareResult=SheetCompare(interfaceName, caseId, DataPath, "volumes", xmlobj,url,tmpCategoryNum)
if compareResult[2]==-1:
Runflag=-1
return compareResult, Runflag
def content_volume_list_GetXlsSql(EecelData, i):
strsql=EecelData[i]
if "myid" in strsql:
strsql=strsql.replace("myid",str(myid))
return strsql
def content_volume_list_GetXlsVa(TCRes, i):
global myid
if TCRes.Fields(i).Name=="book_id":
myid=TCRes.Fields(i).value
#*****************************************************
# 作者 : David
# 版本 : 1.0
# 功能说明: content_bookinfo
# 参数说明:
#*****************************************************
def content_book_chapter_list(interfaceName,caseId, DataPath, xmlstring,url):
    """Compare each chapter entry of *xmlstring* against 'content_chapterinfo'.

    An empty list passes trivially.  Returns (last compareResult, Runflag);
    Runflag is -1 when any comparison failed.
    NOTE(review): with categoryNum == 0 the 'compareResult' comes from
    CompairResult("PASS") -- presumably a tuple-like pass marker; confirm.
    """
    Runflag=0
    try:
        categoryNum=int(et.fromstring(xmlstring).find("size").text)
    except:
        categoryNum=0
    if categoryNum==0:
        compareResult=CompairResult("PASS")
    else:
        for tmpCategoryNum in range(categoryNum):
            # NOTE(review): the same <chapters> node is re-selected on every
            # iteration; only the index passed to SheetCompare changes.
            try:
                xmlobj= et.fromstring(xmlstring).find("chapters")
            except:
                xmlobj=et.fromstring(xmlstring)
            compareResult=SheetCompare(interfaceName, caseId, DataPath, "content_chapterinfo", xmlobj,url,tmpCategoryNum)
            if compareResult[2]==-1:
                Runflag=-1
    return compareResult, Runflag
def content_book_chapter_list_GetXlsSql(EecelData, i):
    """Return the i-th SQL template with the 'myid' and 'myfrom' placeholders
    replaced by the corresponding module globals when present."""
    sql = EecelData[i]
    for placeholder, provider in (("myid", lambda: myid), ("myfrom", lambda: myfrom)):
        if placeholder in sql:
            sql = sql.replace(placeholder, str(provider()))
    return sql
def content_book_chapter_list_GetXlsVa(TCRes, i):
    """Capture 'book_id' into global ``myid`` and 'from' into global ``myfrom``.

    NOTE: Python 2 only -- uses the legacy ``<>`` operator and the print
    statement.  A blank or missing 'from' value falls back to 0.
    """
    global myid, myfrom
    if TCRes.Fields(i).Name=="book_id":
        myid=TCRes.Fields(i).value
        print TCRes.Fields(i).Name
    if TCRes.Fields(i).Name=="from":
        if TCRes.Fields(i).value<>" " and TCRes.Fields(i).value<>None:
            myfrom=TCRes.Fields(i).value
        else:
            myfrom=0
#*****************************************************
# Author : David
# Version: 1.0
# Purpose: content_author_book_list (author book-list comparison and helpers)
# Params :
#*****************************************************
def content_author_book_list(interfaceName,caseId, DataPath, xmlstring,url):
    """Compare each <books>/<book> entry of *xmlstring* against the 'book' sheet.

    An empty list passes trivially.  Returns (last compareResult, Runflag);
    Runflag is -1 when any comparison failed.
    """
    Runflag=0
    try:
        categoryNum=int(et.fromstring(xmlstring).find("size").text)
    except:
        categoryNum=0
    if categoryNum==0:
        compareResult=CompairResult("PASS")
        if compareResult[2]==-1:
            Runflag=-1
    else:
        for tmpCategoryNum in range(categoryNum):
            # Select the per-entry <book> node; fall back to the whole
            # document if the expected structure is missing.
            try:
                xmlobj= et.fromstring(xmlstring).find("books").findall("book")[tmpCategoryNum]
            except:
                xmlobj=et.fromstring(xmlstring)
            compareResult=SheetCompare(interfaceName, caseId, DataPath, "book", xmlobj,url,tmpCategoryNum)
            if compareResult[2]==-1:
                Runflag=-1
    return compareResult, Runflag
def content_author_book_list_GetXlsSql(EecelData, i):
    """Return the i-th SQL template with the 'myid', 'myfrom' and 'mysize'
    placeholders replaced by the corresponding module globals."""
    sql = EecelData[i]
    for placeholder, provider in (
        ("myid", lambda: myid),
        ("myfrom", lambda: myfrom),
        ("mysize", lambda: mysize),
    ):
        if placeholder in sql:
            sql = sql.replace(placeholder, str(provider()))
    return sql
def content_author_book_list_GetXlsVa(TCRes, i):
    """Capture the 'author_id', 'from' and 'size' fields of the i-th result
    column into the module globals used by content_author_book_list_GetXlsSql."""
    global myid, myfrom, mysize
    field = TCRes.Fields(i)
    if field.Name == "author_id":
        myid = field.value
    if field.Name == "from":
        myfrom = field.value
    if field.Name == "size":
        mysize = field.value
| UTF-8 | Python | false | false | 17,264 | py | 30 | Project.py | 19 | 0.578897 | 0.569229 | 0 | 458 | 35.556769 | 169 |
srchamberlain/onda | 25,425 | f0e017c74d3fcaedf9a69e6ff21a7d3184e3631c | eb5614eb7089679f65be7c5a8f9748ce51087805 | /data_extraction_layer/instrument_layer/pilatus_petra.py | 61a72d1c85a3bd9c54bbbd9bacecbf855f4cc5dd | [] | no_license | https://github.com/srchamberlain/onda | 92bcee63533f0249a3feea3f7dda8ce8a060738e | 803271a5be3e98937541068692adfef96b14c00c | refs/heads/master | 2021-01-16T23:11:14.180702 | 2016-09-25T15:32:57 | 2016-09-25T15:32:57 | 64,957,050 | 0 | 0 | null | true | 2016-08-04T18:11:55 | 2016-08-04T18:11:54 | 2016-07-06T09:41:35 | 2016-07-29T15:32:00 | 342 | 0 | 0 | 0 | null | null | null | # This file is part of OnDA.
#
# OnDA is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OnDA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OnDA. If not, see <http://www.gnu.org/licenses/>.
import datetime
import scipy.constants
slab_shape = (2527, 2463)
native_shape = (2527, 2463)
def raw_data(evt):
    """Return the detector frame stored on the event's open file handle."""
    file_handle = evt['filehandle']
    return file_handle.data
def num_events_in_file(_evt):
    """Return the number of events in one file; a Pilatus CBF file always
    holds exactly one frame, so the argument is ignored."""
    return 1
def timestamp(evt):
    """Parse the acquisition time from the second line of the CBF header."""
    header_contents = evt['filehandle'].header[u'_array_data.header_contents']
    header_lines = header_contents.split('\r\n')
    return datetime.datetime.strptime(header_lines[1], '# %Y-%m-%dT%H:%M:%S.%f')
def beam_energy(evt):
    """Return the photon energy in eV for this event.

    The wavelength is read from the CBF header (16th line, third token) and
    converted with E = h * c / lambda.  If the header cannot be parsed, the
    fallback value from the monitor configuration is returned instead.

    Bug fix: the original read ``scipy.costants`` (typo), which always raised
    a NameError-equivalent and silently forced the fallback value.
    NOTE(review): the wavelength is used as-is (metres assumed) -- confirm
    the header's unit against real Pilatus files.
    """
    try:
        header_data_list = evt['filehandle'].header[u'_array_data.header_contents'].split('\r\n')
        wavelength = float(header_data_list[15].split()[2])
        return float(scipy.constants.h * scipy.constants.c / (wavelength * scipy.constants.electron_volt))
    except Exception:
        return float(evt['monitor_params']['General']['fallback_beam_energy'])
def detector_distance(evt):
    """Read the detector distance from the CBF header (17th line, third
    token), falling back to the configured default on any parse failure."""
    try:
        header_contents = evt['filehandle'].header[u'_array_data.header_contents']
        return float(header_contents.split('\r\n')[16].split()[2])
    except Exception:
        return float(evt['monitor_params']['General']['fallback_detector_distance'])
def filename_and_event(evt):
    """Return (source filename, event index); the index is always 0 because
    each Pilatus file contains a single frame."""
    return (evt['filename'], 0)
| UTF-8 | Python | false | false | 1,825 | py | 35 | pilatus_petra.py | 25 | 0.68274 | 0.668493 | 0 | 55 | 32.181818 | 105 |
slimat/spl-meter-with-RPi | 11,536,282,180,202 | cc58b6e91bbf697e788ea5f5a06512e323d5fcac | 1b82f769bd8782379c9c27e6d118ed46f69cf97e | /testLCD_record5.py | 61df7d16730a24dadf5265191dc00496d4c0e726 | [
"MIT"
] | permissive | https://github.com/slimat/spl-meter-with-RPi | 499486ddb6c92a6bfbecbfa4181af7c056bcf8a0 | 781c50b1985ed0e3dc6ff09e48059be95659ba8c | refs/heads/master | 2021-01-06T09:24:16.984014 | 2020-01-31T04:27:43 | 2020-01-31T04:27:43 | 241,278,595 | 0 | 0 | null | true | 2020-02-18T05:15:50 | 2020-02-18T05:15:50 | 2020-01-31T04:27:45 | 2020-01-31T04:27:43 | 6,463 | 0 | 0 | 0 | null | false | false | #!/usr/bin/env python
import errno
import spl_lib as spl
from scipy.signal import lfilter
import numpy
import pyaudio
import wave
import numpy as np
import os, time
import RPi.GPIO as GPIO
import sys
from threading import Timer
import smbus2 as smbus
import subprocess
import concurrent.futures as cf
def get_path(base, tail, head=''):
    """Join *base* and *tail*; when *head* is given, prefix it to the joined
    path with its leading character dropped (used to turn an absolute path
    into a file:/// URL)."""
    joined = os.path.join(base, tail)
    if head == '':
        return joined
    return get_path(head, joined[1:])
# Directory containing this script; all data/HTML paths are relative to it.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# file:/// URL of the display page (leading '/' of the absolute path is
# folded into the URL prefix by get_path).
HTML_PATH = get_path(BASE_DIR, 'html/main.html', 'file:///')
# Text files updated with the latest and the maximum decibel readings.
SINGLE_DECIBEL_FILE_PATH = get_path(BASE_DIR, 'decibel_data/single_decibel.txt')
MAX_DECIBEL_FILE_PATH = get_path(BASE_DIR, 'decibel_data/max_decibel.txt')
'''
Listen to mic
'''
def is_meaningful(old, new):
    """Return True when the reading moved by more than 3 dB since *old*."""
    change = abs(old - new)
    return change > 3
def update_text(path, content):
    """Overwrite the file at *path* with *content*.

    Any IOError (e.g. missing directory) is printed and swallowed, matching
    the original best-effort behaviour.  Bug fix: the original closed the
    file only on the success path, leaking the handle if the write raised;
    the ``with`` block guarantees it is closed.
    """
    try:
        with open(path, 'w') as f:
            f.write(content)
    except IOError as e:
        print(e)
def click(id):
    """Click the DOM element with the given id via the selenium driver.

    NOTE(review): ``driver`` is never defined in this module (the selenium
    setup appears to have been removed) -- calling this raises NameError.
    Confirm where ``driver`` should come from.
    """
    driver.find_element_by_id(id).click()
def open_html(path):
    """Navigate the selenium driver to *path*.

    NOTE(review): ``driver`` is never defined in this module -- calling this
    raises NameError; confirm where it should come from.
    """
    driver.get(path)
def update_max_if_new_is_larger_than_max(new, max):
    """Return the running maximum; when *new* sets a record, persist it to
    the max-decibel file and notify the display before returning it."""
    print("update_max_if_new_is_larger_than_max called")
    if new <= max:
        return max
    print("max observed")
    update_text(MAX_DECIBEL_FILE_PATH, 'MAX: {:.2f} dBA'.format(new))
    click('update_max_decibel')
    return new
# BCM pin numbers of the shutdown / reset push buttons.
shutdownPin = 6;
resetPin = 5;
# I2C bus 1 is the user-accessible bus on the Raspberry Pi header.
i2c = smbus.SMBus(1) # 1 is bus number
addr02=0x3e #lcd
# Control bytes and commands for the AQM0802/1602-style LCD.
_command=0x00
_data=0x40
_clear=0x01
_home=0x02
display_On=0x0f
# DDRAM address of the second display line (0x40) plus the set-address bit.
LCD_2ndline=0x40+0x80
time.sleep(1)
#LCD AQM0802/1602
def command( code ):
    """Send one command byte to the LCD over I2C (control byte 0x00)."""
    i2c.write_byte_data(addr02, _command, code)
    #time.sleep(0.1)
def writeLCD(message):
    """Write *message* to the LCD by sending its character codes as one
    I2C data block (control byte 0x40)."""
    # Comprehension replaces the original manual append loop.
    mojilist = [ord(moji) for moji in message]
    i2c.write_i2c_block_data(addr02, _data, mojilist)
#GPIO.setmode(GPIO.BCM)
# GPIO19 : reset button
#GPIO.setup(19, GPIO.IN, pull_up_down = GPIO.PUD_UP)
# GPIO26 : shutdown button
#GPIO.setup(26, GPIO.IN, pull_up_down = GPIO.PUD_UP)
def shutdown(channel):
    """GPIO callback: power the Pi off (*channel* is supplied by RPi.GPIO)."""
    os.system("sudo shutdown -h now")
def reboot(channel):
    """GPIO callback: reboot the Pi."""
    os.system("sudo reboot")
def init():
    """Run the LCD controller's power-on initialisation sequence.

    The byte sequence matches the usual ST7032-style init for AQM0802/1602
    modules (function set, extended instruction set, oscillator, contrast,
    follower control, clear, display on) -- confirm against the datasheet.
    """
    command(0x38)
    command(0x39)
    command(0x14)
    command(0x73)
    command(0x56)
    command(0x6c)
    command(0x38)
    command(_clear)
    command(display_On)
    command(0x0c)
    #usleep(39)
#main
init ()
command(_clear)
writeLCD("RMS: ")
# Audio capture parameters: 16 kHz mono, 8192-frame buffers.
fs = 16000
channel = 1
counter = 0
size = 2**13
#size = 9600
#size = 4000
# A-weighting filter coefficients for the chosen sample rate.
NUMERATOR, DENOMINATOR = spl.A_weighting(fs)
audio = pyaudio.PyAudio()
def adCallback(in_data, frame_count, time_info, status):
    """PyAudio stream callback: store the raw chunk and expose a normalised
    float buffer via the module-global ``buf`` (currently unused -- the
    stream below is opened in blocking mode)."""
    data.append(in_data)
    global buf
    buf = np.frombuffer(in_data, dtype = "int16")/32768.0
    return(None, pyaudio.paContinue)
# Blocking input stream; the callback variant is left commented out.
stream = audio.open(format = pyaudio.paInt16,
                    channels = int(channel),
                    rate = int(fs),
                    input = True,
                    #frames_per_buffer = size,
                    frames_per_buffer = size)
                    #stream_callback = adCallback)
def listen(old=0, error_count=0, min_decibel=100, max_decibel=0):
    """Read one buffer from the microphone and return its A-weighted level.

    Returns the level rounded to one decimal only when it differs from *old*
    by more than 3 dB; returns None on a read error or an insignificant
    change.  min_decibel / max_decibel are currently unused.
    NOTE(review): numpy.fromstring is deprecated -- frombuffer is the
    modern equivalent.
    """
    #print("Listening")
    #while True:
    try:
        ## read() returns string. You need to decode it into an array later.
        block = stream.read(size, exception_on_overflow = False)
    except IOError as e:
        error_count += 1
        print(" (%d) Error type is: %s" % (error_count, e))
    else:
        ## Int16 is a numpy data type which is Integer (-32768 to 32767)
        ## If you put Int8 or Int32, the result numbers will be ridiculous
        decoded_block = numpy.fromstring(block, 'Int16')
        ## This is where you apply A-weighted filter
        y = lfilter(NUMERATOR, DENOMINATOR, decoded_block)
        new_decibel = 20*numpy.log10(spl.rms_flat(y))
        if is_meaningful(old, new_decibel):
            old = new_decibel
            print('A-weighted: {:+.2f} dB'.format(new_decibel))
            #update_text(SINGLE_DECIBEL_FILE_PATH, '{:.2f} dBA'.format(new_decibel))
            #max_decibel = update_max_if_new_is_larger_than_max(new_decibel, max_decibel)
            #click('update_decibel')
            return round(new_decibel, 1)
    #stream.stop_stream()
    #stream.close()
    #pa.terminate()
def rmscalic(sound):
    """Compute the RMS level of *sound* in dB into the global ``rmslevel``.

    NOTE(review): the windowing/FFT results (windowedSound, spectrum,
    Amp_spectrum, freqList) are computed but never used -- the RMS is taken
    from the raw samples; the plotting line is commented out.  Confirm
    whether the spectral code is dead or pending.
    """
    soundSuper = type(sound)
    print("type(soundSuper) : " + str(soundSuper))
    data_dummy = []
    for i in sound:
        data_dummy.append(i)
    print("type(data_dummy) : " + str(type(data_dummy)))
    global rmslevel
    ##### make window function ####
    window = np.hamming(len(data_dummy))
    ##### multiply variable sound with window function ####
    windowedSound = data_dummy * window
    #### FFT ####
    spectrum = np.fft.fft(windowedSound)
    #### calc Amplitude spectrum ####
    Amp_spectrum = np.abs(spectrum)
    #### calc xLabel ####
    freqList = np.arange(0, len(data_dummy), 1.0) * fs/len(data_dummy)
    #### plot Amp_spectrum ####
    # plt.plot(freqList, Amp_spectrum)
    #### calc rms ####
    rmslevel = np.round(20*np.log10(np.sqrt(np.mean((np.square(data_dummy))))), 1)
if __name__ == '__main__':
    executor = cf.ThreadPoolExecutor(max_workers = 4)
    data = []
    buf = []
    #stream.start_stream()
    print("Recording.")
    time.sleep(1)
    # Wire the shutdown/reset buttons to their callbacks (2 s debounce).
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(resetPin, GPIO.IN, pull_up_down = GPIO.PUD_UP)
    GPIO.setup(shutdownPin, GPIO.IN, pull_up_down = GPIO.PUD_UP)
    GPIO.add_event_detect(resetPin, GPIO.FALLING, callback = reboot, bouncetime = 2000)
    GPIO.add_event_detect(shutdownPin, GPIO.FALLING, callback = shutdown, bouncetime = 2000)
    #print(spl_meter_text.listen())
    #spl_meter_text.listen()
    while 1:
        # NOTE(review): Executor.submit expects a callable; these calls
        # evaluate command(...)/writeLCD(...) immediately and submit their
        # None result, so the executor adds nothing -- confirm intent.
        executor.submit(command(LCD_2ndline)) # display at 2nd line of display
        #print(listen())
        #print(spl_meter_text.listen())
        #listen()
        # executor.submit(rmscalic(buf)) # display at 2nd line of display
        #res = subprocess.check_output(['vcgencmd','measure_temp'])
        #print(str(rmslevel) + " dB") # display rms level on big display
        #print(str(rmscalic(buf)) + " dB") # display rms level on big display
        #executor.submit(writeLCD(str(rmslevel)+' dB')) # display rms level on small display
        executor.submit(writeLCD(str(listen())+'dBA')) # display rms level on small display
        time.sleep(0.5)
        if (GPIO.event_detected(resetPin)):
            break
        # print(resetPin)
        #GPIO.add_event_detect(resetPin, GPIO.FALLING, callback = reboot, bouncetime = 1000)
        if (GPIO.event_detected(shutdownPin)):
            break
        # GPIO.add_event_detect(shutdownPin, GPIO.FALLING, callback = shutdown, bouncetime = 1000)
        time.sleep(1);
    #GPIO.cleanup()
    GPIO.cleanup()
| UTF-8 | Python | false | false | 6,891 | py | 1 | testLCD_record5.py | 1 | 0.61515 | 0.589319 | 0 | 233 | 28.575107 | 101 |
etsaf/textbook-solutions | 4,303,557,238,480 | feb63183a272cc26f8efa82bb68abbde046f5c6b | 411487aeac7b75acf970d8ef5a13003b3b8e658b | /5.3.1.py | 791a7ef6fe0d38da022e0724163b601324809761 | [] | no_license | https://github.com/etsaf/textbook-solutions | a4115559fe2a14419ef7e96d331833eb40eea009 | 3034d8088527191084dfd4b5a69495f990b9f585 | refs/heads/main | 2023-07-16T11:50:13.626606 | 2021-08-18T19:29:41 | 2021-08-18T19:29:41 | 397,710,796 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | def flipBit(x):
if (~x) == 0:
return len(list(str(x)))
prevLen = 0
currLen = 0
ans = 0
while x != 0:
if (x & 1) == 1:
prevZ = False
currLen += 1
else:
if x % 2 == 0:
prevLen = 0
else:
prevLen = currLen
currlen = 0
ans = max(prevLen + currLen + 1, ans)
x = x >> 1
return ans | UTF-8 | Python | false | false | 429 | py | 52 | 5.3.1.py | 51 | 0.375291 | 0.342657 | 0 | 19 | 21.631579 | 45 |
AdamArutyunov/ParlAI | 3,513,283,282,567 | 9843f89ea60a5b6e3a385d065646a59df87f041d | a608c62e3fe08f37db8422db821c59cc73062e44 | /tests/nightly/gpu/test_unlikelihood.py | 8d990b67c67666a1cd955bcd4257dbc40fff3d5a | [
"MIT"
] | permissive | https://github.com/AdamArutyunov/ParlAI | 48cacd38f17f87c1da98c79fe19a8f72d127cc8d | 06258ede9a7461439420e817f55796bf352543c2 | refs/heads/master | 2022-12-21T10:27:59.723515 | 2020-09-22T14:26:17 | 2020-09-22T14:26:17 | 284,688,333 | 1 | 4 | MIT | true | 2020-09-06T12:01:43 | 2020-08-03T12:00:47 | 2020-08-07T11:04:38 | 2020-09-06T12:01:43 | 60,350 | 0 | 0 | 0 | Python | false | false | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import unittest
import parlai.utils.testing as testing_utils
"""
Integration tests for the Dialogue Unlikelihood project
"""
FAST_MODE = True
NUM_EXAMPLES = 512 if FAST_MODE else -1
@testing_utils.skipUnlessGPU
class TestUnlikelihood(unittest.TestCase):
    """Integration tests for the dialogue-unlikelihood agents and zoo models.

    The per-model tests previously repeated the same option dict and the
    same FAST_MODE ppl/F1 checks sixteen times; the shared structure is
    factored into the helpers below, with every released constant preserved.
    """

    def _check_zoo_model(self, opt, fast_scores, full_scores):
        """Evaluate a released model and verify its recorded ppl / F1 scores.

        ``fast_scores`` / ``full_scores`` are (ppl, f1) pairs: the former is
        expected when FAST_MODE evaluates a validation subset, the latter on
        the full validation set.
        """
        valid, _ = testing_utils.eval_model(opt, skip_test=True)
        ppl, f1 = fast_scores if FAST_MODE else full_scores
        self.assertAlmostEqual(valid['ppl'], ppl, delta=0.1)
        self.assertAlmostEqual(valid['f1'], f1, delta=0.0002)

    def _repeat_opt(self, model_file, task, **extra):
        """Eval options shared by all RepetitionUnlikelihood zoo models."""
        opt = {
            'model_file': model_file,
            'model': 'projects.dialogue_unlikelihood.agents:RepetitionUnlikelihoodAgent',
            'task': task,
            'beam_size': 1,
            'batchsize': 64,
            'num_examples': NUM_EXAMPLES,
        }
        opt.update(extra)
        return opt

    def _vocab_opt(self, model_file):
        """Eval options shared by all vocab-unlikelihood zoo models."""
        return {
            'model_file': model_file,
            'model': 'projects.dialogue_unlikelihood.agents:TransformerSequenceVocabUnlikelihoodAgent',
            'task': 'convai2',
            'beam_size': 1,
            'batchsize': 12,
            'num_examples': NUM_EXAMPLES,
            'skip_generation': False,
        }

    def test_train_model_repeat_ul(self):
        """
        Check the training script doesn't crash.
        """
        opt = {
            'model': 'projects.dialogue_unlikelihood.agents:RepetitionUnlikelihoodAgent',
            'load_from_checkpoint': False,
            'task': 'convai2',
            'max_train_time': 120,
            'validation_max_exs': 128,
            'batchsize': 16,
            'truncate': 32,
            'short_final_eval': True,
        }
        testing_utils.train_model(opt)

    def test_train_model_vocab_ul(self):
        """
        Check the training script doesn't crash.
        """
        with testing_utils.tempdir() as tmpdir:
            # The vocab agent needs an on-disk token-frequency table.
            fp = os.path.join(tmpdir, "counts.txt")
            with open(fp, "w") as f:
                f.write(
                    '{"word": "test", "word_id": 0, "count": 1, "prob": 1, "cumprob": 1, "bin": "frequent"}'
                )

            opt = {
                'model': 'projects.dialogue_unlikelihood.agents:TransformerSequenceVocabUnlikelihoodAgent',
                'load_from_checkpoint': False,
                'task': 'convai2',
                'max_train_time': 120,
                'validation_max_exs': 128,
                'batchsize': 16,
                'truncate': 32,
                'short_final_eval': True,
                'label_truncate': 256,
                'counts_file': fp,
            }
            testing_utils.train_model(opt)

    def test_repeat_convai_contextonly(self):
        """Verify recorded ppl and F1 for the convai2 context-only model."""
        self._check_zoo_model(
            self._repeat_opt('zoo:dialogue_unlikelihood/rep_convai2_ctxt/model', 'convai2'),
            fast_scores=(11.88, 0.2047),
            full_scores=(11.76, 0.1937),
        )

    def test_repeat_convai_labelonly(self):
        """Verify recorded ppl and F1 for the convai2 label-only model."""
        self._check_zoo_model(
            self._repeat_opt('zoo:dialogue_unlikelihood/rep_convai2_label/model', 'convai2'),
            fast_scores=(11.46, 0.2130),
            full_scores=(11.42, 0.2029),
        )

    def test_repeat_convai_contextandlabel(self):
        """Verify recorded ppl and F1 for the convai2 context+label model."""
        self._check_zoo_model(
            self._repeat_opt('zoo:dialogue_unlikelihood/rep_convai2_ctxt_and_label/model', 'convai2'),
            fast_scores=(11.98, 0.2034),
            full_scores=(11.85, 0.1925),
        )

    @unittest.skip
    def test_repeat_eli5_contextonly(self):
        """Verify recorded ppl and F1 for the eli5 context-only model."""
        self._check_zoo_model(
            self._repeat_opt('zoo:dialogue_unlikelihood/rep_eli5_ctxt/model', 'eli5'),
            fast_scores=(21.71, 0.1629),
            full_scores=(21.37, 0.1628),
        )

    @unittest.skip
    def test_repeat_eli5_labelonly(self):
        """Verify recorded ppl and F1 for the eli5 label-only model."""
        self._check_zoo_model(
            self._repeat_opt('zoo:dialogue_unlikelihood/rep_eli5_label/model', 'eli5'),
            fast_scores=(21.71, 0.1777),
            full_scores=(21.39, 0.1825),
        )

    @unittest.skip
    def test_repeat_eli5_contextandlabel(self):
        """Verify recorded ppl and F1 for the eli5 context+label model."""
        self._check_zoo_model(
            self._repeat_opt('zoo:dialogue_unlikelihood/rep_eli5_ctxt_and_label/model', 'eli5'),
            fast_scores=(22.13, 0.1805),
            full_scores=(21.80, 0.1843),
        )

    def test_repeat_wiki_contextonly(self):
        """Verify recorded ppl and F1 for the wizard-of-wikipedia ctxt model."""
        self._check_zoo_model(
            self._repeat_opt(
                'zoo:dialogue_unlikelihood/rep_wiki_ctxt/model',
                'wizard_of_wikipedia:GeneratorTeacher',
                prepend_gold_knowledge=True,
            ),
            fast_scores=(8.698, 0.3430),
            full_scores=(8.761, 0.3456),
        )

    def test_repeat_wiki_labelonly(self):
        """Verify recorded ppl and F1 for the wizard-of-wikipedia label model."""
        self._check_zoo_model(
            self._repeat_opt(
                'zoo:dialogue_unlikelihood/rep_wiki_label/model',
                'wizard_of_wikipedia:GeneratorTeacher',
                prepend_gold_knowledge=True,
            ),
            fast_scores=(8.284, 0.3744),
            full_scores=(8.326, 0.3714),
        )

    def test_repeat_wiki_contextandlabel(self):
        """Verify recorded ppl and F1 for the wizard-of-wikipedia ctxt+label model."""
        self._check_zoo_model(
            self._repeat_opt(
                'zoo:dialogue_unlikelihood/rep_wiki_ctxt_and_label/model',
                'wizard_of_wikipedia:GeneratorTeacher',
                prepend_gold_knowledge=True,
            ),
            fast_scores=(8.433, 0.3614),
            full_scores=(8.498, 0.3582),
        )

    def test_vocab_alpha1e0(self):
        """Verify recorded ppl and F1 for the vocab model with alpha = 1e0."""
        self._check_zoo_model(
            self._vocab_opt('zoo:dialogue_unlikelihood/vocab_alpha1e0/model'),
            fast_scores=(11.26, 0.2115),
            full_scores=(11.42, 0.2004),
        )

    def test_vocab_alpha1e1(self):
        """Verify recorded ppl and F1 for the vocab model with alpha = 1e1."""
        self._check_zoo_model(
            self._vocab_opt('zoo:dialogue_unlikelihood/vocab_alpha1e1/model'),
            fast_scores=(11.66, 0.2118),
            full_scores=(11.82, 0.2009),
        )

    def test_vocab_alpha1e2(self):
        """Verify recorded ppl and F1 for the vocab model with alpha = 1e2."""
        self._check_zoo_model(
            self._vocab_opt('zoo:dialogue_unlikelihood/vocab_alpha1e2/model'),
            fast_scores=(12.38, 0.1997),
            full_scores=(12.48, 0.1903),
        )

    def test_vocab_alpha1e3(self):
        """Verify recorded ppl and F1 for the vocab model with alpha = 1e3."""
        self._check_zoo_model(
            self._vocab_opt('zoo:dialogue_unlikelihood/vocab_alpha1e3/model'),
            fast_scores=(14.12, 0.1872),
            full_scores=(14.27, 0.1734),
        )
if __name__ == '__main__':
    # Allow running this test module directly: `python test_unlikelihood.py`.
    unittest.main()
| UTF-8 | Python | false | false | 13,845 | py | 32 | test_unlikelihood.py | 24 | 0.535428 | 0.494763 | 0 | 366 | 36.827869 | 108 |
AlonsoIbarra/Arquitectura-Publica-Suscribe | 11,261,404,268,065 | ac6d28fcb50c5506fcf7cc6a87d5cd93ee402556 | 2c3d33d6f0a401e385efaa75febf326f8b05288e | /contexto/Usuario.py | 333bd7f4ae5622e11d8488d8c74170a00ca7b10a | [] | no_license | https://github.com/AlonsoIbarra/Arquitectura-Publica-Suscribe | dc1eaa57072f1b02a68c6f25fed3449badb4d162 | 92d506b0d39cf6bf8ea57d422c1ab457ff5d0d28 | refs/heads/master | 2020-05-22T11:12:17.117140 | 2017-04-06T04:54:56 | 2017-04-06T04:54:56 | 84,692,944 | 1 | 1 | null | true | 2017-03-12T02:00:29 | 2017-03-12T02:00:29 | 2017-03-07T05:04:53 | 2017-03-07T05:04:51 | 9 | 0 | 0 | 0 | null | null | null | class Usuario():
idUsuario = -1
nombre = ""
contrasena = ""
tipo = 0
def __init__(self):
pass
| UTF-8 | Python | false | false | 123 | py | 31 | Usuario.py | 28 | 0.471545 | 0.455285 | 0 | 8 | 14.375 | 23 |
chybot/crawler | 17,798,344,509,404 | d27c791c20928306626c24e1791fea5083e8f3ef | 7254bcbd4c1d48c5fa3c818767af361a6bd7f651 | /qyxx_all/guangdongs/CrawlerSzxy.py | 4fc7ea61485783722e117316e58dd3ea3173e4fa | [] | no_license | https://github.com/chybot/crawler | 16d939fa30da4fc83d49a80fda1e7269182db2fe | f04300e329768eb38e97b992f9fe065fa300e13d | refs/heads/master | 2021-01-11T00:14:02.066929 | 2016-08-23T13:52:43 | 2016-08-23T13:52:43 | 70,574,730 | 1 | 0 | null | true | 2016-10-11T08:54:03 | 2016-10-11T08:54:02 | 2016-10-10T07:04:16 | 2016-08-23T13:54:51 | 1,659 | 0 | 0 | 0 | null | null | null | # -*- coding: utf-8 -*-
# Created by David on 2016/5/9.
import sys
import random
import time
import re
import urlparse
import PyV8
reload(sys)
from qyxx_all.CrawlerBase import CrawlerBase
from lxml import etree
from qyxx_all.ModuleManager import Module,Event,Iterator,Adapter,Bypass,Sleep
from qyxx_all.util.crawler_util import CrawlerRunMode, InputType, OutputType, EventType, OutputParameterShowUpType
class CrawlerSzxy(CrawlerBase):
    """Crawler for company data on the Shenzhen Credit site (szcredit.com.cn).

    Registers a pipeline of Module stages for the company-adapter run mode:
    basic info -> detail pages -> annual-report list -> annual reports ->
    result collection.  Python 2 code (see ``reload(sys)`` at module top).
    """
    def __init__(self, pinyin, crawler_master):
        self.crawler_master = crawler_master
        config_dict = dict()
        # Stage builders, executed in order for the company-adapter mode.
        config_dict[CrawlerRunMode.COMPANY_ADAPTER] = [self.initConfigBaseInfo, self.initDetailList, self.initNbList, self.initNb, self.initResultCollect]
        CrawlerBase.__init__(self, pinyin, config_dict, None, None)
        self.initConfig()
        pass
    def initConfigBaseInfo(self):
        """Build the stage that fetches the company's basic-information page."""
        module = Module(self.crawler_master.visitJbxx, u"基本信息")
        adapter = Adapter({"source": u"深圳信用网"}, u"深圳信用网")
        module.addAdapter(adapter)
        module.appendUrl("company_url")
        # Browser-like headers; Referer points at the Guangdong GSXT portal.
        module.appendHeaders({'Host': 'www.szcredit.com.cn',
                              'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:46.0) Gecko/20100101 Firefox/46.0',
                              'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                              'Accept-Language': 'zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3',
                              'Accept-Encoding': 'gzip, deflate',
                              'Referer': 'http://gsxt.gdgs.gov.cn/aiccips/CheckEntContext/showInfo.html',
                              'Connection': 'keep-alive'})
        def getRid(company_url):
            # NOTE(review): parse_qs is applied to the full URL rather than
            # just its query component; confirm it extracts the intended rid.
            # (Python 2: dict.values() returns an indexable list.)
            para_dict = urlparse.parse_qs(company_url)
            val_list = para_dict.values()
            if not val_list or not val_list[0]:
                return None
            return val_list[0][0]
        module.appendOutput(name="rid", type=OutputType.FUNCTION, function=getRid)
        module.appendOutput(name="detail_list", xpath=".//table//tr/td/a/@href", type=OutputType.LIST, show_up=OutputParameterShowUpType.OPTIONAL)
        module.addSleep(Sleep(10))
        self.module_manager.appendSubModule(module, True)
    def initDetailList(self):
        """Iterate over the detail-page URLs collected by the basic-info stage."""
        iterator = Iterator(seeds="detail_list", param_name="url")
        module = Module(iterator=iterator, name=u"遍历详情列表")
        self.module_manager.appendSubModule(module, True)
        module.addSleep(Sleep(10))
        self.initDetail(module)
    def initDetail(self, module_super):
        """Fetch one detail page per relative detail URL."""
        module = Module(self.crawler_master.visitGdxq, u"抓取详情")
        module.appendUrl(lambda url: "http://www.szcredit.com.cn/web/GSZJGSPT/%s" % url.lstrip('/'))
        module.addSleep(Sleep(10))
        module_super.appendSubModule(module)
    def initNbList(self):
        """Fetch the annual-report list page for the company's rid."""
        module = Module(self.crawler_master.visitQynbList, u"抓取年报列表")
        module.appendUrl(lambda rid: "http://www.szcredit.com.cn/web/GSZJGSPT/QynbDetail.aspx?rid=%s" % rid)
        module.appendOutput("nb_list", ".//table//td/a", OutputType.LIST, show_up=OutputParameterShowUpType.OPTIONAL)
        module.addSleep(Sleep(10))
        self.module_manager.appendSubModule(module)
    def initNb(self):
        """Iterate over each annual-report link found on the list page."""
        iterator = Iterator(seeds="nb_list", param_name="nb")
        module = Module(iterator=iterator, name=u"遍历年报列表")
        self.module_manager.appendSubModule(module, True)
        self.initNbOne(module)
        self.initNbBasx(module)
    def initNbOne(self, module_super):
        """Fetch one annual report; bypass to the filings stage on failure."""
        module = Module(self.crawler_master.visitQynb, u"抓取企业年报信息")
        def prepare(nb):
            # Split the anchor element into its href and a cleaned report
            # name (whitespace and the year-report suffix removed).
            mv_dict = dict()
            mv_dict['nb_url'] = ''.join(nb.xpath('@href')).replace(' ','').replace('\t','')
            mv_dict['nb_name'] = ''.join(nb.xpath('text()')).replace(u'年度报告','').strip()
            return mv_dict
        module.appendInput(input_type=InputType.FUNCTION, input_value=prepare)
        module.appendUrl("nb_url")
        def bypassFun(html):
            # Bypass when the page is empty or lacks the regulator's name,
            # i.e. the annual-report page did not load properly.
            if not html or u'深圳市市场监督管理局' not in html:
                return True
            return False
        module.appendOutput(name="nb_post_data", type=OutputType.FUNCTION, function=self.getPostData, show_up=OutputParameterShowUpType.OPTIONAL)
        module.addSleep(Sleep(10))
        module_super.appendSubModule(module, True)
        module.appendBypass(Bypass(condition_fuc=bypassFun, module_id="module_nb_basx", range_global=True))
    def initNbBasx(self, module_super):
        """POST the captured hidden fields to fetch the filings (备案事项) tab."""
        module = Module(self.crawler_master.visitQynb, u'抓取企业年报-备案事项')
        module.module_id = "module_nb_basx"
        module.appendUrl(lambda rid: "http://app02.szaic.gov.cn/NB.WebUI/WebPages/Publicity/NBInfo.aspx?rid=%s" % rid)
        module.appendPostData("nb_post_data")
        module.appendOutput(name="nb_post_data", type=OutputType.FUNCTION, function=self.getPostData, show_up=OutputParameterShowUpType.OPTIONAL)
        module.addSleep(Sleep(10))
        module_super.appendSubModule(module, True)
    def initResultCollect(self):
        """Final stage: hand all collected outputs to the result collector."""
        module = Module(self.crawler_master.resultCollect, u"结果收集")
        self.module_manager.appendSubModule(module)
    def getPostData(self, html):
        """Return a dict of all hidden-input id -> value pairs found in *html*.

        These are the ASP.NET viewstate fields that must be replayed in the
        next POST; returns None for empty pages or pages without hidden
        inputs.
        """
        if not html:
            return None
        data_dict = dict()
        tree = etree.HTML(html)
        ih_list = tree.xpath('.//input[@type="hidden"]')
        if not ih_list:
            return None
        for ih in ih_list:
            key = ''.join(ih.xpath('@id'))
            val = ''.join(ih.xpath('@value'))
            data_dict[key] = val
        return data_dict
if __name__ == "__main__":
pass
| UTF-8 | Python | false | false | 5,747 | py | 217 | CrawlerSzxy.py | 205 | 0.63234 | 0.622524 | 0 | 123 | 44.552846 | 154 |
StaticSpark/uml-auto-assessment | 7,705,171,362,269 | 111c0c4a1afb52eb847af00e18904784aa2c1916 | f8cf23404754196df90ae270d0b62ad11973816b | /COutputComparisonStandardLibrary/p68/configuration.py | 47c0aa243d10a7cfafbaa5754fe800598ffcbf2b | [] | no_license | https://github.com/StaticSpark/uml-auto-assessment | 5d542b26bdb2605fbc39fea90f566ba591b57dab | f3764e0b7a11af5e28ee11ae298db92a31909fe2 | refs/heads/master | 2021-01-24T19:37:07.788987 | 2012-08-30T16:46:04 | 2012-08-30T16:46:04 | 67,462,941 | 1 | 0 | null | true | 2016-09-06T01:38:09 | 2016-09-06T01:38:09 | 2016-09-06T01:37:03 | 2012-09-10T19:44:25 | 10,936 | 0 | 0 | 0 | null | null | null | substrings = ["day!", "great", "a", "is", "say,", "I", "This,"]
compile_command = "gcc -ansi -Wall p68.c"
number_of_test_cases = 1
# FORMAT: tests = [(stdin, args, reference_output_strings, files, hints), ...]
tests = [("This, I say, is a great day!.", "./a.out", "day! great a is say, I This,", "", ""),]
| UTF-8 | Python | false | false | 311 | py | 3,612 | configuration.py | 103 | 0.562701 | 0.553055 | 0 | 8 | 37.75 | 95 |
HarrisonMS/JsChallenges | 3,178,275,833,354 | 0f6e53068a1e60f10960a557835b65adf50878e9 | 1b1cd03874cdec2bb1b9e8f5995d85aeb6c6e8e4 | /Python/codewars/char_code_calc.py | 33a9a6ae1e01005b2c1685d745ccd259ee24f5ae | [] | no_license | https://github.com/HarrisonMS/JsChallenges | db5c34070c7381bfa60c022a931cb8eaa3e1b5c4 | 4a3e8a1029cd8de494f9d65b38cb05e2efb6a130 | refs/heads/master | 2023-08-11T02:33:46.660908 | 2021-10-01T20:47:18 | 2021-10-01T20:47:18 | 262,849,169 | 0 | 0 | null | false | 2020-05-27T02:04:08 | 2020-05-10T18:22:30 | 2020-05-23T17:54:42 | 2020-05-27T02:04:08 | 5 | 0 | 0 | 0 | JavaScript | false | false | def calc(x):
with7s = ''
char_ords = [ord(i) for i in x]
for i in char_ords:
with7s += str(i)
without7s = with7s.replace("7", "1")
return sum([int(i) for i in with7s]) - sum([int(i) for i in without7s])
def calc(x):
    """Difference between the digit sum of the concatenated char codes of *x*
    and the same sum with every '7' digit replaced by '1'."""
    code_digits = "".join(str(ord(ch)) for ch in x)
    softened = code_digits.replace("7", "1")
    return sum(map(int, code_digits)) - sum(map(int, softened))
def calc(x):
    """Six points per '7' digit in the concatenated char codes of *x*
    (equivalent to the digit-sum difference when 7s become 1s)."""
    concatenated = ''.join(str(ord(ch)) for ch in x)
    return concatenated.count('7') * 6
MFiveZJU/auditing-system | 1,151,051,261,618 | 8dbba991193a1f1d7f76ea67f8b3f5ecd23cc5e4 | 5b67f8f0864bad7bc5d9b632f5ab5e8ac506f159 | /auditor_management/views.py | 1653da0cec31c4a8140a338172e1563713aec3e3 | [] | no_license | https://github.com/MFiveZJU/auditing-system | 60828b1841b49959c9f7f812d877f0e0150a2592 | a7b3953bf2c57d6b2d1ba9b07e0d2e13762760b3 | refs/heads/master | 2020-12-29T00:42:04.389756 | 2015-05-27T05:32:30 | 2015-05-27T05:32:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.shortcuts import render
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from auditor_management.forms.auditor_signup_form import AuditorSignupForm
from auditor_management.forms.auditor_signin_form import AuditorSigninForm
def index(request):
    """Render the sign-in page as the landing view of auditor_management."""
    # return HttpResponse("Hello, world. You're at the auditor_management index.")
    return render(request, 'auditor_management/signin.html', {'form': AuditorSigninForm()})
def signup(request):
    """Handle auditor registration.

    GET renders an empty signup form; a valid POST runs the signup
    routine and redirects to the test page. Invalid POSTs fall through
    and re-render the bound form.
    """
    if request.method == 'POST':
        form = AuditorSignupForm(request.POST)
        if form.is_valid():
            # Bug fix: auditor_signup takes the request argument; calling
            # it with no arguments raised TypeError at runtime.
            auditor_signup(request)
            return HttpResponseRedirect('/auditor_management/test/')
    else:
        form = AuditorSignupForm()
    return render(request, 'auditor_management/signup.html', {'form': form})
def auditor_signup(request):
    """Stub for the actual auditor-creation logic (not implemented yet)."""
    a = 5
    # request.POST['foobar']
    # if (Auditor().)
def signin(request):
    """Render the sign-in form; redirect to the test page on a valid POST.

    NOTE(review): only form validity is checked here — no credential
    verification is performed; confirm this is intentional (WIP).
    """
    if request.method == 'POST':
        form = AuditorSigninForm(request.POST)
        if form.is_valid():
            return HttpResponseRedirect('/auditor_management/test/')
    else:
        form = AuditorSigninForm()
    return render(request, 'auditor_management/signin.html', {'form': form})
def test(request):
    """Plain-text endpoint used as the redirect target after signup/signin."""
    return HttpResponse("test succeed.")
| UTF-8 | Python | false | false | 1,285 | py | 10 | views.py | 7 | 0.697276 | 0.696498 | 0 | 42 | 29.595238 | 91 |
eldt/Hackathon-GCP | 2,345,052,182,617 | 9e0c6100d2a9f79567b5abbfed299e4ab46d1e73 | 8ed44a7f7ebad36990325bd211dd1a34508fb66b | /parents.py | 1519d9559abf5231d5437493f2ea36d8f7946039 | [] | no_license | https://github.com/eldt/Hackathon-GCP | b34e25cff40c6b7af8af4342392adad9246e1d90 | 15931a2c7b6e399f07a6c37e7affe8413629a545 | refs/heads/master | 2020-04-26T12:53:48.127571 | 2019-03-03T17:00:21 | 2019-03-03T17:00:21 | 173,563,909 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Checks if an input string is valid
def userinput(string):
    """Prompt with *string* until a non-empty value is entered.

    Typing "leave" aborts the loop; whatever was entered (possibly
    "leave" itself) is returned unchanged.
    """
    response = ""
    while not response:
        response = input(string)
        if response == "":
            print("nothing inputed, try again")
        elif response == "leave":
            break
    return response
# Looks up an ID in IDS.txt; returns (found, matching_or_last_line).
def checkvalid(ID):
    """Scan IDS.txt for *ID*.

    Returns (True, line) for the first line containing the ID, otherwise
    (False, last_line_read).

    NOTE(review): if IDS.txt is empty, `line` is never bound and the final
    return raises NameError — confirm the file is guaranteed non-empty.
    NOTE(review): the early `return True, line` skips infile.close(),
    leaking the file handle.
    """
    find = -1
    empty = 0
    # `empty` is set as soon as one non-matching line is seen, so the
    # outer while never iterates more than once in practice.
    while((find == -1) and not(empty)):
        infile = open("IDS.txt", "r")
        for line in infile:
            find = line.find(ID)
            if(find != -1):
                print("ID was found")
                return True, line
            else:
                empty = 1
        infile.close()
        if(find == -1):
            print("ID is was not found, try again or leave.")
            return False, line
# Primary function, prompts login with a student ID, then lists options.
def ParentScript():
    """Interactive console for parents: log in with a student ID, then
    loop over a small menu until the user logs out or types "leave".

    NOTE(review): typing "leave" at the login prompt breaks the inner
    loop with `line` still empty, so the split/unpack below raises —
    confirm whether "leave" is meant to exit cleanly here.
    NOTE(review): a non-numeric ID makes int(word) raise ValueError.
    """
    word = ""
    line = ""
    stu = ""
    par = ""
    ID = ""
    valid = False
    while(word != "leave"):
        while(not(valid)):
            word = userinput("Please login with your student's ID: ")
            if(word == "leave"):
                break
            else:
                # Zero-pad the ID to six digits before looking it up.
                word = "{0:0>6}".format(int(word))
                valid, line = checkvalid(word)
        # IDS.txt lines are "<student> <id> <parent>"; strip the newline.
        stu, ID, par = line[:-1].split(" ")
        print("")
        print("Your (the parent's) name: " + par)
        print("StudentName: " + stu)
        print("Student ID: " + ID)
        word = userinput("What do you want to do?\n[1] Check notifications\n[2] Edit notification preferences\n[0] Logout\n")
        if(word == '1'):
            print("This is where notifications should be displayed. The code is still in progress.")
        if(word == '2'):
            print("This is where you can choose which notifications you want to recieve. The code is still in progress.")
        if(word == '0'):
            break
def main():
    """Entry point: run the interactive parent console."""
    ParentScript()
# Run the console only when executed as a script, not on import.
if(__name__ == "__main__"):
    main()
| UTF-8 | Python | false | false | 2,019 | py | 8 | parents.py | 7 | 0.491828 | 0.483903 | 0 | 64 | 29.546875 | 125 |
FreeRangeLobster/TheKraken | 13,099,650,263,283 | b5bfe47ce1ae2ed44deb0aa5c04c8d08af817944 | 6d1e52ea9108a8e63475c61b768ed9a5e61b683d | /SerialPort/serialportmain.py | 9cd68672e106bb1ad03d8c0e3279578c82659b72 | [] | no_license | https://github.com/FreeRangeLobster/TheKraken | 43a0aa72479afaa9fbad14e81a4c5c220517f299 | 572ac3035f7fd558175654097aaf7b0127187adf | refs/heads/master | 2020-07-01T19:04:20.932758 | 2019-11-15T15:27:27 | 2019-11-15T15:27:27 | 201,265,133 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
import sys

from PyQt5 import QtCore, QtGui, QtWidgets

import serialport.serialportwindow
def main():
    """Create the Qt application, show the serial-port window and run the
    event loop until the window is closed."""
    # Bug fix: under PyQt5, QApplication lives in QtWidgets, not QtGui
    # (QtGui.QApplication was the PyQt4 location and raises AttributeError).
    app = QtWidgets.QApplication(sys.argv)
    win = serialport.serialportwindow.SerialPortWindow()
    win.show()
    # exec_() blocks until quit; propagate its status as the exit code.
    sys.exit(app.exec_())
# Launch the GUI only when run as a script.
if __name__ == '__main__':
    main()
chenwangwww/ppython | 11,759,620,462,560 | 66be3b22424f6c74c7d8ec9775f9c62e52a46253 | 55bcc8b6d5eb2878405f71ad5559cc3c80f5edc9 | /nlpAnylise/nlpctr.py | 6356f211510c2267beaeef02d06e4180ddf28377 | [
"Apache-2.0"
] | permissive | https://github.com/chenwangwww/ppython | 2989026d5ef6d8a733e8b62ef6d4d7bcd2783b38 | 13a2f1193714133701743bfdf1a8add61a29dd4c | refs/heads/master | 2023-05-03T01:15:01.828534 | 2021-05-13T06:13:55 | 2021-05-13T06:13:55 | 343,254,494 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from ltp import LTP
ltp = LTP()
class NlpCtr(object):
    """Wrapper around LTP dependency/semantic parses that extracts
    subject/predicate/object style structures from Chinese sentences."""
    def __init__(self):
        pass
    def sdp_trans_res(self, strArr, sdpArr, posArr):
        """Merge semantic-dependency-parse triples with tokens and POS tags.

        A virtual 'ROOT' token is prepended so LTP's 1-based head indices
        line up with list positions. Only the first sentence of the batch
        (index 0) is used. Returns a list of
        {'dep', 'gov', 'type', 'pos'} dicts.
        """
        tempstrArr = strArr[0]
        tempstrArr.insert(0, 'ROOT')
        tempposArr = posArr[0]
        tempposArr.insert(0, 'ROOT')
        tempsdpArr = sdpArr[0]
        tempArr = []
        for item in tempsdpArr:
            # item is (dependent_index, governor_index, relation_label)
            dic = {
                'dep': tempstrArr[item[0]],
                'gov': tempstrArr[item[1]],
                'type': item[2],
                'pos': tempposArr[item[0]]
            }
            tempArr.append(dic)
        return tempArr
    def trans_result(self, strArr, depArr, posArr):
        """Same merging as sdp_trans_res, but for the syntactic
        dependency parse (dep) instead of the semantic one."""
        tempstrArr = strArr[0]
        tempstrArr.insert(0, 'ROOT')
        tempposArr = posArr[0]
        tempposArr.insert(0, 'ROOT')
        tempdepArr = depArr[0]
        tempArr = []
        for item in tempdepArr:
            dic = {
                'dep': tempstrArr[item[0]],
                'gov': tempstrArr[item[1]],
                'type': item[2],
                'pos': tempposArr[item[0]]
            }
            tempArr.append(dic)
        return tempArr
    def getHED(self, words):
        """Return the sentence head: the word governed by ROOT with
        relation 'HED' (last match wins), or None."""
        root = None
        for word in words:
            if word['gov'] == 'ROOT' and word['type'] == 'HED':
                root = word['dep']
        return root
    def getROOT(self, words):
        """Semantic-parse counterpart of getHED (relation label 'Root')."""
        root = None
        for word in words:
            if word['gov'] == 'ROOT' and word['type'] == 'Root':
                root = word['dep']
        return root
    def getWord(self, words, GOV, wType):
        """Return the last dependent of *GOV* with relation *wType*, or None."""
        sbv = None
        for word in words:
            if word['type'] == wType and word['gov'] == GOV:
                sbv = word['dep']
        return sbv
    def getWordList(self, words, GOV, wType):
        """Return all dependents of *GOV* with relation *wType*,
        or None when there are none."""
        slist = []
        for word in words:
            if word['type'] == wType and word['gov'] == GOV:
                slist.append(word['dep'])
        res = slist if len(slist) > 0 else None
        return res
    def getsdpList(self, words, wType, GOV = None):
        """Collect semantic-parse matches for relation *wType*.

        NOTE(review): with GOV=None the whole word dict is appended, but
        with a GOV only the 'dep' string is — callers get mixed element
        types depending on the call form; confirm this is intended.
        """
        slist = []
        for word in words:
            if word['type'] == wType:
                if GOV is None:
                    slist.append(word)
                elif GOV == word['gov']:
                    slist.append(word['dep'])
        res = slist if len(slist) > 0 else None
        return res
    def get_att_pob(self, words, att_sbv):
        """For each subject attribute, join its ADV + POB dependents with
        the attribute itself; returns the list of joined strings or None."""
        resl = []
        if att_sbv is not None:
            for a in att_sbv:
                word1 = self.getWord(words, a, 'ADV')
                word2 = self.getWord(words, word1, 'POB')
                res = word1 + word2 + a if word1 is not None and word2 is not None else None
                if res:
                    resl.append(res)
        return None if len(resl) == 0 else resl
    def abstractSentence(self, sentence):
        """Parse one sentence with LTP and extract a subject/pred/object
        summary dict, or None when no head is found."""
        dic = None
        seg, hidden = ltp.seg([sentence])
        dep = ltp.dep(hidden)
        pos = ltp.pos(hidden)
        words = self.trans_result(seg, dep, pos)
        if len(words) > 0:
            hed = self.getHED(words)
            if hed is not None:
                sbv = self.getWord(words, hed, 'SBV')
                vob = self.getWord(words, hed, 'VOB')
                fob = self.getWord(words, hed, 'FOB')
                adv = self.getWordList(words, hed, 'ADV')
                if adv is not None:
                    for a in adv:
                        pob = self.getWord(words, a, 'POB')
                        # Passive marker: its POB becomes the subject
                        # when no SBV was found.
                        if a == '被' and sbv is None:
                            sbv = pob
                att_sbv = self.getWordList(words, sbv, 'ATT')
                attSS = self.get_att_pob(words, att_sbv)
                att_vob = self.getWordList(words, vob, 'ATT')
                # Prefer VOB over FOB as the object.
                obj = list(filter(lambda x: x is not None, [vob, fob]))
                dic = {
                    'subject': sbv,
                    'pred': hed,
                    'object': obj[0] if len(obj) > 0 else None,
                    'attS': att_sbv,
                    'attSS': attSS,
                    'attO': att_vob,
                    'adv': adv,
                }
        return dic
    def get_not_none(self, li):
        """Return the first non-None element of *li*, else None."""
        for i in li:
            if i is not None:
                return i
        return None
    def abstractSentence2(self, sentence):
        """Like abstractSentence, but also collects coordination (COO)
        lists for the head, subject and object."""
        dic = None
        seg, hidden = ltp.seg([sentence])
        dep = ltp.dep(hidden)
        pos = ltp.pos(hidden)
        words = self.trans_result(seg, dep, pos)
        if len(words) > 0:
            hed = self.getHED(words)
            if hed is not None:
                coo_list = self.getWordList(words, hed, 'COO')
                sbv = self.getWord(words, hed, 'SBV')
                vob = self.getWord(words, hed, 'VOB')
                fob = self.getWord(words, hed, 'FOB')
                adv_list = self.getWordList(words, hed, 'ADV')
                pob = None
                if adv_list is not None:
                    for a in adv_list:
                        # Passive marker: remember its POB as a fallback subject.
                        if a == '被':
                            pob = self.getWord(words, a, 'POB')
                att_sbv = self.getWordList(words, sbv, 'ATT')
                attSS = self.get_att_pob(words, att_sbv)
                att_vob = self.getWordList(words, vob, 'ATT')
                subject = self.get_not_none([sbv, pob])
                object = self.get_not_none([vob, fob])
                subject_coo_list = self.getWordList(words, subject, 'COO') if subject is not None else None
                object_coo_list = self.getWordList(words, object, 'COO') if object is not None else None
                dic = {
                    'subject': subject,
                    'pred': hed,
                    'object': object,
                    'attS': att_sbv,
                    'attSS': attSS,
                    'attO': att_vob,
                    'adv': adv_list,
                    'coo_list': coo_list,
                    'subject_coo_list': subject_coo_list,
                    'object_coo_list': object_coo_list,
                }
        return dic
    def abst_sent_sdp(self, sentence, abstDic):
        """Extract semantic roles (agent, patient, tool, ...) from the
        semantic dependency parse.

        NOTE(review): the abstDic parameter is currently unused.
        """
        dic = None
        seg, hidden = ltp.seg([sentence])
        sdp = ltp.sdp(hidden)
        pos = ltp.pos(hidden)
        words_sdp = self.sdp_trans_res(seg, sdp, pos)
        if len(words_sdp) > 0:
            root = self.getROOT(words_sdp)
            if root is not None:
                agt = self.getsdpList(words_sdp, 'AGT', root)
                pat = self.getsdpList(words_sdp, 'PAT', root)
                tool = self.getsdpList(words_sdp, 'TOOL', root)
                loc = self.getsdpList(words_sdp, 'LOC')
                time = self.getsdpList(words_sdp, 'TIME', root)
                matl = self.getsdpList(words_sdp, 'MATL', root)
                mann = self.getsdpList(words_sdp, 'MANN', root)
                sco = self.getsdpList(words_sdp, 'SCO', root)
                reas = self.getsdpList(words_sdp, 'REAS', root)
                meas = self.getsdpList(words_sdp, 'MEAS')
                dic = {
                    'agt': agt,
                    'pat': pat,
                    'root': root,
                    'tool': tool,
                    'loc': loc,
                    'time': time,
                    'matl': matl,
                    'mann': mann,
                    'sco': sco,
                    'reas': reas,
                    'meas': meas,
                }
        return dic
    def abst_sent(self, sentence):
        """Debug helper: run all three analyses on one sentence and print
        their results."""
        res1 = self.abstractSentence2(sentence)
        res2 = self.abst_sent_sdp(sentence, res1)
        res3 = self.nlp_to_predlogistics(res1)
        print(res1)
        print(res2)
        print(res3)
    def nlp_to_predlogistics(self, dic):
        """Convert an abstractSentence2 dict into a crude predicate-logic
        form: [[quantifier, (action-tuples...)], ...]."""
        res = []
        actions = []
        # '部分' ("some") as subject maps to an existential quantifier.
        quantifier = ('some', 'x') if dic['subject'] == '部分' else None
        if dic['attSS'] is not None:
            for i in dic['attSS']:
                actions.append((i, 'x'))
                actions.append('and')
        if dic['pred'] is not None and dic['object'] is not None:
            actions.append((dic['pred'] + dic['object'], 'x'))
        res.append([quantifier, tuple(actions)])
        return res
    def abstractComplex(self, sentence):
        """Split on ASCII commas and analyse each clause; clauses without
        a subject inherit the first clause's subject.

        NOTE(review): if the first clause yields None, dic['subject']
        below raises TypeError — confirm inputs always parse.
        """
        dic_arr = []
        sents = sentence.split(',')
        sent_first = sents[0]
        dic = self.abstractSentence(sent_first)
        dic_arr.append(dic)
        for i in range(len(sents)):
            if i != 0:
                sent = sents[i]
                dic_other = self.abstractSentence(sent)
                if dic_other['subject'] is None:
                    dic_other['subject'] = dic['subject']
                dic_arr.append(dic_other)
        print(dic_arr)
class ComplexSentenceMgr(object):
    """Handles compound sentences by stripping known connective keywords
    before delegating clause analysis to NlpCtr."""
    def __init__(self):
        self._nlpCtr = NlpCtr()
        # keyword pairs per pattern; 'binglie' = coordination ("not X but Y").
        self.keywords = {
            'binglie': [('不要', '而要')]
        }
    def getKeywords(self, subsent, type, index):
        """Remove the *index*-th keyword of pattern *type* from *subsent*;
        returns the stripped clause, or None when no keyword was found."""
        res = None
        for item in self.keywords[type]:
            inde = subsent.find(item[index])
            if inde > -1:
                res = subsent[0:inde] + subsent[inde + len(item[index]):]
        return res
    def abstractBingLie(self, sentence):
        """Split on ASCII commas and strip the coordination keywords
        (first member, then second member) from each clause."""
        arr = sentence.split(',')
        for i in range(len(arr)):
            item = arr[i]
            res = self.getKeywords(item, 'binglie', 0)
            if res is not None:
                arr[i] = res
            else:
                res = self.getKeywords(item, 'binglie', 1)
                if res is not None:
                    arr[i] = res
        return arr
    def abstractSentence(self, sentence):
        """Analyse each stripped clause; clauses without a subject inherit
        the first clause's subject. Prints the resulting dict list."""
        dic_arr = []
        sents = self.abstractBingLie(sentence)
        sent_first = sents[0]
        dic = self._nlpCtr.abstractSentence(sent_first)
        dic_arr.append(dic)
        for i in range(len(sents)):
            if i != 0:
                sent = sents[i]
                dic_other = self._nlpCtr.abstractSentence(sent)
                if dic_other['subject'] is None:
                    dic_other['subject'] = dic['subject']
                dic_arr.append(dic_other)
        print(dic_arr)
# Module-level smoke test: instantiate the controller and run one sample
# sentence; the commented lines are alternative manual test inputs.
nlpCtr = NlpCtr()
# nlpCtr.abstractSentence('妈妈用电饭煲煲汤。')
# nlpCtr.abstractSentence('我明天去哈尔滨。')
# nlpCtr.abstractSentence('小明昨天在哈尔滨生下一个小男孩。')
# nlpCtr.abst_sent('小明昨天在哈尔滨生下一个小男孩。')
# nlpCtr.abst_sent('学生们用纸折飞机。')
# nlpCtr.abst_sent('学生们用铅笔写字。')
# nlpCtr.abst_sent('军士齐声高喊。')
# nlpCtr.abst_sent('数学方面他是专家。')
nlpCtr.abst_sent('他因为酒驾被交警拘留了。')
# nlpCtr.abst_sent('周一早上升旗。')
# nlpCtr.abst_sent('一年有365天。')
# nlpCtr.abst_sent('在北京打工的不全是外地人。')
# nlpCtr.abst_sent('在北京打工的部分是外地人')
# nlpCtr.abst_sent('学术委员会的每个成员都是博士并且是教授。')
# nlpCtr.abstractComplex('我们不要空话,而要行动。')
# mgr = ComplexSentenceMgr()
# mgr.abstractSentence('我们不要空话,而要行动。')
Kvansipto/Stepik_course_final_task | 7,834,020,397,143 | 1404aba185e464bcfd025524bae61d63f7215958 | 660fed2c1a3b071c6c093a1f269aead78c506825 | /pages/product_page.py | d3891535c1a1c17de5d7f4228b58acb2d3b9249a | [] | no_license | https://github.com/Kvansipto/Stepik_course_final_task | d492eac6b0fc1c66469531758798b8977baa1162 | 3c52b5557b116d1e20400ea592f1ca94bccf4589 | refs/heads/main | 2023-07-17T18:45:08.830084 | 2021-09-06T17:03:48 | 2021-09-06T17:03:48 | 401,321,833 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from .base_page import BasePage
from .locators import ProductPageLocators
class ProductPage(BasePage):
    """Page object for a product page: basket actions and assertions on
    the resulting alert messages."""
    def add_to_bucket(self):
        """Click the add-to-basket button."""
        basket_link = self.browser.find_element(*ProductPageLocators.BASKET_BUTTON)
        basket_link.click()
    def check_product_name(self):
        """Assert the alert shows the same product name as the page."""
        product_name = self.browser.find_element(*ProductPageLocators.PRODUCT_NAME).text
        alert_product_name = self.browser.find_element(*ProductPageLocators.PRODUCT_NAME_ALERT).text
        assert product_name == alert_product_name, "Наименование товара в аллерте не совпадает с добавленным в корзину"
    def check_product_price(self):
        """Assert the basket total in the alert equals the product price."""
        product_price = self.browser.find_element(*ProductPageLocators.PRODUCT_PRICE).text
        alert_product_price = self.browser.find_element(*ProductPageLocators.BASKET_AMOUNT_ALERT).text
        assert product_price == alert_product_price, "Стоимость корзины не совпадает с ценой товара"
    def should_not_be_success_message(self):
        """Assert no success message is currently present."""
        assert self.is_not_element_present(*ProductPageLocators.SUCCESS_MESSAGE), \
            "Success message is presented, but should not be"
    def should_message_be_disappeared(self):
        """Assert the success message disappears within the wait timeout."""
        assert self.is_disappeared(*ProductPageLocators.SUCCESS_MESSAGE), \
            "Success message is not disappeared, but should be"
| UTF-8 | Python | false | false | 1,396 | py | 4 | product_page.py | 3 | 0.73 | 0.73 | 0 | 26 | 49 | 119 |
genericmoniker/git-split | 9,835,475,132,423 | 0e5290e65020ddfa78c7734e71616571c13f5427 | 4141f242c4822596fb7fc8c26c972f9ac028d169 | /gitsplit/config.py | 558775e7a3f5a7757a94697d0d8efefe13b0f03e | [
"MIT"
] | permissive | https://github.com/genericmoniker/git-split | 4a2f4793659c5477c7c7df424b09c8c36c5e684d | a46a496e44f3a365579d26359ec446e09fa8cbe5 | refs/heads/master | 2021-02-07T13:20:11.782293 | 2020-08-10T23:23:32 | 2020-08-10T23:23:32 | 244,031,170 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import collections
from os import fspath
from pathlib import Path
import toml
from ranges import Range, RangeSet
class Config:
    """File split configuration."""
    def __init__(self, data: collections.abc.Mapping, base_path: Path):
        """Create a configuration.

        :param data: configuration data.
        :param base_path: base path for any relative paths in `data`.
        :raises ConfigError: if no source or no split files are given.
        """
        self.base_path = base_path
        source = data.get("source")
        if not source:
            raise ConfigError("Source file not specified in the config file.")
        self.source_file = SourceFile(base_path / source)
        # Every mapping-valued key is a split file; scalar keys like
        # "source" and "commit_no_verify" are filtered out here.
        self.split_files = [
            SplitFile(base_path / k, data[k], self.source_file.line_count)
            for k in data.keys()
            if isinstance(data[k], collections.abc.Mapping)
        ]
        if not self.split_files:
            raise ConfigError("No split files specified in the config file.")
        # Only the first starred split file is expanded to the leftover
        # lines — the break makes any further "*" entries inert.
        for split_file in self.split_files:
            if split_file.has_star:
                split_file.expand_star(
                    self.source_file.line_count,
                    (split for split in self.split_files if split != split_file),
                )
                break
        self.commit_no_verify = data.get("commit_no_verify", False)
    @classmethod
    def from_file(cls, config_file: Path) -> "Config":
        """Create a configuration from a file."""
        # Use TOML for now; other formats could be handled if desired.
        return cls.from_toml(config_file.read_text(), config_file.parent)
    @classmethod
    def from_toml(cls, toml_data: str, base_path: Path) -> "Config":
        """Create a configuration from a TOML string; relative paths
        resolve against *base_path*."""
        return cls(toml.loads(toml_data), base_path)
class ConfigError(Exception):
    """Exception raised for configuration errors (bad or missing values)."""
class SourceFile:
    """A source file to be split."""
    def __init__(self, path: Path):
        """Load *path* into memory; raises ConfigError if it is missing."""
        if not path.exists():
            raise ConfigError(f'Source file "{path}" does not exist.')
        self._path = path
        self._line_count = None
        # We save the file contents early because splitting later involves using
        # git mv on the source file, and it won't be there anymore to read.
        self._lines = path.read_text().splitlines(keepends=True)
    def __repr__(self):
        return f"{self.__class__.__name__}(path={self._path})"
    def __fspath__(self):
        # Lets os/pathlib functions accept this object as a path.
        return fspath(self._path)
    def __getattr__(self, attr):
        # Delegate any other attributes to `Path`.
        return getattr(self._path, attr)
    @property
    def line_count(self):
        # Lazily computed and cached on first access.
        if self._line_count is None:
            self._line_count = sum(1 for _ in self.lines)
        return self._line_count
    @property
    def lines(self):
        # Lines as read at construction time, newlines preserved.
        return self._lines
    def difference(self, split_file: "SplitFile") -> "SplitFile":
        """Create a SplitFile of the difference with another SplitFile.

        That is, create a SplitFile that has all of the lines of this SourceFile except
        for lines in `split_file`.
        """
        diff_split = SplitFile(self._path, {"lines": "*"}, self.line_count)
        diff_split.expand_star(self.line_count, [split_file])
        return diff_split
class SplitFile:
    """A target file for splitting into."""
    def __init__(self, path: Path, split_data: collections.abc.Mapping, max_line: int):
        """Parse the "lines" spec from *split_data* into a RangeSet;
        *max_line* bounds the valid 1-based line numbers."""
        self._path = path
        self._has_star = False
        self._lines = self._create_line_ranges(split_data, max_line)
        self._file = None
    def __repr__(self):
        return f"{self.__class__.__name__}(path={self._path})"
    def __fspath__(self):
        return fspath(self._path)
    def __contains__(self, item):
        # `line_number in split_file` checks membership in the line ranges.
        return item in self._lines
    def __enter__(self):
        self.open()
        return self
    def __exit__(self, _exc_type, _exc_value, _traceback):
        self.close()
    def __getattr__(self, attr):
        # Delegate any other attributes to `Path`.
        return getattr(self._path, attr)
    def _create_line_ranges(self, split_data: collections.abc.Mapping, max_line: int):
        """Turn a spec like "1-3,7,12-20,*" into a RangeSet of 1-based
        inclusive ranges; "*" only sets the has_star flag."""
        lines = split_data.get("lines")
        if not lines or not lines.strip():
            raise ConfigError(f'No lines specified for split file "{self._path}".')
        range_set = RangeSet()
        line_ranges = lines.split(",")
        for line_range in line_ranges:
            start, _, end = line_range.partition("-")
            if start.strip() == "*":
                self._has_star = True
                continue
            try:
                start = int(start)
                # A bare number means a single-line range.
                end = int(end) if end else start
                if not 0 < start <= max_line or not 0 < end <= max_line:
                    raise ValueError(f"Out of range (1-{max_line})")
                range_set.add(Range(start, end, include_end=True))
            except ValueError as ex:
                # Covers bad integers, reversed ranges and the bounds check.
                raise ConfigError(f'Invalid lines for split file "{self._path}": {ex}')
        return range_set
    def exists(self):
        return self._path.exists()
    @property
    def has_star(self):
        """Whether this split file has lines with "*".

        A star indicates that this split file includes all of the lines from the source
        file that aren't included in any other split file.
        """
        return self._has_star
    def expand_star(self, max_line: int, other_split_files):
        """Add to this split every source line not claimed by any of
        *other_split_files*."""
        source_file_range = Range(1, max_line, include_end=True)
        union_of_splits = RangeSet()
        for split in other_split_files:
            lines = split._lines  # pylint: disable=protected-access
            union_of_splits = union_of_splits.union(lines)
        diff = source_file_range.symmetric_difference(union_of_splits)
        self._lines.extend(diff)
    def open(self):
        # Re-opening replaces any previously open handle (truncating write).
        self.close()
        self._file = self._path.open("w")
    def close(self):
        if self._file:
            self._file.close()
            self._file = None
    def write(self, text: str):
        if not self._file:
            raise IOError("SplitFile is not open.")
        self._file.write(text)
| UTF-8 | Python | false | false | 6,106 | py | 13 | config.py | 7 | 0.584343 | 0.583524 | 0 | 182 | 32.549451 | 87 |
xuewenG/future-star-backend | 3,762,391,357,916 | ea4283d139e616544e22fbb088bcd4c416840ee9 | b0558353779c06e0c2aed7545ff10ba81014a244 | /administrator/test/generate/privilege.py | 7682aa9fbdb62e89fb5c97f50a4b33c2d2c26a95 | [] | no_license | https://github.com/xuewenG/future-star-backend | 02a1478722afc5d54989ddd1fcae3d55c730b7c7 | 9beb618820657668198282a2025cfcd6ac4be192 | refs/heads/master | 2023-05-30T14:31:05.180001 | 2020-08-14T17:17:37 | 2020-08-14T17:17:37 | 272,581,974 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """
generate privilege
:author: lishanZheng
:date: 2020/01/06
"""
import random
from administrator.models import Privilege
def get_privilege_data():
    """
    Build a random privilege dict: each permission gets value 1 or 2.

    :author: lishanZheng
    :date: 2020/01/06
    """
    return {
        field: random.randint(1, 2)
        for field in ('enrollment', 'semester', 'activity', 'student')
    }
def get_privilege():
    """
    Create and persist a Privilege row populated with random test data.

    :author: lishanZheng
    :date: 2020/01/06
    """
    privilege_data = get_privilege_data()
    # Persist via the Django ORM; returns the saved model instance.
    privilege = Privilege.objects.create(**privilege_data)
    return privilege
| UTF-8 | Python | false | false | 689 | py | 130 | privilege.py | 128 | 0.61435 | 0.566517 | 0 | 37 | 17.081081 | 58 |
Rehket/SF-CIDP | 12,369,505,855,490 | 5fbfcfdcc7f91030c45a1c11226588c93931d14c | 21e0a2c0f2f420f3ac18633b2e7998b7c3274be1 | /gob/sfdx_commands.py | f4dda741ba5ef8d48bd7d3a7a91d2df50eb09540 | [
"MIT"
] | permissive | https://github.com/Rehket/SF-CIDP | b0d310f7749388fb91d487c85bb8d99fda7e9e39 | eff10e6eaa19ac1c9e67cb23421794a9ce5464f2 | refs/heads/master | 2020-05-16T02:39:59.549746 | 2019-06-05T01:38:20 | 2019-06-05T01:38:20 | 182,636,327 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from pathlib import Path
import json
import os
import shutil
import subprocess
from typing import Dict, List, Optional

from loguru import logger
from prefect import Flow, task as prefect_task
working_dir = os.environ.get("WORKING_DIR", "working_dir")
@prefect_task
def pull_sfdc_code(
    username: str, dest_dir: str, metadata_items: Optional[List[str]] = None
) -> Dict[str, object]:
    """
    Pull code from the instance associated with the username.

    :param username: The username of the sfdc user.
    :param dest_dir: The directory (under the working dir) to drop the files in.
    :param metadata_items: The metadata types to retrieve (defaults to
        ["ApexClass"]).
    :return: A dict with the retrieved file descriptors ("files") and the
        project directory ("project_dir").
    :raises RuntimeError: if the sfdx CLI reports a failure.
    """
    # Avoid the mutable-default-argument anti-pattern.
    if metadata_items is None:
        metadata_items = ["ApexClass"]
    # sfdx expects the metadata types as one comma-separated value.
    metadata = ",".join(metadata_items)
    command = ["sfdx", "force:source:retrieve", "-u", username, "-m", metadata, "--json"]
    logger.debug(command)
    pull_instance_metadata = subprocess.run(
        command,
        cwd=Path(working_dir, dest_dir),
        capture_output=True,
        shell=True,
        text=True,
    )
    if pull_instance_metadata.returncode:
        # With --json the failure payload is written to stderr.
        raise RuntimeError(json.loads(pull_instance_metadata.stderr))
    pull_result = json.loads(pull_instance_metadata.stdout)
    logger.info(f"Retrieved {len(pull_result['result']['inboundFiles'])} files.")
    return {"files": pull_result["result"]["inboundFiles"], "project_dir": dest_dir}
@prefect_task
def copy_changed_files_and_get_tests(pull_result: Dict[str, object]):
    """Copy retrieved source files (plus their -meta.xml companions) into
    the mdapi working tree and collect the test-class file paths.

    Test classes are recognized by a fullName ending in "test" or "tc"
    (case-insensitive).

    NOTE(review): entry["filePath"][0:-4] assumes a 4-character extension
    like ".cls" — confirm for other metadata types.

    :raises RuntimeError: when no test classes are found.
    """
    print(pull_result)
    test_classes = []
    for entry in pull_result["files"]:
        # Mirror the source directory layout under <working_dir>/mdapi.
        new_path = Path(working_dir, "mdapi", os.path.split(entry["filePath"])[0])
        new_path.mkdir(parents=True, exist_ok=True)
        if entry["fullName"].lower().endswith("test") or entry[
            "fullName"
        ].lower().endswith(
            "tc"
        ):  # It is a test Class
            test_classes.append(entry["filePath"])
        shutil.copy(
            Path(
                os.getcwd(), working_dir, pull_result["project_dir"], entry["filePath"]
            ),
            new_path,
        )
        shutil.copy(
            Path(
                os.getcwd(),
                working_dir,
                pull_result["project_dir"],
                entry["filePath"][0:-4] + "-meta.xml",
            ),
            new_path,
        )
    if len(test_classes) == 0:
        logger.error("No Test Classes were found. Aborting migration.")
        raise RuntimeError("No Test Classes were found. Aborting migration.")
    logger.info(f"{len(test_classes)} test classes located.")
    return test_classes
@prefect_task
def convert_project_to_mdapi():
    """Convert the sfdx source-format project into a Metadata API project.

    Runs ``sfdx force:source:convert`` in the mdapi working directory and
    returns the parsed CLI result payload.

    :raises RuntimeError: if the CLI exits with a non-zero status.
    """
    # Change CLI to mdapi
    convert_to_metadata = subprocess.run(
        ["sfdx", "force:source:convert", "-r", "force-app", "-d", "mdapi", "--json"],
        cwd=Path(working_dir, "mdapi"),
        capture_output=True,
        shell=True,
        text=True,
    )
    if convert_to_metadata.returncode:
        logger.error(convert_to_metadata.stderr)
        raise RuntimeError("Conversion to metadata project failed.")
    # Bug fix: the sfdx --json envelope nests its payload under "result"
    # (singular), as every other command in this module assumes;
    # indexing "results" raised KeyError on success.
    return json.loads(convert_to_metadata.stdout)["result"]
@prefect_task
def get_active_orgs() -> dict:
    """Return the parsed output of ``sfdx force:org:list --json``.

    On CLI failure the stderr payload is logged (newlines flattened) and
    returned parsed instead.
    NOTE(review): stderr is assumed to be JSON here — confirm sfdx always
    emits JSON errors when --json is passed.
    """
    get_org_list = subprocess.run(
        ["sfdx", "force:org:list", "--json"], cwd=".", capture_output=True, shell=True
    )
    if get_org_list.returncode:
        logger.error(get_org_list.stderr.decode("utf-8").strip("\n").replace("\n", " "))
        return json.loads(get_org_list.stderr.decode("utf-8"))
    return json.loads(get_org_list.stdout.decode("utf-8"))
@prefect_task
def sfdx_jwt_org_auth(user_name: str, key: str, client_id: str, alias: str) -> dict:
    """
    Authorize with JWT
    :param user_name: Username to use
    :param key: path to private key file
    :param client_id: client id for connected app
    :param alias: Alias for the sandbox.
    :return: a dictionary containing the orgId and instanceUrl
    :raises RuntimeError: if the sfdx CLI exits with a non-zero status.
    """
    log_into_org = subprocess.run(
        [
            "sfdx",
            "force:auth:jwt:grant",
            "-u",
            f"{user_name}",
            "-f",
            f"{key}",
            "-i",
            f"{client_id}",
            "-a",
            f"{alias}",
            "--json",
        ],
        cwd=".",
        capture_output=True,
        shell=True,
        text=True,
    )
    if log_into_org.returncode:
        raise RuntimeError(log_into_org.stderr)
    # Only the two fields callers need are surfaced from the CLI payload.
    result = json.loads(log_into_org.stdout)["result"]
    return {"orgId": result["orgId"], "instanceUrl": result["instanceUrl"]}
@prefect_task
def create_sfdx_project(project_name: str) -> int:
    """Create a standard-template sfdx project under the working dir.

    Logs the CLI's rawOutput at error or info level depending on the
    reported status, and returns that status code.
    """
    create_project = subprocess.run(
        [
            "sfdx",
            "force:project:create",
            "--projectname",
            project_name,
            "--template",
            "standard",
            "--json",
        ],
        cwd=Path(working_dir),
        capture_output=True,
        shell=True,
    )
    output = json.loads(create_project.stdout.decode("utf-8"))
    status = output["status"]
    # Bug fix: `status is not 0` tested object identity against an int
    # literal (implementation-defined, SyntaxWarning on modern Python);
    # value inequality is what was meant.
    if status != 0:
        logger.error(output["result"]["rawOutput"])
    else:
        logger.info(output["result"]["rawOutput"])
    return status
# Manual smoke test: load credentials from ../sfdc_config.json (first
# entry) and run a one-task prefect flow that performs the JWT auth.
if __name__ == "__main__":
    print("Testing sfdx_commands")
    with open(Path(Path(os.getcwd()).parent, "sfdc_config.json"), "r") as config_in:
        config = json.load(config_in)[0]
    with Flow("A flow") as flow:
        foo = sfdx_jwt_org_auth(
            user_name=config["user"],
            key=Path(Path(os.getcwd()).parent, config["cert"]),
            client_id=config["client_id"],
            alias=config["alias"],
        )
    flow.run()
| UTF-8 | Python | false | false | 5,849 | py | 11 | sfdx_commands.py | 7 | 0.577364 | 0.575483 | 0 | 204 | 27.671569 | 88 |
phsantosjr/dj_bank | 7,172,595,434,199 | 86b00fc9f12fb801ca5308ce08731022b49a65dc | 07444c8f3588da8a51d90f97da6f928bfd99be31 | /conftest.py | e83730884f28e4b83f164abb836813f84610faac | [] | no_license | https://github.com/phsantosjr/dj_bank | 610363f8b20494fd7188d7733cdee62c5984f7b9 | 84b9154b0132f1f13c5a2a2a7d6c40395dd977f6 | refs/heads/master | 2023-06-26T00:07:25.972046 | 2021-07-23T12:59:58 | 2021-07-25T12:58:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import pytest
from rest_framework.test import APIClient
@pytest.fixture(autouse=True)
def enable_db_access_for_all_tests(db):
pass
@pytest.fixture
def api_client():
    """Provide a fresh DRF APIClient for each test."""
    return APIClient()
| UTF-8 | Python | false | false | 196 | py | 15 | conftest.py | 11 | 0.744898 | 0.744898 | 0 | 12 | 15.333333 | 41 |
akshay4570/DBMS_PROJECT | 15,865,609,210,531 | e4c7cb01ca7a1e6b17f712102654f634fef298b2 | 8d24d6b4480f00e51bd9a3c863feedc56ab3f82c | /ecom/migrations/0007_order_shipping.py | 9e02d4099062ecbfef21457fc74a77852b44dfce | [] | no_license | https://github.com/akshay4570/DBMS_PROJECT | 5c35f1ac7eba639980b7af0c26ad0717b46dcbfb | 44d016755230e99a7e1f0c2f5e3f89b6607c3c1b | refs/heads/master | 2021-05-26T15:54:05.644065 | 2020-12-10T07:38:56 | 2020-12-10T07:38:56 | 254,126,836 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 2.2.6 on 2019-11-30 06:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: adds the Shipping table and recreates the
    # Order table with foreign keys to Customer and Shipping.
    dependencies = [
        ('ecom', '0006_delete_order'),
    ]
    operations = [
        migrations.CreateModel(
            name='Shipping',
            fields=[
                ('sid', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=100)),
                ('email', models.CharField(max_length=100)),
                ('address', models.CharField(max_length=200)),
                ('city', models.CharField(max_length=50)),
                ('state', models.CharField(max_length=20)),
                ('zip_code', models.IntegerField()),
                ('phone', models.BigIntegerField()),
                ('cid', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ecom.Customer')),
            ],
        ),
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateTimeField(auto_now_add=True)),
                ('cid', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ecom.Customer')),
                ('sid', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ecom.Shipping')),
            ],
        ),
    ]
| UTF-8 | Python | false | false | 1,466 | py | 28 | 0007_order_shipping.py | 13 | 0.559345 | 0.537517 | 0 | 37 | 38.621622 | 114 |
BaderSZ/adventofcode2020 | 4,157,528,366,040 | 020962fc77ada5c9d6368e263ad026683d5a4c1a | ee62656f4cd66fc74f272045a259e0eb2d36f476 | /day12/part2.py | 612ba12f4bde4ee98bb2a68bc50c4e64a4715c31 | [
"MIT"
] | permissive | https://github.com/BaderSZ/adventofcode2020 | 9be6d113ded14513f4cbea09122cc99d6119a9e7 | dae705fd093bbd176021118f0898947cb4b02f84 | refs/heads/main | 2023-02-02T14:53:20.673481 | 2020-12-19T16:18:19 | 2020-12-19T16:18:19 | 317,454,681 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from enum import Enum
# Raw instruction lines parsed from the puzzle input file.
inp = []
class Direction(Enum):
    """Compass letters used in waypoint-move instructions."""
    north = "N"
    south = "S"
    east = "E"
    west = "W"
class Rotation(Enum):
    """Letters for rotating the waypoint around the ship."""
    left = "L"
    right = "R"
class Move(Enum):
    """Letter for moving the ship toward the waypoint."""
    forward = "F"
class Waypoint(object):
    """Ship + waypoint state for AoC 2020 day 12 part 2.

    (pos_east, pos_north) is the ship's absolute position and
    (unit_east, unit_north) the waypoint's offset relative to the ship.
    """
    def __init__(self, pos_north=0, pos_east=0, unit_north=1, unit_east=10):
        self.pos_north = pos_north
        self.pos_east = pos_east
        self.unit_north = unit_north
        self.unit_east = unit_east
    def rotate_facing(self, d: str):
        """Rotate the waypoint 90 degrees around the ship ('L' or 'R')."""
        if d == Rotation.right.value:
            # Clockwise quarter turn: (east, north) -> (north, -east).
            self.unit_east, self.unit_north = self.unit_north, -self.unit_east
        elif d == Rotation.left.value:
            # Counter-clockwise quarter turn: (east, north) -> (-north, east).
            self.unit_east, self.unit_north = -self.unit_north, self.unit_east
    def rotate_deg(self, direction, degrees):
        """Rotate the waypoint by a multiple of 90 degrees."""
        for _ in range(int(degrees / 90)):
            self.rotate_facing(direction)
    def move_facing(self, distance):
        """Advance the ship `distance` times the waypoint offset."""
        self.pos_north += distance * self.unit_north
        self.pos_east += distance * self.unit_east
    def move_direction(self, direction, distance):
        """Shift the waypoint itself along a compass direction."""
        if direction == Direction.north.value:
            self.unit_north += distance
        elif direction == Direction.south.value:
            self.unit_north -= distance
        elif direction == Direction.east.value:
            self.unit_east += distance
        elif direction == Direction.west.value:
            self.unit_east -= distance
# Read one instruction per line (e.g. "F10"); rsplit()[0] drops the newline.
with open("input", "r") as f:
    for l in f.readlines():
        inp.append(l.rsplit()[0])
waypoint = Waypoint()
# Each instruction is a letter P followed by an integer value V.
for i in inp:
    P = i[0]
    V = int(i[1:])
    if P in [e.value for e in Direction]:
        waypoint.move_direction(P, V)
    elif P in [e.value for e in Rotation]:
        waypoint.rotate_deg(P, V)
    elif P in [e.value for e in Move]:
        waypoint.move_facing(V)
# Puzzle answer: Manhattan distance of the ship from the origin.
print("Result = ", abs(waypoint.pos_east) + abs(waypoint.pos_north))
| UTF-8 | Python | false | false | 2,061 | py | 30 | part2.py | 29 | 0.584182 | 0.57836 | 0 | 76 | 26.105263 | 76 |
bvldog/binsonworksweb | 6,176,163,008,209 | bebf136f3922e3df42847c4c8ee09d4a68da744e | 8dcc6400dc819817789ebfc45ec05df9ff6025af | /mainPage/admin.py | 2ee368e208b6150e51a7f46a3f7bd1da167b8257 | [] | no_license | https://github.com/bvldog/binsonworksweb | fc19618a9b94361ff713b2eac471c2e8b9f7bc4f | 8d99aa5773f1846a780cabf2c75b660d16fa48b0 | refs/heads/master | 2022-12-24T00:54:17.594420 | 2019-11-21T07:06:29 | 2019-11-21T07:06:29 | 223,095,042 | 0 | 0 | null | false | 2022-12-08T06:55:10 | 2019-11-21T05:25:51 | 2019-11-21T07:06:44 | 2022-12-08T06:55:10 | 4,900 | 0 | 0 | 3 | HTML | false | false | from django.contrib import admin
from .models import MainPage, About
# Register your models here.
admin.site.register(MainPage)
admin.site.register(About)
| UTF-8 | Python | false | false | 158 | py | 13 | admin.py | 9 | 0.797468 | 0.797468 | 0 | 8 | 18.75 | 35 |
agzuniverse/wireless-printing-app-backend | 4,380,866,657,973 | b3e134bee1d4d985e41a90b506b0d5071761f124 | 4dff65532f6d300602690c0996ee6f3cbb44b274 | /backend/wireless_printing/serializers.py | 0cd43e6e9538128377e39237c92c4d9edb3105af | [] | no_license | https://github.com/agzuniverse/wireless-printing-app-backend | 9949fbcbc21f46749b49a1d3a75ff15ec60e1faf | 1d86fd60a1c9ec6f4e325f59f8c7e8bc0b7585f6 | refs/heads/master | 2022-01-14T12:16:15.838106 | 2019-07-23T09:20:57 | 2019-07-23T09:25:48 | 145,246,927 | 0 | 2 | null | false | 2022-01-06T22:26:20 | 2018-08-18T19:23:24 | 2019-08-17T19:05:06 | 2022-01-06T22:26:20 | 57 | 0 | 2 | 6 | Python | false | false | from rest_framework import serializers
from .models import Test
class TestSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked REST serializer for the Test model."""

    class Meta:
        # Expose the primary key and hyperlinked url alongside the model fields.
        model = Test
        fields = ('id', 'url', 'name', 'desc')
| UTF-8 | Python | false | false | 212 | py | 11 | serializers.py | 8 | 0.693396 | 0.693396 | 0 | 8 | 25.5 | 61 |
AlanJohnM/SimPLe | 14,276,471,307,090 | f8003e8de06b247d88baa55b51127ab90f2806ca | a093c51e2addf9a0a61247ca63e855dafab0d694 | /atari_utils/atari_utils/envs.py | 11b3ec1289c4cc3424e3cf98ab0700c44b8c71a2 | [
"MIT"
] | permissive | https://github.com/AlanJohnM/SimPLe | 416960224dda6107f2195b1a3a27989cdb5eb01e | 5277392321f993f319ff6683f4e64118bf010336 | refs/heads/main | 2023-04-13T23:39:41.774243 | 2021-05-01T09:59:57 | 2021-05-01T09:59:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import cv2
# See https://stackoverflow.com/questions/54013846/pytorch-dataloader-stucked-if-using-opencv-resize-method
# See https://github.com/pytorch/pytorch/issues/1355
cv2.setNumThreads(0)
import gym
import torch
from baselines.common import atari_wrappers
from baselines.common.atari_wrappers import NoopResetEnv
from baselines.common.vec_env import ShmemVecEnv, VecEnvWrapper
import numpy as np
from gym.wrappers import TimeLimit
from atari_utils.utils import one_hot_encode, DummyVecEnv
class WarpFrame(gym.ObservationWrapper):
    """Observation wrapper that optionally grayscales and resizes frames,
    returning them as channel-first uint8 torch tensors."""

    def __init__(self, env, width=84, height=84, grayscale=True, inter_area=False):
        """
        :param width/height: output frame size in pixels
        :param grayscale: convert RGB to a single channel when True
        :param inter_area: use cv2.INTER_AREA resampling instead of INTER_NEAREST
        """
        super().__init__(env)
        self.width = width
        self.height = height
        self.grayscale = grayscale
        self.inter_area = inter_area
        # Without grayscaling, keep the wrapped env's channel count
        # (assumes channels-last input frames -- TODO confirm).
        channels = 1 if grayscale else self.env.observation_space.shape[-1]
        # Advertised space is channel-first (PyTorch layout).
        self.observation_space = gym.spaces.Box(
            low=0,
            high=255,
            shape=(channels, self.height, self.width),
            dtype=np.uint8,
        )

    def observation(self, obs):
        """Grayscale (optional), resize, and permute a frame to (C, H, W) uint8."""
        # cv2 operates on float32; values are assumed to stay in 0..255 so the
        # later uint8 cast does not clip.
        obs = np.array(obs, dtype=np.float32)
        if self.grayscale:
            obs = cv2.cvtColor(obs, cv2.COLOR_RGB2GRAY)
        obs = cv2.resize(
            obs,
            (self.width, self.height),
            interpolation=cv2.INTER_AREA if self.inter_area else cv2.INTER_NEAREST
        )
        obs = torch.tensor(obs, dtype=torch.uint8)
        # Grayscale output is 2-D; restore a channel axis before permuting.
        if len(obs.shape) == 2:
            obs = obs.unsqueeze(-1)
        # (H, W, C) -> (C, H, W).
        obs = obs.permute((2, 0, 1))
        return obs
class RenderingEnv(gym.ObservationWrapper):
    """Pass observations through unchanged, rendering the env on every step."""

    def observation(self, observation):
        # Side effect only: draw the current frame.
        self.render()
        return observation
class ClipRewardEnv(gym.RewardWrapper):
    """Clip each reward to its sign while tracking the true (unclipped)
    episode return, which is reported via ``info['r']`` when the episode
    ends."""

    def __init__(self, env):
        super().__init__(env)
        self.cum_reward = 0

    def reset(self, **kwargs):
        # A new episode starts: forget the previous accumulated return.
        self.cum_reward = 0
        return self.env.reset(**kwargs)

    def step(self, action):
        observation, raw_reward, done, info = self.env.step(action)
        self.cum_reward += raw_reward
        if done:
            # Expose the unclipped episode return, then reset the accumulator.
            info['r'] = self.cum_reward
            self.cum_reward = 0
        return observation, self.reward(raw_reward), done, info

    def reward(self, reward):
        # Sign of the reward: -1, 0 or +1.
        return (reward > 0) - (reward < 0)
# Derived from
# https://github.com/openai/baselines/blob/master/baselines/common/vec_env/vec_frame_stack.py
class VecPytorchWrapper(VecEnvWrapper):
    """Vectorized-env wrapper that converts numpy observations/rewards to
    torch tensors on ``device`` and frame-stacks the last ``nstack``
    observations along the channel axis (dim 1)."""

    def __init__(self, venv, device, nstack=4):
        self.venv = venv
        self.device = device
        self.nstack = nstack
        wos = venv.observation_space  # wrapped ob space
        # Channel count of a single (unstacked) observation.
        self.shape_dim0 = wos.shape[0]
        low = np.repeat(wos.low, self.nstack, axis=0)
        high = np.repeat(wos.high, self.nstack, axis=0)
        # Rolling stack buffer: (num_envs, nstack * channels, H, W).
        self.stacked_obs = torch.zeros((venv.num_envs,) + low.shape).to(device)
        observation_space = gym.spaces.Box(low=low, high=high, dtype=venv.observation_space.dtype)
        VecEnvWrapper.__init__(self, venv, observation_space=observation_space)

    def step_async(self, actions):
        # Torch actions -> numpy for the underlying vectorized env.
        actions = actions.cpu().numpy()
        self.venv.step_async(actions)

    def step_wait(self):
        obs, rews, news, infos = self.venv.step_wait()
        obs = torch.tensor(obs).to(self.device)
        # Rewards become a column vector (num_envs, 1).
        rews = torch.tensor(rews).unsqueeze(1)
        # Shift the stack left by one observation's worth of channels.
        self.stacked_obs[:, :-self.shape_dim0] = self.stacked_obs[:, self.shape_dim0:].clone()
        # Clear the stack of any env that just finished an episode.
        for (i, new) in enumerate(news):
            if new:
                self.stacked_obs[i] = 0
        # Append the newest observation at the end of the stack.
        self.stacked_obs[:, -self.shape_dim0:] = obs
        return self.stacked_obs, rews, news, infos

    def reset(self):
        obs = self.venv.reset()
        obs = torch.tensor(obs).to(self.device)
        # Empty stack except for the fresh observation in the last slot.
        self.stacked_obs.zero_()
        self.stacked_obs[:, -self.shape_dim0:] = obs
        return self.stacked_obs
class VecRecorderWrapper(VecEnvWrapper):
    """Single-env vectorized wrapper that records every transition and, when
    an episode ends, back-fills the discounted return of each transition.

    Buffer entries are lists:
    ``[obs, one_hot_action, reward + 1 (uint8), newest_frame, done, return]``
    where ``return`` stays ``None`` until the transition's episode finishes.
    """

    def __init__(self, venv, gamma, stacking, device):
        super().__init__(venv)
        self.venv = venv
        self.gamma = gamma  # discount factor for the return back-fill
        self.stacking = stacking  # number of frames stacked per observation
        self.device = device  # device on which buffer tensors are kept
        # The recording logic below assumes exactly one underlying env.
        assert self.venv.num_envs == 1
        self.buffer = []
        self.obs = None
        self.initial_frames = None

    def new_epoch(self):
        """Forget the cached first rollout so the next reset records a new one."""
        self.initial_frames = None

    def get_first_small_rollout(self):
        """Return the stacked frames captured at the first reset of the epoch."""
        return self.initial_frames

    def add_interaction(self, action, reward, new_obs, done):
        """Append one transition to the buffer (return left as None for now)."""
        obs = self.obs.squeeze().byte().to(self.device)
        action = one_hot_encode(action.squeeze(), self.action_space.n).to(self.device)
        # Rewards are assumed clipped to {-1, 0, 1}; store them shifted to
        # {0, 1, 2} so they fit in an unsigned byte.
        reward = (reward.squeeze() + 1).byte().to(self.device)
        new_obs = new_obs.squeeze().byte()
        # Keep only the newest frame of the stacked observation.
        new_obs = new_obs[-len(new_obs) // self.stacking:].to(self.device)
        done = torch.tensor(done[0], dtype=torch.uint8).to(self.device)
        self.buffer.append([obs, action, reward, new_obs, done, None])

    def sample_buffer(self, batch_size):
        """Sample ``batch_size`` transitions whose returns are already known.

        Returns ``None`` until the first recorded episode has finished (i.e.
        while the very first transition still has no back-filled return).
        """
        if self.buffer[0][5] is None:
            return None
        samples = self.buffer[0]
        data = [torch.empty((batch_size, *sample.shape), dtype=sample.dtype) for sample in samples]
        for i in range(batch_size):
            value = None
            while value is None:
                # Re-draw until the sampled transition has a known return.
                index = int(torch.randint(len(self.buffer), size=(1,)))
                samples = self.buffer[index]
                value = samples[5]
            for j in range(len(data)):
                data[j][i] = samples[j]
        return data

    def reset(self):
        self.obs = self.venv.reset()
        # Step with NOOP (action 0) so the observation stack is fully filled.
        for _ in range(self.stacking - 1):
            self.obs = self.venv.step(torch.tensor(0))[0].clone()
        if self.initial_frames is None:
            self.initial_frames = self.obs[0]
        return self.obs

    def step(self, action):
        new_obs, reward, done, infos = self.venv.step(action)
        self.add_interaction(action, reward, new_obs, done)
        if done:
            # Episode finished: back-fill discounted returns, newest to oldest.
            # The terminal transition's return is defined as 0.
            value = torch.tensor(0.).to(self.device)
            self.buffer[-1][5] = value
            # FIX: the original used ``while reversed(range(...))`` -- an
            # iterator object is always truthy, so that was an accidental
            # ``while True`` that terminated only via the break below plus
            # negative-index wraparound (and raised IndexError / overwrote the
            # terminal return for a length-1 buffer).
            for index in reversed(range(len(self.buffer) - 1)):
                # FIX: rewards are stored as uint8 shifted by +1; subtracting 1
                # directly underflowed to 255 for negative rewards, so cast to
                # float before undoing the shift.
                value = (self.buffer[index][2].float() - 1).to(self.device) + self.gamma * value
                self.buffer[index][5] = value
                if index > 0 and self.buffer[index - 1][4] == 1:
                    # Reached the boundary of the previous episode.
                    break
        self.obs = new_obs.clone()
        return new_obs, reward, done, infos

    def step_wait(self):
        return self.venv.step_wait()
class SkipEnv(gym.Wrapper):
    def __init__(self, env, skip=4):
        """Return only every `skip`-th frame."""
        gym.Wrapper.__init__(self, env)
        self._skip = skip

    def step(self, action):
        """Repeat the action ``skip`` times, summing rewards, and return the
        last observation.  (Unlike the classic MaxAndSkip wrapper, no max over
        the final frames is taken.)"""
        obs = None
        total_reward = 0.0
        done = None
        info = None
        for i in range(self._skip):
            obs, reward, done, info = self.env.step(action)
            total_reward += reward
            # Stop repeating as soon as the episode ends.
            if done:
                break
        return obs, total_reward, done, info

    def reset(self, **kwargs):
        return self.env.reset(**kwargs)
def _make_env(
        env_name,
        render=False,
        max_episode_steps=18000,
        frame_shape=(1, 84, 84),
        inter_area=False,
        noop_max=30
):
    """Build a single preprocessed Atari env.

    Wrapper order matters: random no-ops on reset, 4x frame skip, episode
    time limit, FIRE-on-reset (when the game needs it), frame warping to
    ``frame_shape`` (channels, height, width), sign reward clipping, and
    optional on-screen rendering.
    """
    env = gym.make(f'{env_name}NoFrameskip-v4')
    assert 'NoFrameskip' in env.spec.id
    env = NoopResetEnv(env, noop_max=noop_max)
    env = SkipEnv(env, skip=4)
    env = TimeLimit(env, max_episode_steps=max_episode_steps)
    if 'FIRE' in env.unwrapped.get_action_meanings():
        env = atari_wrappers.FireResetEnv(env)
    # A single channel means grayscale output.
    grayscale = frame_shape[0] == 1
    height, width = frame_shape[1:]
    env = WarpFrame(env, width=width, height=height, grayscale=grayscale, inter_area=inter_area)
    env = ClipRewardEnv(env)
    if render:
        env = RenderingEnv(env)
    return env
def make_envs(env_name, num, device, stacking=4, record=False, gamma=0.99, buffer_device='cpu', **kwargs):
    """Build a vectorized, frame-stacked Atari environment.

    Only the first environment honours the caller's render flag; the
    remaining ``num - 1`` copies are forced head-less.  When ``record`` is
    True the stack is additionally wrapped in a transition recorder.
    """
    headless_kwargs = dict(kwargs)
    headless_kwargs['render'] = False
    factories = [lambda: _make_env(env_name, **kwargs)]
    factories.extend([lambda: _make_env(env_name, **headless_kwargs)] * (num - 1))
    # A single env avoids the shared-memory machinery entirely.
    vec_cls = DummyVecEnv if num == 1 else ShmemVecEnv
    env = VecPytorchWrapper(vec_cls(factories), device, nstack=stacking)
    if record:
        env = VecRecorderWrapper(env, gamma, stacking, buffer_device)
    return env
def make_env(env_name, device, **kwargs):
    """Convenience wrapper: build a single (num=1) environment stack."""
    return make_envs(env_name, 1, device, **kwargs)
| UTF-8 | Python | false | false | 8,564 | py | 15 | envs.py | 14 | 0.5961 | 0.583606 | 0 | 272 | 30.485294 | 107 |
quintel/etdataset-public | 1,254,130,486,407 | 58ff7e1e91ba05f58f35b198f10b73eba1bbb415 | a921fefffab0fc1ebd661a6c88fd8248333331fa | /curves/demand/households/space_heating/script/heat_demand/__init__.py | 502f06607464ab04f9847341f72fe4607a46860b | [] | no_license | https://github.com/quintel/etdataset-public | f2d1685f0da7c1f8023e8a536e21afa1252273b3 | bef8f76c7e0ebc172646fef085c8705245998b2f | refs/heads/master | 2023-07-25T04:54:52.661612 | 2023-07-10T13:35:52 | 2023-07-10T13:35:52 | 15,280,610 | 4 | 7 | null | false | 2022-12-27T16:17:17 | 2013-12-18T10:49:04 | 2021-12-21T15:36:12 | 2022-12-27T16:17:16 | 893,702 | 5 | 5 | 4 | Python | false | false | from .heat_demand_profile_generator import generate_profiles
| UTF-8 | Python | false | false | 61 | py | 2,813 | __init__.py | 108 | 0.852459 | 0.852459 | 0 | 1 | 60 | 60 |
kmcfar/DataScience | 6,244,882,492,733 | 88b0f2d2bee70d9d543cfb7eaf7533a10440cc4e | bcfc72a9225d4ab2f22844064ef8f35b8cacc708 | /Prepare_Data.py | 22ae25c3c0874657933b12f1e590ca65728ae8b8 | [] | no_license | https://github.com/kmcfar/DataScience | 2387b6a893de7d7a94e9b3326b1e913b1813f689 | 3766881233c8cbe3bae2ef14725046f34841b400 | refs/heads/master | 2021-01-23T13:29:25.722232 | 2016-02-04T18:05:03 | 2016-02-04T18:05:03 | 33,470,850 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | __author__ = 'kevinmcfarland'
import xml.etree.ElementTree as ET
import pprint
import re
import codecs
import json
problemchars = re.compile(r'[=\+/&<>;\'"\?%#$@\,\. \t\r\n]')
CREATED = [ "version", "changeset", "timestamp", "user", "uid"]
OSM_file = 'bellevue_washington.osm'
#lists and dictionary for updating street names
expected = ["Street", "Avenue", "Boulevard", "Drive", "Court", "Place", "Square", "Lane", "Road",
"Trail", "Parkway", "Commons", "Center", "Circle", "Way", "Plaza", "Point"]
mapping = { "St": "Street",
"St.": "Street",
"ST": "Street",
"Rd." : "Road",
"Ave" : "Avenue",
"CT" : "Court",
"street" : "Street",
"Av" : "Avenue",
"PL" : "Place",
"Ave." : "Avenue",
"AVENUE" : "Avenue",
"Blvd" : "Boulevard",
"Blvd." : "Boulevard",
"Sq" : "Square",
"st" : "Street",
"st." : "Street"
}
#Expands abbreviated street types using the module-level mapping dictionary
def update_streetname(street_name):
    """Return ``street_name`` with abbreviated street types expanded.

    Each whitespace-separated word found in the module-level ``mapping``
    dict (e.g. "St" -> "Street") is replaced in place; all other words are
    kept unchanged.
    """
    words = street_name.split()
    # Direct dict lookup via enumerate, instead of the original pattern of
    # scanning mapping.keys() and re-locating each hit with list.index()
    # (which rescanned the list and always targeted the first occurrence).
    for i, word in enumerate(words):
        if word in mapping:
            words[i] = mapping[word]
    return " ".join(words)
#Cleans zipcodes to contain just the first five digits and to clear out any erroneous data
def update_zipcodes(k_value, v_value, node, zipcodes):
    """Normalise a postcode tag value to its leading five digits.

    Node tags (``addr:postcode``): "12345-6789" is trimmed to "12345", values
    containing letters are rejected (``None``), anything else is returned
    unchanged.  Way tags (keys starting with ``tiger:zip``): semicolon-joined
    lists keep their first five characters.  Returns ``None`` for unrelated
    tag keys.  ``node`` and ``zipcodes`` are unused but kept for interface
    compatibility.
    """
    if k_value == "addr:postcode":
        # ZIP+4 form: keep only the five-digit prefix.
        if re.match(r'\d{5}-\d{4}', v_value):
            return v_value[0:5]
        # Street names sometimes end up in the postcode field -- drop them.
        if re.search('[a-zA-Z]', v_value):
            return None
        # Letter-free value (well-formed or merely odd): keep unchanged.
        return v_value
    if re.match(r'tiger:zip', k_value):
        # "98004;98005" style lists: keep the first five characters.
        if re.match(r'[0-9]*;', v_value):
            return v_value[0:5]
        return v_value
#Cleans the address data and creates the address array
def clean_addresses(k_value, v_value, node, address):
    """Route one tag (k, v) into either the shared ``address`` dict or the
    ``node`` document, cleaning postcodes and street names on the way.

    NOTE(review): ``node["address"] = address`` runs for *every* key that
    passes the problem-character check, so nodes can end up with an empty
    address dict even when they carry no addr:* tags -- confirm intent.
    """
    if not problemchars.match(k_value):
        node["address"] = address
        if re.search("postcode", k_value) or re.search("tiger:zip", k_value): #cleans up zipcodes
            address["postcode"] = update_zipcodes(k_value, v_value, node, address)
        elif re.search("street", k_value): #cleans up street names
            name = v_value
            better_name = update_streetname(name)
            if name != better_name:
                address["street"] = better_name
            else:
                address["street"] = name
        elif re.search("addr:", k_value): #imports the other address fields
            # Strip the "addr:" prefix (5 chars) from the key.
            address[k_value[5:]] = v_value
        else:
            # Non-address tags go straight onto the node document.
            node[k_value] = v_value
#Shapes the rest of the data and creates the documents
def shape_element(element):
    """Convert one OSM XML element ("node" or "way") into a MongoDB-style
    dict; returns an empty dict for any other element type.

    The result may contain: "created" (version/changeset/timestamp/user/uid),
    "pos" ([lat, lon] floats -- ordering relies on attribute iteration order
    with insert(0, ...), TODO confirm), "id", "visible", "address",
    "node_refs" and "type".
    """
    node = {}
    created_array = {}
    pos_array = []
    address = {}
    node_refs = []
    if element.tag == "node" or element.tag == "way" :
        # Top-level XML attributes.
        for tag in element.attrib:
            if tag in CREATED:
                created_array[tag] = element.attrib[tag]
                node["created"] = created_array
            elif tag == "lat" or tag == "lon":
                pos_array.insert(0, float(element.attrib[tag]))
                node["pos"] = pos_array
            elif tag == "id":
                node["id"] = element.attrib[tag]
            elif tag == "visible":
                node["visible"] = element.attrib[tag]
        # Child <tag> and <nd> elements.
        for child in element:
            if child.tag == "tag":
                tag = child.attrib
                k_value = tag.get("k")
                v_value = tag.get("v")
                clean_addresses(k_value, v_value, node, address)
            if child.tag == "nd":
                ref_value = child.attrib["ref"]
                node_refs.append(ref_value)
                node["node_refs"] = node_refs
        if element.tag == "node":
            node["type"] = "node"
        if element.tag == "way":
            node["type"] = "way"
        # NOTE(review): debug print of every shaped document; noisy on large
        # files -- consider removing.
        pprint.pprint(node)
        return node
#Creates and outputs the JSON documents by executing the commands above
def process_map(file_in, pretty = False):
    """Stream-parse ``file_in`` (OSM XML), write one JSON document per line
    to ``<file_in>.json`` and return the list of shaped documents."""
    out_path = "{0}.json".format(file_in)
    documents = []
    with codecs.open(out_path, "w") as out_file:
        for _, element in ET.iterparse(file_in):
            document = shape_element(element)
            if not document:
                continue
            documents.append(document)
            # indent=None produces the same compact output as plain dumps().
            indent = 2 if pretty else None
            out_file.write(json.dumps(document, indent=indent) + "\n")
    return documents
def run():
    """Process the configured OSM file (pretty-printed JSON) and dump the result."""
    data = process_map(OSM_file, True)
    pprint.pprint(data)
# Module executes the full pipeline on import/run.
run()
| UTF-8 | Python | false | false | 5,615 | py | 4 | Prepare_Data.py | 3 | 0.527337 | 0.525022 | 0 | 146 | 37.458904 | 126 |
sgcalle/hhrr | 12,687,333,416,977 | 7af76b3479bf1e54b3a4e5dce53d9694858ff62f | 67a0edf1f41c33c8167e7f45e30b675cc48efdbd | /stock_request/models/procurement_group.py | a439821d62893e2265a8dc6bbbcc1611e6614e5b | [] | no_license | https://github.com/sgcalle/hhrr | bfe8ff3b4cfa23a4f26191a1c594bf91c1f9427d | 3f1e5f5357f530cb78b422f4ba4f74eb8ac5d05e | refs/heads/master | 2021-07-16T18:53:06.580329 | 2021-03-17T08:28:35 | 2021-03-17T08:32:24 | 240,117,974 | 0 | 0 | null | false | 2021-03-17T08:32:24 | 2020-02-12T21:21:57 | 2020-05-07T09:33:34 | 2021-03-17T08:32:24 | 2,538 | 0 | 0 | 0 | Python | false | false | #-*- coding:utf-8 -*-
from odoo import models, fields, api
class ProcurementGroup(models.Model):
    """Extend Odoo's procurement.group with a link back to the originating
    stock request."""

    _inherit = "procurement.group"

    # Stock request that created this procurement group, if any.
    stock_request_id = fields.Many2one(string="Stock Request",
        comodel_name="stock.request")
ldirer/deploy-app-docker | 2,190,433,351,986 | 62bd3f22d539d51dd94c7270dac1e0dc3402b8e7 | 4050d014c8d68a3f57065087e2db14b753831e23 | /backend/settings.py | 53bc28ca6dd49257b49a5638ce89898d0fc30941 | [] | no_license | https://github.com/ldirer/deploy-app-docker | 20ce675a57ca7593acd1a7df811ab656594b5066 | b18c88df359918a082e1ec9a46b5af4546108da5 | refs/heads/master | 2022-01-11T09:37:37.018815 | 2018-03-22T16:14:40 | 2018-03-22T16:14:40 | 123,062,734 | 5 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os
PRODUCTION = not os.environ['QUIZ_ENV'] == 'production'
DEBUG = not PRODUCTION
SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI', 'postgresql://postgres:postgres@db')
SQLALCHEMY_TRACK_MODIFICATIONS = False
| UTF-8 | Python | false | false | 234 | py | 30 | settings.py | 9 | 0.769231 | 0.769231 | 0 | 6 | 38 | 104 |
TaeJuneJoung/Algorithm | 19,138,374,297,182 | c3b7903ba55d249b582f9277649666e3888d0042 | 89e40bf548403e440c230e06fa6301021ec8b0c7 | /sw_expert_academy/D4/p1486.py | 1397c76c26723f6216fbe24ce92041d1cd813034 | [] | no_license | https://github.com/TaeJuneJoung/Algorithm | b9cf5724501918c7302099b8194d26bd19512bd0 | ecc2934a376c91ecec8bfd15af377d8a2973d71d | refs/heads/master | 2020-06-19T13:50:14.720987 | 2019-08-04T14:35:43 | 2019-08-04T14:35:43 | 196,732,653 | 0 | 0 | null | false | 2019-08-04T14:35:44 | 2019-07-13T14:46:42 | 2019-07-13T16:37:07 | 2019-08-04T14:35:44 | 6,895 | 0 | 0 | 0 | Python | false | false | def nPr(n, r, s, rs):
if s >= H:
if s not in res_set:
res_set.add(s)
else:
return
elif n == r or s+rs < H:
return
else:
for i in range(n):
if used[i] == 0:
used[i] = 1
nPr(n, r+1, s+M[i], rs-M[i])
used[i] = 0
else:
return
T = int(input())  # number of test cases
for t in range(1,T+1):
    # N items with heights M; H is the minimum total height to reach.
    N, H = map(int, input().split())
    M = list(map(int,input().split()))
    used = [0] * N
    res_set = set()
    # Enumerate all subset sums that reach at least H (rs starts at sum(M)).
    nPr(N, 0, 0, sum(M))
    # Answer: smallest overshoot above the target height.
    print("#"+str(t), min(res_set)-H)
madkaye/ble-ls | 16,501,264,394,752 | 391a1f7371136e341b32dd4d904b3d71ce45d5b1 | 0219da7170d81ca6c3c483542a17a3c01954a538 | /ble-ls.py | 932d7ca28f72197abf4059e103ed97f7d561a126 | [
"MIT"
] | permissive | https://github.com/madkaye/ble-ls | 699cee2a655414cb4026aa3fe57ea4584487b695 | 24260b316fc2512b9e3b15612384609fefbeba8f | refs/heads/master | 2023-04-16T04:04:53.729433 | 2021-04-25T19:50:43 | 2021-04-25T19:50:43 | 361,523,436 | 5 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
import datetime
from bluepy import btle
from bluepy.btle import Scanner, Peripheral, Characteristic, ScanEntry, UUID
class BLELS:
    """Small bluepy helper: scan for BLE devices and dump the GATT attribute
    tree of a chosen device."""

    SCAN_TIMEOUT = 10  # default scan duration, seconds

    # NOTE(review): these are class-level attributes, so ``publicdevices`` is
    # shared by every BLELS instance and accumulates across scans -- the
    # "public" count printed below can therefore grow over multiple scans.
    scanner = None
    publicdevices = []

    def scan(self, duration=SCAN_TIMEOUT):
        """Scan for ``duration`` seconds, printing every device seen and
        collecting public-address devices in ``self.publicdevices``."""
        try:
            print("scan: starting scan for {}s".format(duration))
            self.scanner = Scanner()
            devices = self.scanner.scan(duration)
            foundDevices = 0
            for dev in devices:
                # Prefer the complete advertised name, fall back to the short one.
                devname = dev.getValueText(btle.ScanEntry.COMPLETE_LOCAL_NAME)
                if devname is None:
                    devname = dev.getValueText(btle.ScanEntry.SHORT_LOCAL_NAME)
                print("scan: Device {} [{}] ({}), Connect={}, RSSI={} dB".format(dev.addr, devname, dev.addrType,
                                                                                dev.connectable, dev.rssi))
                # for (adtype, desc, value) in dev.getScanData():
                #    print("  %s = %s" % (desc, value))
                if dev.addrType == btle.ADDR_TYPE_PUBLIC:
                    foundDevices = foundDevices + 1
                    self.publicdevices.append(dev)
            print("scan: Complete, found {} devices, {} public".format(len(devices), len(self.publicdevices)))
        except Exception as e:
            print("scan: Error, ", e)

    def connectandread(self, addr):
        """Connect to ``addr`` and print its services, characteristics,
        descriptors and every readable characteristic value."""
        try:
            peri = Peripheral()
            peri.connect(addr)
            print("Listing services...")
            services = peri.getServices()
            for serv in services:
                print(" -- SERVICE: {} [{}]".format(serv.uuid, UUID(serv.uuid).getCommonName()))
                characteristics = serv.getCharacteristics()
                for chara in characteristics:
                    print(" -- --> CHAR: {}, Handle: {} (0x{:04x}) - {} - [{}]".format(chara.uuid,
                                                                                      chara.getHandle(),
                                                                                      chara.getHandle(),
                                                                                      chara.propertiesToString(),
                                                                                      UUID(chara.uuid).getCommonName()))
            print("Listing descriptors...")
            descriptors = peri.getDescriptors()
            for desc in descriptors:
                print(" --  DESCRIPTORS: {}, [{}], Handle: {} (0x{:04x})".format(desc.uuid,
                                                                                 UUID(desc.uuid).getCommonName(),
                                                                                 desc.handle, desc.handle))
            print("Reading characteristics...")
            chars = peri.getCharacteristics()
            for c in chars:
                # Values are only read when the characteristic supports reads.
                print(" -- READ: {} [{}] (0x{:04x}), {}, Value: {}".format(c.uuid, UUID(c.uuid).getCommonName(),
                                                                           c.getHandle(), c.descs, c.read() if c.supportsRead() else ""))
        except Exception as e:
            print("connectandread: Error,", e)
if __name__ == '__main__':
    # Minimal interactive REPL around BLELS.
    print("BLE LS Script ---")
    print("--------------------")
    print("scan [duration] : Scan")
    print("ls <ADDRESS> : Read attributes for device")
    print("q : Quit")
    while True:
        choice = input("> ")
        choice = choice.lower()
        if choice.startswith('q'):
            print("exiting...")
            break
        elif choice.startswith('scan'):
            duration = 10
            # Optional duration argument, accepted only in the range 1..59.
            # NOTE(review): a non-numeric argument raises ValueError here.
            if len(choice) > 2:
                args = choice.split(' ', 2)
                if len(args) == 2 and 0 < int(args[1]) < 60:
                    duration = int(args[1])
            BLELS().scan(duration)
            continue
        elif choice.startswith('ls'):
            addr = ''
            if len(choice) > 2:
                args = choice.split(' ', 2)
                if len(args) == 2:
                    addr = args[1]
            # BLE MAC addresses are 17 characters (xx:xx:xx:xx:xx:xx).
            if len(addr) != 17:
                print ("Bad address, expecting 17 characters, got:", addr)
                continue
            BLELS().connectandread(addr)
            continue
        elif choice.startswith('t'):
            # Undocumented helper: print today's date.
            print("time is {}".format(datetime.date.isoformat(datetime.date.today())))
            continue
        else:
            print("Unknown option:", choice)
            continue
    print("--------------------")
    print("Goodbye!")
| UTF-8 | Python | false | false | 4,704 | py | 2 | ble-ls.py | 1 | 0.434311 | 0.427509 | 0 | 118 | 38.762712 | 126 |
pierrrrrr/raspberrrrr | 11,536,282,168,466 | da0d9cf11be11abcb2e25230690889ca6dfa9735 | f02fd314d2cd7a0bd897198af9264453f76165da | /core/utils.py | bb52cc42d4a0ea4ff8750dce643a085ff209f3af | [] | no_license | https://github.com/pierrrrrr/raspberrrrr | 4c1d6422de1b26b53b9a43d87443a571c87181b5 | c562f61238a0efb8a4557c1cd1e3b484c3c4b325 | refs/heads/master | 2021-01-01T05:39:55.104327 | 2013-08-08T22:03:22 | 2013-08-08T22:03:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from __future__ import print_function
import time
import functools
from ctypes import Union, Structure
sleep = time.sleep
def _dump(structure, level=0, filling='  '):
    """Recursively print the fields of a ctypes Structure/Union, one per
    line, indented ``filling * level``; recursion stops past level 3."""
    if level > 3:
        return
    for field, _ in structure._fields_:
        value = getattr(structure, field)
        print("{}{}: {!r}".format(filling*level, field, value))
        # Nested aggregates are expanded one indentation level deeper.
        if isinstance(value, (Structure, Union)):
            _dump(value, level=level+1)
def dump(message, structure):
    """Print ``message`` and the structure's repr, then its fields indented."""
    print("{}: {}:".format(message, structure))
    _dump(structure, level=1)
def log(message, *args, **kwargs):
    """Print ``message`` formatted with the supplied arguments."""
    return print(message.format(*args, **kwargs))

def trace(message):
    """Decorator factory that logs each call of the wrapped function.

    ``message`` is formatted with the call's positional/keyword arguments and
    printed (prefixed with '* ') before delegating to the wrapped function.
    """
    def decorator(fn):
        @functools.wraps(fn)
        def traced(*args, **kwargs):
            log("* {}".format(message.format(*args, **kwargs)))
            return fn(*args, **kwargs)
        return traced
    return decorator
def ensure(result):
    """Return 0 when ``result`` is zero; otherwise raise ValueError showing
    the result as an unsigned 32-bit hex code."""
    if result != 0:
        raise ValueError("failed with: {}".format(hex(result&0xffffffff)))
    return 0
def read(path):
    """Read ``path`` as bytes, logging the path and the byte count."""
    log("reading path: {}".format(path))
    with open(path, 'rb') as f:
        data = f.read()
    log("* read data with length: {}".format(len(data)))
    return data
| UTF-8 | Python | false | false | 1,309 | py | 12 | utils.py | 10 | 0.601986 | 0.596639 | 0 | 50 | 25.16 | 76 |
philgyford/django-url-namespace-example | 16,527,034,157,703 | 36149421b56f4022de39b1b439c696da020c0431 | 6c632247c6d6cef9d017c0d2960808ddabe09362 | /books/views.py | d612e018e3f6815cea15b2baad20931f0ca39db3 | [
"BSD-3-Clause"
] | permissive | https://github.com/philgyford/django-url-namespace-example | 2e926a1686e0885b75ee3264cd7238529054166c | e962d1dc537c2f6a1df6eb7fa55839aec0812fc7 | refs/heads/master | 2020-08-08T22:22:02.735531 | 2019-10-09T14:32:55 | 2019-10-09T14:32:55 | 213,933,582 | 3 | 0 | null | false | 2019-10-09T14:01:06 | 2019-10-09T13:59:44 | 2019-10-09T14:00:05 | 2019-10-09T14:01:06 | 0 | 0 | 0 | 0 | Python | false | false | from django.http import HttpResponse
def index(request):
    """Render a plain-text placeholder response for the books index."""
    return HttpResponse("This is the Books Index page.")
def detail(request, book_slug):
    """Render a plain-text placeholder response echoing the book's slug."""
    return HttpResponse(
        'This is the Book Detail page (for a book with the slug "{}").'.format(
            book_slug
        )
    )
| UTF-8 | Python | false | false | 293 | py | 6 | views.py | 4 | 0.634812 | 0.634812 | 0 | 13 | 21.538462 | 79 |
SmarT-Ouassine/CNN_architectures_pytorch | 12,936,441,516,246 | 4891927abea8be9b3a079ea7f3f596e4ee7497f9 | 1e9c3380d62b4aaea105a541b2294d9eb243fb95 | /dataset.py | 4eec18c4c23aaf926237d28dae314e0fecb1efa4 | [] | no_license | https://github.com/SmarT-Ouassine/CNN_architectures_pytorch | 7ec2292feee17bd956ba55db9b6814e2f3b02257 | c67ea5000c950f08b03d7bad3e9862040948d18a | refs/heads/main | 2023-07-04T13:11:09.028674 | 2021-08-07T00:06:47 | 2021-08-07T00:06:47 | 449,676,976 | 2 | 0 | null | true | 2022-01-19T12:11:52 | 2022-01-19T12:11:51 | 2022-01-19T12:10:15 | 2021-08-07T00:06:54 | 12 | 0 | 0 | 0 | null | false | false | from torchvision import datasets, transforms
import torch
import config
import os
import random
from shutil import copyfile
def prepare_datasets():
    """Split the raw ``natural_images/`` tree into train/test folders.

    Creates one subdirectory per class under ``config.DATASET_HOME/train``
    and ``.../test``, then copies each source image into the test split with
    probability 0.25 (otherwise into the train split).
    """
    class_dirs = ['airplane/', 'car/', 'cat/', 'dog/', 'flower/', 'fruit/',
                  'motorbike/', 'person/']
    for split in ('train/', 'test/'):
        for class_dir in class_dirs:
            os.makedirs(config.DATASET_HOME + split + class_dir, exist_ok=True)
    val_ratio = 0.25
    # Assign each image to a split independently at random.
    for dir in os.listdir('natural_images/'):
        for file in os.listdir('natural_images/' + dir):
            src = 'natural_images/' + dir + '/' + file
            split = 'test/' if random.random() < val_ratio else 'train/'
            copyfile(src, config.DATASET_HOME + split + dir + '/' + file)
def get_loaders():
    """Return (train_loader, test_loader) for the natural-images dataset.

    Side effect: (re)creates the on-disk train/test split first via
    prepare_datasets().
    """
    prepare_datasets()
    # Training images get light augmentation (rotation + horizontal flip).
    train_transforms = transforms.Compose([
        transforms.Resize((config.IMG_HEIGHT, config.IMG_WIDTH)),
        transforms.RandomRotation(30),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        #transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    ])
    # Test images are only resized and tensorised.
    test_transforms = transforms.Compose([
        transforms.Resize((config.IMG_HEIGHT, config.IMG_WIDTH)),
        transforms.ToTensor(),
        #transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    ])
    train_folder = os.path.join(config.DATASET_HOME, "train")
    test_folder = os.path.join(config.DATASET_HOME, "test")
    train_data = datasets.ImageFolder(train_folder, transform=train_transforms)
    test_data = datasets.ImageFolder(test_folder, transform=test_transforms)
    # Data Loading
    trainloader = torch.utils.data.DataLoader(train_data, shuffle=True, batch_size=config.BATCH_SIZE)
    testloader = torch.utils.data.DataLoader(test_data, shuffle=True, batch_size=config.BATCH_SIZE)
    return trainloader, testloader
eriker75/curso-django-channels | 6,313,601,932,928 | a23526f06c7d85a36f321a98185bd36fae08ed3b | b314a90dd6961bd63d89297a67a563ac7055c1bb | /chat/models.py | 9457918b8ba8da12c8c79f60a9604775085bd142 | [] | no_license | https://github.com/eriker75/curso-django-channels | c702279d265a2a6df868f3868e33f9ffe52dd5f9 | 39bf328b409888a6fe02b1b4aaac570484c17dc7 | refs/heads/main | 2023-03-20T09:47:56.047151 | 2020-12-23T19:57:27 | 2020-12-23T19:57:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.db import models
from django.contrib.auth.models import User
class Room(models.Model):
    """A chat room that users can join."""

    # Unique human-readable room name.
    name = models.CharField(max_length=60, unique=True)
    # Users who joined this room (reverse accessor: user.rooms_joined).
    users = models.ManyToManyField(User, related_name='rooms_joined', blank=True)

    def __str__(self):
        return self.name
| UTF-8 | Python | false | false | 296 | py | 13 | models.py | 11 | 0.695946 | 0.689189 | 0 | 9 | 30.888889 | 81 |
JayHsieh1104/LeetCode | 16,106,127,368,261 | d00d02124771991858418ce2aeb7543592bea51c | 6444c6d6fc51868f46ab9b6874039392db88b05e | /041. First Missing Positive/solution3.py | fb61a84306ffcb8aa893cd48615582695f854f70 | [] | no_license | https://github.com/JayHsieh1104/LeetCode | 4268a9a8a6d0a7c57a38ebd1d2545131d4463d77 | 892e9e869628ed384c3e96216be850245b501cff | refs/heads/master | 2021-06-20T18:13:19.081409 | 2021-04-04T11:22:52 | 2021-04-04T11:22:52 | 203,064,862 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | class Solution:
def firstMissingPositive(self, nums: List[int]) -> int:
is_one_existed = False
for i in range(len(nums)):
if nums[i] == 1:
is_one_existed = True
elif nums[i] > len(nums) or nums[i] < 1:
nums[i] = 1
if not is_one_existed:
return 1
for i in range(len(nums)):
if nums[abs(nums[i])-1] > 0:
nums[abs(nums[i])-1] = -1 * nums[abs(nums[i])-1]
for i in range(len(nums)):
if nums[i] > 0:
return i+1
return len(nums)+1
| UTF-8 | Python | false | false | 602 | py | 556 | solution3.py | 293 | 0.458472 | 0.438538 | 0 | 19 | 30.684211 | 64 |
xxtang/fspserver | 8,194,797,603,535 | a0f82ec0f4c41407014a28f1dbe08b73a32f1768 | dacf40993c1e97a70d41a503e2dcdc56772803b6 | /bsd_src/SConscript | ba38028136eba7aaee92ab5c09f71ec0421844b0 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | https://github.com/xxtang/fspserver | 995b3f089006cab9301389c891dc36368fd6e30e | 3458b8407d07d66396c3a2458841e25cdcee8ebc | refs/heads/master | 2021-06-21T02:51:29.418695 | 2017-07-28T09:00:51 | 2017-07-28T09:00:51 | 98,630,149 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | Import(Split("env"))
bsdfsp=env.StaticLibrary(target = 'bsdfsp', source = Split('''
cmp.c fnmatch.c glob.c operator.c print.c
find.c function.c ls.c option.c util.c
'''))
Export(Split("bsdfsp"))
| UTF-8 | Python | false | false | 199 | 180 | SConscript | 41 | 0.688442 | 0.688442 | 0 | 8 | 23.875 | 62 |
|
thatandromeda/libpmp | 7,129,645,741,262 | 84b9903b004949ebb4d637a2a49e50dd54911a48 | a848b8c8a70f2f85eb6da114128953619009d917 | /distributions/numeric.py | 49de2818c354ea151b9328641b26c393407dde36 | [
"Apache-2.0"
] | permissive | https://github.com/thatandromeda/libpmp | 9de4a5fef33eecad1842dde88bbd154290cfa7ea | f5117eb3520b1551167b1c9adcc4de098ed32a63 | refs/heads/master | 2021-01-01T18:24:05.497619 | 2017-07-25T19:25:54 | 2017-07-25T19:25:54 | 98,325,149 | 0 | 0 | null | true | 2017-07-25T16:00:35 | 2017-07-25T16:00:34 | 2017-05-31T18:48:17 | 2017-07-25T15:24:29 | 74 | 0 | 0 | 0 | null | null | null | # Copyright 2016 Grant Gould
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Discrete distribution used for nonparametric functions."""
import math
from distributions.distribution import Distribution
class NumericDistribution(Distribution):
    """Class representing a discretization of a distribution, which is
    required after certain mathematical operations (eg convolution).

    The distribution consists of a list of PDF values and an offset. It is
    automatically scaled to the sum of those PDF values to avoid numeric
    error."""

    def __init__(self, values, offset=0):
        """@p values is a list of PDF values P[i] (automatically normalized)
        representing the probability of an outcome between offset+i and
        offset+i+1."""
        self._values = values
        self._offset = int(offset)
        # Normalisation factor so the stored values integrate to 1.
        self._scale = 1 / sum(values)
        assert self._scale > 0, (
            "NumericDistribution(%s) had zero scale" % values)
        assert self._scale < float("inf"), (
            "NumericDistribution(%s) had inf scale" % values)

    # Sentinel bucket indices for x below/above the supported range.
    BEFORE, AFTER = [-3, -2]

    def _bucket(self, x):
        """Map x to a bucket index, or BEFORE/AFTER when out of range."""
        if x < self._offset:
            return self.BEFORE
        try:
            bucket = math.floor(x) - self._offset
        except OverflowError as _:
            # x was infinite (or too large to floor): past the end.
            return self.AFTER
        if bucket >= len(self._values):
            return self.AFTER
        return bucket

    def pdf(self, x):
        """Piecewise-constant density: zero outside the supported range."""
        bucket = self._bucket(x)
        if bucket in (self.BEFORE, self.AFTER):
            return 0
        return self._values[bucket] * self._scale

    def cdf(self, x):
        """Cumulative probability; linear interpolation within a bucket."""
        bucket = self._bucket(x)
        if bucket == self.BEFORE:
            return 0
        elif bucket == self.AFTER:
            return 1
        # Fractional position of x within its bucket, in [0, 1).
        point_in_bucket = x - bucket - self._offset
        return (sum(self._values[:bucket]) +
                (self._values[bucket] * point_in_bucket)) * self._scale

    def point_on_curve(self):
        # Midpoint of the supported range (not the median).
        return self._offset + (len(self._values) / 2)

    # Leave the default quantile() implementation in place because it is
    # probably fast enough.

    def contains_point_masses(self):
        return False

    def __repr__(self):
        return "NumericDistribution(offset=%d, scale=%f, %s)" % (
            self._offset, self._scale, self._values)
| UTF-8 | Python | false | false | 2,810 | py | 22 | numeric.py | 17 | 0.635587 | 0.629181 | 0 | 81 | 33.691358 | 76 |
mylgcs/python | 1,529,008,403,546 | 7c05804b4d8d9ff8d5609f804fe9cf77142eb5d6 | f4e1c211d1fe21c3c83f671de15d62aa8b3b9120 | /训练营day02/3_小明交换牌.py | e4c8dfc138b35148f786dde022387949b2fe1efe | [] | no_license | https://github.com/mylgcs/python | 5f4cbe185c66a415ab3837c137efc43954f3f6fb | c700cfa6124b98914cf892d950c06612273c4c5b | refs/heads/master | 2021-03-20T20:20:21.124169 | 2020-03-25T09:19:24 | 2020-03-25T09:19:24 | 247,230,409 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # 小明手里有两张牌,左手红桃A,右手黑桃K,问小明交换双手的牌
# 之后,左右手的牌各是什么?
# 先找对象 小明 两张牌 3个 小明 两只手 两张牌 5个
# 根据对象写类
# 牌的类,其对象是单张的牌
class Porke:
    """A single playing card: a suit (``color``) and a rank (``num``)."""

    def __init__(self, color="", num=""):
        """Create a card.

        The original __init__ only set a stray, never-read ``self.poker``
        attribute and relied on class-level ``color``/``num`` defaults;
        accept the values directly instead.  The defaults keep the old
        no-argument ``Porke()`` construction working.
        """
        self.color = color  # suit, e.g. "红桃" (hearts)
        self.num = num      # rank, e.g. "A"
# 手的类,用于创建一只手
class Hand:
    """One hand, which can hold at most one card (None when empty)."""

    def __init__(self, porke=None):
        # The original only declared a bare class-level annotation, so
        # reading ``hand.porke`` before a card was assigned raised
        # AttributeError.  Initialize the slot explicitly; the default
        # keeps the old no-argument ``Hand()`` construction working.
        self.porke = porke  # the held Porke card, or None when empty
#人类,用于创建小明
class Human:
    """A card player with a left and a right hand."""

    def __init__(self):
        # Per-instance hands: the original class-level ``Hand()`` fields
        # were shared by every Human instance, so two players would have
        # held the very same hands.
        self.hand_left = Hand()
        self.hand_right = Hand()

    def catch_porke_cards(self, porke1: Porke, porke2: Porke):
        """Take one card into each hand (porke1 left, porke2 right)."""
        self.hand_left.porke = porke1
        self.hand_right.porke = porke2

    def show_cards(self):
        """Print the card currently held in each hand."""
        # Fix: the attribute is named ``porke`` (set in catch_porke_cards);
        # the original read ``poker`` here and raised AttributeError.
        print("\n左手:%s_%s" % (self.hand_left.porke.color, self.hand_left.porke.num))
        print("右手:%s_%s\n" % (self.hand_right.porke.color, self.hand_right.porke.num))

    def swap_cards(self):
        """Exchange the cards between the two hands."""
        # Fix: swap the *cards* held by the hands; the original assigned
        # the temporary card object to ``self.hand_left`` itself,
        # replacing the Hand with a Porke.
        tmp = self.hand_right.porke
        self.hand_right.porke = self.hand_left.porke
        self.hand_left.porke = tmp
# Build the two cards: hearts-A for the left hand, spades-K for the right.
porke1 = Porke()
porke1.color, porke1.num = "红桃","A"
porke2 = Porke()
porke2.color, porke2.num = "黑桃","K"
# Create Xiaoming and have him pick up, show, swap, and re-show the cards.
# NOTE(review): as written, show_cards reads hand_left.poker while
# catch_porke_cards assigns hand_left.porke, so the first show_cards call
# raises AttributeError; swap_cards also rebinds hand_left to a card object
# rather than swapping the held cards — confirm and fix the Human class.
xiaoming = Human()
xiaoming.catch_porke_cards(porke1,porke2)
xiaoming.show_cards()
xiaoming.swap_cards()
xiaoming.show_cards()
raufer/text-classification-pytorch | 17,935,783,452,108 | 50109b120f229f3c7e4bf540d1b190cb6d8dc520 | 9e9b0d26fe42dc6a9ac529d2b0b3ba56e5659d6d | /src/tokenizer/electra.py | b0fd6834899ed5f8354dc0c09fb0d9b3b1704999 | [] | no_license | https://github.com/raufer/text-classification-pytorch | cbdde070bf8acd79d5f792467d9cc6ff922a55fc | 262d2a0d32e19cdd69961012fdc9c02b4816ba44 | refs/heads/main | 2023-03-13T02:27:56.601614 | 2021-03-04T16:04:03 | 2021-03-04T16:04:03 | 317,555,513 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from transformers import PreTrainedTokenizer
from transformers import ElectraTokenizer
def make_electra_tokenizer(
        pretrained_name: str = 'google/electra-small-discriminator') -> PreTrainedTokenizer:
    """Build the ELECTRA WordPiece tokenizer.

    Args:
        pretrained_name: HuggingFace checkpoint to load the vocabulary
            from.  Defaults to the small discriminator, so existing
            ``make_electra_tokenizer()`` calls behave exactly as before;
            the parameter generalizes the helper to other ELECTRA sizes.

    Returns:
        A ready-to-use ``ElectraTokenizer``.
    """
    return ElectraTokenizer.from_pretrained(pretrained_name)
if __name__ == '__main__':
    # Manual smoke test: build the tokenizer, dump its special tokens and
    # the encodings of two sample sentences.
    tok = make_electra_tokenizer()
    for special in (tok.cls_token, tok.cls_token_id,
                    tok.sep_token, tok.sep_token_id):
        print(special)
    for sample in ('Subject to regulation 7, a limited company',
                   '( a ) Subject to regulation 7 , a limited company'):
        print(tok(sample)['input_ids'])
| UTF-8 | Python | false | false | 621 | py | 55 | electra.py | 52 | 0.727858 | 0.724638 | 0 | 20 | 30 | 86 |
azizs4h/HexagonLmsBackend | 695,784,724,479 | 61a6651a00b4e67b6fa71d952e249d488a228d35 | fff2f15a540c2d499f9e26d05772a06af02bad8c | /meet/models.py | 62910930aaa5656a72b41d6bcb400e3b36464e31 | [] | no_license | https://github.com/azizs4h/HexagonLmsBackend | 9f462108c765e7aec57b550060461fa43abde7b7 | 6d8b4d2376a26cd65bded1e81122e2769ed7d50e | refs/heads/master | 2023-06-05T03:10:38.902655 | 2021-06-16T15:51:46 | 2021-06-16T15:51:46 | 349,548,229 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.db import models
from lesson.models import Lesson
class Meet(models.Model):
    """A meeting/session attached to a single lesson."""

    # Display name of the meeting.
    name = models.CharField(max_length=255)
    # Owning lesson; deleting the Lesson cascades and removes its meetings.
    lesson = models.ForeignKey(Lesson, on_delete=models.CASCADE)
    # Stamped automatically on row creation (auto_now_add).
    # NOTE(review): blank/null are largely moot alongside auto_now_add —
    # confirm whether manually scheduled meeting times were intended.
    meet_time = models.DateTimeField(blank=True, null=True, auto_now_add=True)

    def __str__(self):
        # Human-readable representation used by the admin and shell.
        return self.name
| UTF-8 | Python | false | false | 327 | py | 17 | models.py | 16 | 0.715596 | 0.706422 | 0 | 11 | 28.727273 | 78 |
Nastykiwi/git_ML_Project | 14,370,960,606,800 | 2f8ef640746f8bf32c4c1427b4d39a4f92011c88 | 98c87c98ad5ffd4b0aa20efdfe5902f3d397262a | /Dataset_creation/macro_data_extraction.py | 593ad4b4a885b528613c779362b87ffa05b63b6d | [] | no_license | https://github.com/Nastykiwi/git_ML_Project | 091a5d83d2367bc99e8bc397ed25a33aa58b3477 | 2ade5c7f13f80111fae6da6176db9c764382de0d | refs/heads/master | 2020-08-31T00:55:32.709070 | 2019-11-14T04:45:11 | 2019-11-14T04:45:11 | 218,538,980 | 0 | 0 | null | false | 2019-11-18T02:42:23 | 2019-10-30T13:52:41 | 2019-11-14T04:45:17 | 2019-11-18T02:42:22 | 126,198 | 0 | 0 | 0 | Python | false | false | '''
Code for macro data extraction
Documentation:
To come
- 2019-10-24: .py creation by Théophile
- 2019-10-30:
'''
import pandas as pd
import os
from Util.util_data_mgnt import save_df_to_pickle
'''
Ehrmann, M., & Fratzscher, M. (2004). Taking stock: Monetary policy transmission to equity markets.
Journal of Money, Credit and Banking, 719-737.
1. The 500 individual stocks comprising the S&P500 the firms with low cash flows, small size, poor credit ratings,
low debt to capital ratios, high price-earnings ratios, or a high Tobin's q are affected significantly more by
monetary policy
2. Strong industry-specific effects of U.S. monetary policies
Stock returns of firms in the technology, communication and cyclical consumer goods industries are more responsive than
the average stock, whereas non-cyclical consumer goods, energy, and utilities are industries that respond below average.
Firms in semiconductors, internet, telecommunications, computers, and software, to name a few, react more strongly
than the average.
https://www.federalreserve.gov/releases/h6/current/default.htm
'''
'''
EFFR (daily Effective Federal Funds Fate)
! Calculation changed a bit after 2016
Series (Select to preview) Available From Available To Observations Description
H15/H15/RIFSPFF_N.WW 1954-07-07 2019-10-23 3,408 Federal funds effective rate
H15/H15/RIFSPBLP_N.WW 1955-08-10 2019-10-23 3,351 Average majority prime rate charged by banks on short-term loans to business, quoted on an investment basis
H15/H15/RIFSRP_F02_N.WW 2003-01-15 2019-10-23 876 The rate charged for primary credit under amendment to the Board's Regulation
Weekly data
'''
# ---------------------------------------------------------------------------
# EFFR: weekly H.15 interest-rate series.
# Reads <cwd>/Data/EFFR.csv (data starts after header row 5), parses the
# date column, converts the three rate columns to numeric and pickles the
# frame under the name "EFFR".
# ---------------------------------------------------------------------------
effr_path = os.path.join(os.getcwd(), 'Data', "EFFR.csv")
effr_cols = ['Date', 'RIFSPFF_N.WW', 'RIFSPBLP_N.WW', 'RIFSRP_F02_N.WW']
effr = pd.read_csv(effr_path, header=5, names=effr_cols)
effr['Date'] = pd.to_datetime(effr['Date'], format='%Y-%m-%d')
effr[effr_cols[1:]] = effr[effr_cols[1:]].apply(pd.to_numeric)
save_df_to_pickle(effr, "EFFR")
# Drop the intermediates to free memory (matches the original cleanup).
del effr_path, effr_cols, effr
"""
World Bank Data (Annual) / OCDE + China + india + Russia
Countries : Germany, Australia, Austria, Belgium, Canada, South Korea, Denmark, Spain, United States, Finland,
France, Greece, Hungary, Ireland, Iceland, Italy, Japan, Luxembourg, Mexico, Norway, New Zealand, Netherlands,
Poland, Portugal, Slovak Republic, Czech Republic, United Kingdom, Sweden, Switzerland, Turkey.
"""
# ---------------------------------------------------------------------------
# World Bank WDI: annual indicators for the selected countries, reshaped to
# one row per year and one column per <country>_<indicator> pair, then
# pickled as "WBD_NA".
# ---------------------------------------------------------------------------
file_WDB = "WDIEXCEL.xlsx"
path = os.path.join(os.getcwd(), 'Data', file_WDB)
# Full OECD(+China/India/Russia) country list kept for reference:
#   Germany, Australia, Austria, Belgium, Canada, South Korea, Denmark,
#   Spain, United States, Finland, France, Greece, Hungary, Ireland,
#   Iceland, Italy, Japan, Luxembourg, Mexico, Norway, New Zealand,
#   Netherlands, Poland, Portugal, Slovak Republic, Czech Republic,
#   United Kingdom, Sweden, Switzerland, Turkey, China, Russia, India
countries = ['Canada', 'United States']

wdb = pd.read_excel(path, sheet_name='Data', index_col=0).loc[countries]
# Key each series by "<country code>_<indicator code>" and drop the raw
# descriptive columns.
wdb['Country_Indicator'] = wdb['Country Code'] + "_" + wdb['Indicator Code']
wdb = wdb.set_index(['Country_Indicator'])
wdb = wdb.drop(columns=['Country Code', 'Indicator Code', 'Indicator Name'])

# Transpose: rows become years, columns become country/indicator pairs.
wdb_by_year = wdb.T
wdb_by_year['Date'] = wdb_by_year.index
# NOTE(review): WDI year columns look like plain years ("1960"), which may
# not match format '%Y-%m-%d' — confirm against the actual workbook.
wdb_by_year['Date'] = pd.to_datetime(wdb_by_year['Date'], format='%Y-%m-%d')

# Move 'Date' (currently the last column) to the front.
cols = list(wdb_by_year.columns)
cols = [cols[-1]] + cols[:-1]
wdb_by_year = wdb_by_year[cols]

# Discard indicators that are entirely missing for these countries.
wdb_by_year = wdb_by_year.dropna(axis=1, how='all')
save_df_to_pickle(wdb_by_year, "WBD_NA")
# Drop the large frames to free memory (matches the original cleanup).
del wdb, wdb_by_year
charliememory/detectron2 | 10,333,691,336,175 | 12100e1f4a5de539342da612b822aeb9e3f30707 | f2cbd59703a1c1236a1733fd842ffbfb61e25015 | /projects/DensePose/RAFT/infer.py | b683d46cc920c81c2159c786ccf59ceff829a3c0 | [
"Apache-2.0"
] | permissive | https://github.com/charliememory/detectron2 | d7646251d9b4314401158742c93501c1f3d384dc | a2a6220068e73c616ee4c84cb52ea023c0203fa0 | refs/heads/master | 2023-04-01T12:20:50.723293 | 2021-02-03T20:19:30 | 2021-02-03T20:19:30 | 297,756,654 | 0 | 0 | Apache-2.0 | true | 2020-09-22T19:52:21 | 2020-09-22T19:52:20 | 2020-09-22T18:53:32 | 2020-09-22T13:38:55 | 3,337 | 0 | 0 | 0 | null | false | false | import sys
sys.path.append('core')
from PIL import Image
import argparse
import os, pdb, imageio, tqdm
import time
import numpy as np
import torch
import torch.nn.functional as F
import matplotlib.pyplot as plt
import datasets
from utils import flow_viz
from utils import frame_utils
from raft import RAFT
from utils.utils import InputPadder, forward_interpolate
@torch.no_grad()
def infer(model, seq_img_dir, suffix, iters=24, backward_flow=True):
    """Run RAFT over consecutive frame pairs and save the optical flow.

    For every pair produced by ``InferVideoDataset`` this writes a flow
    visualization image and the raw flow array (H, W, 2) next to
    ``seq_img_dir``, into ``flow_<dir>_img_<suffix>`` / ``flow_<dir>_np_<suffix>``
    where ``<dir>`` is ``backward`` or ``forward``.

    Args:
        model: RAFT network (must already live on a CUDA device).
        seq_img_dir: directory containing the ordered frame images.
        suffix: tag appended to the output directory names.
        iters: number of RAFT refinement iterations.
        backward_flow: when True, flow is computed from frame i+1 to i.
    """
    direction = 'backward' if backward_flow else 'forward'
    flow_img_dir = os.path.join(seq_img_dir, '../flow_{}_img_{}'.format(direction, suffix))
    flow_np_dir = os.path.join(seq_img_dir, '../flow_{}_np_{}'.format(direction, suffix))
    # exist_ok avoids the check-then-create race of the original code.
    os.makedirs(flow_img_dir, exist_ok=True)
    os.makedirs(flow_np_dir, exist_ok=True)

    model.eval()
    dataset = datasets.InferVideoDataset(seq_img_dir, backward_flow=backward_flow)
    for val_id in tqdm.tqdm(range(len(dataset))):
        image1, image2, path1, path2 = dataset[val_id]
        image1 = image1[None].cuda()
        image2 = image2[None].cuda()

        padder = InputPadder(image1.shape, mode='sintel')
        image1, image2 = padder.pad(image1, image2)

        # The low-resolution flow (first return value) is not needed here.
        _, flow_pr = model(image1, image2, iters=iters, test_mode=True)
        # (2, H, W) tensor -> (H, W, 2) numpy array; a single .cpu() call
        # replaces the redundant double transfer in the original.
        flow = padder.unpad(flow_pr[0]).permute(1, 2, 0).cpu().numpy()
        flow_img = flow_viz.flow_to_image(flow)

        # os.path.basename/splitext are robust to path separators and to
        # frame names containing extra dots (the original '/'-split and
        # split('.')[0] were not).
        frame_name = os.path.basename(path1)
        imageio.imwrite(os.path.join(flow_img_dir, frame_name), flow_img)
        np.save(os.path.join(flow_np_dir,
                             os.path.splitext(frame_name)[0] + '.npy'), flow)
if __name__ == '__main__':
    # Command-line entry point: load a RAFT checkpoint and compute flow
    # for an image sequence.
    parser = argparse.ArgumentParser()
    parser.add_argument('--model', help="restore checkpoint")
    parser.add_argument('--seq_img_dir', help="sequence images for evaluation")
    parser.add_argument('--backward_flow', action='store_true', help='calculate flow from i+1 to i')
    parser.add_argument('--small', action='store_true', help='use small model')
    parser.add_argument('--mixed_precision', action='store_true', help='use mixed precision')
    parser.add_argument('--alternate_corr', action='store_true', help='use efficent correlation implementation')
    args = parser.parse_args()

    model = torch.nn.DataParallel(RAFT(args))
    model.load_state_dict(torch.load(args.model))
    model.cuda()
    model.eval()

    with torch.no_grad():
        # Checkpoint tag (e.g. "raft-things.pth" -> "things") labels the
        # output directories.
        suffix = args.model.split('-')[-1].split('.')[0]
        # Fix: forward the CLI flag. The original never passed it, so
        # --backward_flow had no effect and the function default (True)
        # was always used regardless of the command line.
        infer(model.module, args.seq_img_dir, suffix,
              backward_flow=args.backward_flow)
| UTF-8 | Python | false | false | 3,304 | py | 91 | infer.py | 75 | 0.646792 | 0.636199 | 0 | 86 | 37.395349 | 112 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.