| column | dtype | values |
|---|---|---|
| repo_name | string | lengths 7-111 |
| __id__ | int64 | 16.6k to 19,705B |
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | lengths 5-151 |
| content_id | string | length 40 |
| detected_licenses | list | |
| license_type | string | 2 classes |
| repo_url | string | lengths 26-130 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | lengths 4-42 |
| visit_date | timestamp[ns] | |
| revision_date | timestamp[ns] | |
| committer_date | timestamp[ns] | |
| github_id | int64 | 14.6k to 687M, nullable |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 12 classes |
| gha_fork | bool | 2 classes |
| gha_event_created_at | timestamp[ns] | |
| gha_created_at | timestamp[ns] | |
| gha_updated_at | timestamp[ns] | |
| gha_pushed_at | timestamp[ns] | |
| gha_size | int64 | 0 to 10.2M, nullable |
| gha_stargazers_count | int32 | 0 to 178k, nullable |
| gha_forks_count | int32 | 0 to 88.9k, nullable |
| gha_open_issues_count | int32 | 0 to 2.72k, nullable |
| gha_language | string | lengths 1-16, nullable |
| gha_archived | bool | 1 class |
| gha_disabled | bool | 1 class |
| content | string | lengths 10-2.95M |
| src_encoding | string | 5 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 10 to 2.95M |
| extension | string | 19 classes |
| num_repo_files | int64 | 1 to 202k |
| filename | string | lengths 4-112 |
| num_lang_files | int64 | 1 to 202k |
| alphanum_fraction | float64 | 0.26 to 0.89 |
| alpha_fraction | float64 | 0.2 to 0.89 |
| hex_fraction | float64 | 0 to 0.09 |
| num_lines | int32 | 1 to 93.6k |
| avg_line_length | float64 | 4.57 to 103 |
| max_line_length | int64 | 7 to 931 |
repo_name | __id__ | blob_id | directory_id | path | content_id | detected_licenses | license_type | repo_url | snapshot_id | revision_id | branch_name | visit_date | revision_date | committer_date | github_id | star_events_count | fork_events_count | gha_license_id | gha_fork | gha_event_created_at | gha_created_at | gha_updated_at | gha_pushed_at | gha_size | gha_stargazers_count | gha_forks_count | gha_open_issues_count | gha_language | gha_archived | gha_disabled | content | src_encoding | language | is_vendor | is_generated | length_bytes | extension | num_repo_files | filename | num_lang_files | alphanum_fraction | alpha_fraction | hex_fraction | num_lines | avg_line_length | max_line_length |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
hchiam/pythonSandbox
| 687,194,779,789 |
2719ec993a9dc1b74548ba756a040fc3a58f3281
|
7e23f6a252ef6d80d0612bcb666f093e435a8c0e
|
/string_to_args_list.py
|
a07acacb65a42dd85335fd913b099d2cfc30a728
|
[] |
no_license
|
https://github.com/hchiam/pythonSandbox
|
cb75036e2cdfd6ad2b5d588ab2353b466ac6042f
|
cd2585c18ef20d31f90941920bee6003478654f6
|
refs/heads/master
| 2020-06-29T23:16:55.723874 | 2020-05-10T04:50:37 | 2020-05-10T04:50:37 | 74,406,379 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# http://stackoverflow.com/questions/2921847/what-does-the-star-operator-mean
# http://stackoverflow.com/questions/20263839/python-convert-a-string-to-arguments-list
# http://www.python-course.eu/passing_arguments.php
def f(a,b):
    print(a)
    print(b)
x='argument-one argument-two'
args = x.split(' ')
print (args)
f(*args)
|
UTF-8
|
Python
| false | false | 327 |
py
| 55 |
string_to_args_list.py
| 50 | 0.733945 | 0.688073 | 0 | 10 | 31.8 | 87 |
momoAix/python_programming
| 13,615,046,357,167 |
8c941e96e6990ca69a03d38ab18591dbe459554f
|
0fdcb32301725ff43fff534b4f6f79a8c546c960
|
/courses_code/coursera/Ass2_3.py
|
b8c5984190613e57eb8d803871cb39ed7d8b3c35
|
[] |
no_license
|
https://github.com/momoAix/python_programming
|
c37827f16a53b204529bbd805ab7d9b872a4cc7b
|
309e06d5669e89dea45703ae73b2cf9f0ab3b3a3
|
refs/heads/master
| 2021-01-10T06:10:01.423831 | 2016-02-25T21:53:34 | 2016-02-25T21:53:34 | 52,558,822 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# This first line is provided for you
inp=raw_input ("Enter Hours:")
hours=float(inp)
inp=raw_input ("Enter Rate:")
rate=float(inp)
pay= hours*rate
print pay
|
UTF-8
|
Python
| false | false | 160 |
py
| 123 |
Ass2_3.py
| 98 | 0.71875 | 0.71875 | 0 | 8 | 19.125 | 37 |
hotsukai/HigaCoffeeDBBack
| 3,100,966,393,811 |
a15a79b21c20ccf191c5f1c17ef12faa9000d52e
|
b678c316d2437339ffe614ee63ee97ba19dd7b47
|
/src/tests/test_utils.py
|
e9da82aaf76ef35ed6470c95110cd03cf02fcf8f
|
[
"MIT"
] |
permissive
|
https://github.com/hotsukai/HigaCoffeeDBBack
|
4ec3d0253fa4537a2d119874766071e1432377be
|
20db785e291857339c82cb45d616b521328b2734
|
refs/heads/main
| 2023-04-08T16:46:24.316465 | 2021-03-04T14:46:37 | 2021-03-04T14:46:37 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from datetime import datetime, timezone
from src.app import bcrypt
from src.database import db
from src.models.models import User
def add_sample_users():
    user1 = User(
        name="テストユーザー1",
        encrypted_password=bcrypt.generate_password_hash("password").decode(
            'utf-8'),
    )
    user2 = User(
        name="テストユーザー2",
        encrypted_password=bcrypt.generate_password_hash("password2").decode(
            'utf-8'),
    )
    db.session.add(user1)
    db.session.add(user2)
    db.session.commit()
    return user1, user2
def format_datetime_to_json_str(time: datetime):
    return time.astimezone(timezone.utc).strftime("%a, %d %b %Y %H:%M:%S GMT")
|
UTF-8
|
Python
| false | false | 711 |
py
| 5 |
test_utils.py
| 4 | 0.647145 | 0.63104 | 0 | 25 | 26.32 | 78 |
Flaac/DT2118-Deep-Learning-Project
| 4,337,916,974,936 |
8837761025e8022c967ceaac4f8ab0fde21f34b0
|
5f428bab97d1261883b56759b91653cf61cfadec
|
/source_separation/neural_network/DNN.py
|
798f35e02080a60e31a63c892ec15c79e3e7c638
|
[] |
no_license
|
https://github.com/Flaac/DT2118-Deep-Learning-Project
|
bdf9a9f157f586419c017297c2f108833646876c
|
f7e73b97ef6f74b8adde475c01b975822c2a7bcf
|
refs/heads/master
| 2021-01-11T02:45:47.894708 | 2016-05-30T00:25:00 | 2016-05-30T00:25:00 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
from keras.models import Sequential, model_from_json
from keras.layers import Dense
from output_layer import Output_Layer
from mask_data_callback import Mask_Data_Callback
from loss_function import source_separation_loss_function
import matplotlib.pyplot as pl
from source_separation.utils.plotter import Plotter
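# DNN: feed-forward Keras model (pre-1.0 Keras API, e.g. nb_epoch) whose final custom
# Output_Layer is built from the mixture STFTs; trained below with the custom
# source_separation_loss_function and masking/plotting callbacks.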
class DNN:
def __init__(self, input_size, hidden_layer, nodes_hl, stfs, optimizer='sgd',
loss='mse', activation='relu'):
self.stfs = stfs
self.model = self.build(input_size, hidden_layer, nodes_hl, activation)
self.model.compile(optimizer=optimizer, loss=loss)
self.name = "model_dnn_" + activation + "_" + str(hidden_layer) + "_" + str(nodes_hl)
def build(self, input_size, hidden_layer, nodes_hl, activation):
'''
The Recurrent Neural Network from the Monaural Speech
Recognition paper
:param input_size: Size of the input layer
:param hidden_layer: Number of hidden layers
:param stfs: The STFs as a numpy array with shape: (SET_SIZE, OUTPUT_SIZE)
:param timesteps: Recurrence depth
'''
output_size = self.stfs.shape[1]
model = Sequential()
model.add(Dense(nodes_hl, input_dim=input_size, activation=activation))
for i in range(hidden_layer):
model.add(Dense(nodes_hl, activation=activation))
model.add(Dense(2 * output_size, activation=activation))
model.add(Output_Layer(output_size, self.stfs))
return model
def fit(self, noisy, targets, nb_epoch=10, batch_size=1):
X, y = noisy, targets
mask_data = Mask_Data_Callback(self.stfs.shape[0])
self.model.fit(X, y, nb_epoch=nb_epoch, batch_size=batch_size, callbacks=[mask_data,
Plotter(show_regressions=False, save_to_filepath="test", show_plot_window=False)])
def save(self, name='model', overwrite=False):
json_string = self.model.to_json()
open(self.name + '.json', 'w').write(json_string)
self.model.save_weights(self.name + '_weights.h5', overwrite)
def load_weight(self, path):
#m = model_from_json(open(name + '.json').read())
self.model.load_weights(path)
def evaluate(self, x, y, batch_size=1):
return self.model.evaluate(x, y, batch_size=batch_size)
def predict(self, x, batch_size=1):
return self.model.predict(x,batch_size=batch_size)
if __name__ == '__main__':
# Dummy data
INPUT_SIZE = 1024
OUTPUT_SIZE = 2 * INPUT_SIZE
SET_SIZE = 1000
clean = np.random.random((SET_SIZE, INPUT_SIZE))
noise = 0.10 * np.random.random((SET_SIZE, INPUT_SIZE))
noisy = clean + noise
target = np.append(clean, noise, axis=1)
print target.shape
print noisy.shape
dnn = DNN(INPUT_SIZE, 2, 150, noisy, loss=source_separation_loss_function)
dnn.fit(noisy, target)
|
UTF-8
|
Python
| false | false | 2,923 |
py
| 30 |
DNN.py
| 23 | 0.645912 | 0.637017 | 0 | 76 | 37.460526 | 94 |
thomas-li-67/ctf-bugku2018
| 3,204,045,638,793 |
1cb331bb7eb19a3e8f3cacce856e21dc25eb0f4b
|
611293390710a27a86330eadb2e202ece951b828
|
/sqlinject/sqlencoder.py
|
898596b1fc23051ba97d73d6e419169610969b96
|
[] |
no_license
|
https://github.com/thomas-li-67/ctf-bugku2018
|
12b2e4ef33bdf037fc8ecebb3527c437552c8874
|
42a4ec2b765f450fd5fc21113b901656a5f78961
|
refs/heads/master
| 2020-03-26T22:42:39.727274 | 2018-08-20T23:44:21 | 2018-08-20T23:44:21 | 145,481,532 | 4 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def encoder(string):
    subs = []
    values = {" ": "%50", "SELECT": "HAVING", "AND": "&&", "OR": "||"}
    originalstring = "' UNION SELECT * FROM Users WHERE username = 'admin' OR 1=1 AND username = 'admin'"
    secondoriginalstring = originalstring
    for key, value in values.iteritems():
        if key in originalstring:
            newstring = originalstring.replace(key, value)
            subs.append(newstring)
        if key in secondoriginalstring:
            secondoriginalstring = secondoriginalstring.replace(key, value)
            subs.append(secondoriginalstring)
    subset = set(subs)
    return subset
|
UTF-8
|
Python
| false | false | 570 |
py
| 67 |
sqlencoder.py
| 48 | 0.737828 | 0.730337 | 0 | 16 | 32.4375 | 66 |
adikolsur/Django
| 10,256,381,930,002 |
22f495f680926659a7a8e2d1ff1dc38fddab769d
|
7d062d837a96a8b10fa2b07c0b0a44a62261c857
|
/level3_forms/formApp/forms.py
|
dcbc9acab8190190ea8ec9a236953d34a1ab086a
|
[] |
no_license
|
https://github.com/adikolsur/Django
|
81370baa15a26d87471e96675f6b4c827c7619d2
|
bd69b108dfdcc98e6db1dca6644e2dd0ed8c572b
|
refs/heads/master
| 2021-06-28T12:37:26.844526 | 2020-09-30T18:37:29 | 2020-09-30T18:37:29 | 167,184,745 | 0 | 0 | null | false | 2020-09-30T18:37:31 | 2019-01-23T13:05:59 | 2019-10-01T03:28:36 | 2020-09-30T18:37:30 | 10,249 | 0 | 0 | 0 |
Python
| false | false |
from django import forms
from formApp.models import User
from django.core import validators
# def check_begin_a(value):
#     if value[0].lower()!='a':
#         raise forms.ValidationError("Must begin with the letter a/A")
class FormModel(forms.ModelForm):
    class Meta:
        model = User
        fields = '__all__'
class FormName(forms.Form):
    # name = forms.CharField(validators=[check_begin_a])
    name = forms.CharField()
    email = forms.EmailField()
    verifyEmail = forms.EmailField(label="Enter email again")
    text = forms.CharField(widget=forms.Textarea)
    bot_catcher = forms.CharField(required=False, widget=forms.HiddenInput,
                                  validators=[validators.MaxLengthValidator(0)])
    def clean(self):
        all_clean_data = super().clean()
        email = all_clean_data['email']
        verify_email = all_clean_data['verifyEmail']
        if email != verify_email:
            raise forms.ValidationError("Emails Do Not Match")
    # def clean_bot_catcher(self):
    #     bot_catcher=self.cleaned_data['bot_catcher']
    #     if len(bot_catcher)>0:
    #         raise forms.ValidationError("BYE BYE BOT!")
    #     return bot_catcher
|
UTF-8
|
Python
| false | false | 1,205 |
py
| 21 |
forms.py
| 14 | 0.636515 | 0.634025 | 0 | 36 | 32.472222 | 80 |
argux/server
| 15,281,493,669,950 |
5f7fd6f742a7d02e9c3ab90ce32c1e3a89b6ed88
|
589e5d16bf11cc1cc64c5cf9e750bde122b9e0b5
|
/argux_server/monitors/SNMPMonitor.py
|
6d740c91b94d09f36a41ed31e15a61f73379f919
|
[
"Apache-2.0"
] |
permissive
|
https://github.com/argux/server
|
38a2efc26782aaa9a2920d0230029acec46d47ab
|
e469521c037ecadc21a0527bce6a0cdba5cffcf2
|
refs/heads/master
| 2020-04-05T23:47:49.921547 | 2016-11-26T12:50:30 | 2016-11-26T12:50:30 | 44,723,415 | 0 | 1 | null | false | 2016-05-29T21:22:35 | 2015-10-22T05:10:18 | 2016-01-13T06:37:29 | 2016-05-29T21:22:15 | 1,439 | 0 | 1 | 16 |
Python
| null | null |
"""SNMPMonitor module."""
import time
import re
import subprocess
import platform  # needed for platform.system() in monitor_once
from datetime import datetime
from .ExternalMonitor import ExternalMonitor
import transaction
class SNMPMonitor(ExternalMonitor):
    """SNMPMonitor class.
    Queries Monitor dao and schedules monitoring actions.
    """
    def __init__(self, settings):
        """Initialise SNMPMonitor.
        """
        super(SNMPMonitor, self).__init__(settings)
        self.monitor_type = 'snmp'
    @staticmethod
    def validate_options(options):
        if not 'interval' in options:
            raise KeyError
        return True
    # pylint: disable=no-self-use
    def monitor_once(self, client, monitor):
        """
        Monitor once.
        """
        system_name = platform.system()
        return
|
UTF-8
|
Python
| false | false | 789 |
py
| 194 |
SNMPMonitor.py
| 135 | 0.626109 | 0.626109 | 0 | 43 | 17.348837 | 57 |
bobquest33/testRestFlask
| 18,193,481,468,345 |
c713022a8f8c192a752c001732193ef1bcf59995
|
f8480a6ecd0c140703999eb4f760995bd375b2a1
|
/testRestFlask/testRestFlask/apps/testRest/views.py
|
fe7e858d8f9be8d74d57fd6cb7b5756ebf7a110d
|
[] |
no_license
|
https://github.com/bobquest33/testRestFlask
|
72f0b6bf17eb333d057d00a0c05918beb1e9bc67
|
0d518ceecd63747c2325324b7e103ce422a4f621
|
refs/heads/master
| 2016-08-11T20:25:00.495819 | 2016-03-17T19:08:18 | 2016-03-17T19:08:18 | 54,085,511 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from flask import render_template,jsonify,current_app,g
from flask import Blueprint
from flask_restful import reqparse, abort, Api, Resource,marshal
from flask.ext.httpauth import HTTPBasicAuth
from models import *
testRest = Blueprint('testRest', __name__,url_prefix='/api')
api = Api()
api.init_app(testRest)
parser = reqparse.RequestParser()
parser.add_argument('username')
parser.add_argument('password')
parser.add_argument('expire',type=int)
# Create a new resource
class Users(Resource):
    def post(self):
        data = parser.parse_args()
        username = data['username']
        password = data['password']
        if username is None or password is None:
            print "missing argument"
            abort(400,message='missing argument') # missing argument
        if User.query.filter_by(username=username).first() is not None:
            print "existing user"
            abort(400,message='existing user') # existing user
        user = User(username=username)
        user.hash_password(password)
        db.session.add(user)
        db.session.commit()
        return {'username':user.username}
#Get the User details
class UserRes(Resource):
    def get(self,tid):
        user = User.query.get(tid)
        if not user:
            abort(400,message='user not found')
        return {'username': user.username}
# Generate an authentication token
class AuthToken(Resource):
    @auth.login_required
    def get(self):
        data = parser.parse_args()
        print data['expire']
        exipery = 600
        if data['expire'] != None:
            exipery = data['expire']
        token = g.user.generate_auth_token(exipery)
        return {'token': token.decode('ascii'), 'duration': exipery}
# Get a user resource after authentication
class HResource(Resource):
    @auth.login_required
    def get(self):
        return {'data': 'Hello, %s!' %g.user.username}
api.add_resource(Users, '/users')
api.add_resource(UserRes, '/user/<tid>')
api.add_resource(AuthToken, '/token')
api.add_resource(HResource, '/resource')
|
UTF-8
|
Python
| false | false | 1,969 |
py
| 6 |
views.py
| 4 | 0.690706 | 0.684104 | 0 | 68 | 27.955882 | 67 |
ASWATHIV98/Luminarpythonproject
| 5,617,817,232,114 |
36f130d1d8f7e46bc2b35418e27d32b30c9d7e73
|
4f8017615a86d1dfa10718d06606999bd4f6ae24
|
/testword/operation_function.py
|
af1a2cf8f0ed5e91f54da3d1c07dd0f3acb536f8
|
[] |
no_license
|
https://github.com/ASWATHIV98/Luminarpythonproject
|
d2b2c840bb4c29223d85a7a4a5973f82de99afe7
|
53f07060442f24b6ffed2104ef63c43884ab6fc8
|
refs/heads/master
| 2023-03-06T10:45:07.875145 | 2021-02-12T05:28:59 | 2021-02-12T05:28:59 | 333,320,955 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# import function.module#function is a directory and module is an .py file in same directory.
#
# addres=function.module.add(100,200)
# print(addres)
#or
import function.module as np
addres=np.add(100,300)
print(addres)
# from function.module import *
# addres=add(200,100)
# subres=sub(200,50)
# print(addres)
# print(subres)
|
UTF-8
|
Python
| false | false | 328 |
py
| 45 |
operation_function.py
| 45 | 0.740854 | 0.670732 | 0 | 15 | 20.933333 | 93 |
fortuno/mutanome-project
| 11,828,339,965,348 |
df9cb6b0c64f8d9adcd53f7acb468681545f23d8
|
d3fad47cfbe00058c906afe1b584031f923e5a4b
|
/variants_functions.py
|
8b79d3246c3d0d11b7c0de5509256aed6720b510
|
[] |
no_license
|
https://github.com/fortuno/mutanome-project
|
45090b35478b4b50aa65190f895e7d30cc3d93c6
|
24493b29b4c986cf602cd4c7fe54b10a641f84f1
|
refs/heads/master
| 2020-03-21T23:38:56.979987 | 2018-06-29T22:24:50 | 2018-06-29T22:24:50 | 139,199,404 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import re
import gzip
import glob
import os.path
import urllib
import urllib2
import httplib
import pprint
import string
import math
import random
import copy
from classes.protein_info import PDB
from Bio import SwissProt
from Bio import SeqIO
from Bio import pairwise2
from Bio.PDB import *
from Bio.SubsMat import MatrixInfo as matlist
from Bio.SeqUtils import seq1
import __main__
__main__.pymol_argv = [ 'pymol', '-qc'] # Quiet and no GUI
import pymol
def read_pdb(pdbID, source):
# Check and download biological assemblies
assemfound = 0
if source == "ASSEMBLY" and len(pdbID) < 5:
pdbpath ='pdb/assemblies/'
pdbfile = pdbID.lower() + '.pdb'
# Check if biological assembly files already exist
if glob.glob(pdbpath + pdbfile + '*'):
assemfound = 1
else:
# Looking for assemblies
url = 'http://www.rcsb.org/pdb/rest/getEntityInfo?structureId=' + pdbID
req = urllib2.Request(url)
f = urllib2.urlopen(req)
result = f.read()
regassem = re.search("bioAssemblies=\"(\d)\"", result)
if hasattr(regassem, 'group'):
nassem = int(regassem.group(1))
# Download assemblies
for a in range(1,nassem+1):
# Check if file exists locally
localfile = pdbpath + pdbfile + str(a)
# Check if resource exists
try:
check = urllib2.urlopen("http://www.rcsb.org/pdb/files/" + pdbfile + str(a) + '.gz')
status = check.code
except urllib2.HTTPError, e:
status = e.code
# Download if resource exists
if status == 200:
assemfound = 1
# Download biological assembly
print " Downloading " + pdbID + " biological assembly " + str(a) + " ..."
url = 'http://www.rcsb.org/pdb/files/' + pdbfile + str(a) + '.gz'
urllib.urlretrieve(url, localfile + ".gz")
# Unzip biological assembly
gz = gzip.open(localfile + ".gz", 'rb')
with open(localfile, 'wb') as out:
out.writelines(gz)
gz.close()
os.remove(localfile + ".gz")
# Check and donwload if source is PDB or no biological assemblies have been found
elif source == "PDB" or assemfound == 0:
# Local path for PDB
pdbfile = pdbID.lower() + ".pdb"
pdbpath = "pdb/pdb/"
# Download file if it is not exist
if not os.path.exists(pdbpath + pdbfile):
print " Downloading " + pdbID + " file..."
if len(pdbID) < 5:
url = "http://files.rcsb.org/download/" + pdbfile
else:
url = "https://swissmodel.expasy.org/repository/uniprot/" + pdbfile + "?provider=swissmodel"
f = urllib2.urlopen(url)
data = f.read()
with open(pdbpath + pdbfile, "wb") as code:
code.write(data)
# Read structure from file
pathfiles = glob.glob(pdbpath + pdbfile + '*')
if pathfiles and not os.stat(pathfiles[0]).st_size == 0:
structures = []
parser = PDBParser(QUIET=True)
for fname in pathfiles:
pdbname = fname.split("/")[-1]
pdbname = pdbname.split(".pdb")
pdbname = [x for x in pdbname if x]
pdbname = '.'.join(pdbname)
structures += [parser.get_structure(pdbname, fname)]
return structures
else:
return None
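# pdb_distances: loads the structure into PyMOL, measures pairwise distances between the
# mapped variant residue positions, accumulates a frequency-weighted proximity score
# (waps) with a soft distance cutoff, and writes the distance matrix to
# outpath + 'distances/<pdb.name>_distances.txt'.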
def pdb_distances(pdb, pdb_seq_pos, outpath, pdbsource):
pymol.finish_launching()
# Read and format inputs
chain = pdb.chain
var_pdb_pos = pdb.var_pdb_pos
var_up_pos = sorted(pdb.var_up_pos)
var_freqs = pdb.freq
# Recover file name
filename = pdb.name.split('.')
struct = filename[0]
if len(filename)>1:
filename = '.pdb'.join(filename)
else:
filename = struct + '.pdb'
# Find path for structure
if pdbsource == "PDB":
structPath = "pdb/pdb/" + filename
elif pdbsource == "MODBASE":
structPath = "modbase/" + filename
chain = ""
elif pdbsource == "ASSEMBLY":
structPath = "pdb/assemblies/" + filename
if not os.path.exists(structPath):
structPath = "pdb/pdb/" + filename
# Load structure to PyMol
pymol.cmd.load(structPath, struct)
# Calculate distances
numvar = len(var_pdb_pos)
distances = [[0 for x in range(numvar)] for y in range(numvar)]
waps = [[0 for x in range(numvar)] for y in range(numvar)]
#random_vars = []
#random_rep = 1000
soft_cutoff = 6
#random_wap = [0] * random_rep
#for x in range(0,random_rep):
# random_vars.append(random.sample(pdb_seq_pos, numvar))
print "Calculating distances for " + pdb.name + " ..."
for i in range(0,numvar):
# Format variant 1
pos_init = "/" + struct + "//" + chain + "/{0}".format(var_pdb_pos[i])
for j in range(i+1, numvar):
# Format variant 2
pos_end = "/" + struct + "//" + chain + "/{0}".format(var_pdb_pos[j])
# Calculate distance
distance = pymol.cmd.distance('tmp',pos_init,pos_end,mode=4)
distances[i][j] = distance
distances[j][i] = distance
# Calculate wap
norm_freq_i = float(var_freqs[i]**3) / float(2**3 + var_freqs[i]**3)
norm_freq_j = float(var_freqs[j]**3) / float(2**3 + var_freqs[j]**3)
waps[i][j] = norm_freq_i * norm_freq_j * math.exp(-float(distance**2) / float((2*(soft_cutoff**2))))
# Save distances in a file
distance_file = outpath + 'distances/' + pdb.name + '_distances.txt'
fdispdb = open(distance_file, 'w')
fdispdb.write("Pos:\t" + '\t'.join(str(x) for x in var_up_pos) + '\n')
posd = 0
for d in distances:
fdispdb.write(str(var_up_pos[posd]) + "\t" + '\t'.join(str(x) for x in d) + '\n')
posd += 1
fdispdb.close()
return (distances, waps)
def check_pdb_coverage(pdbobj, UPseq, structure, outpath, pdbsource):
pdb = copy.copy(pdbobj)
# Read information from pdb object
pdbID = pdb.name
chain = pdb.chain
# Return if chain is not specified
if chain == '@':
return None
# Check included chains
chains = chain.split("/")
print "CHAINS: "
incchains = [ch.id for ch in structure[0] if ch.id in chains]
print incchains
if not incchains:
return None
else:
chain = incchains[0]
PDBseq = ""
pdbPositions = []
for res in structure[0][chain]:
if is_aa(res):
pdbPositions.append(res.id[1])
PDBseq = PDBseq + seq1(res.resname)
# Convert sequence, variants positions and variants AAs to UP-PDB format
UP_PDB_pos = [x - pdb.start for x in pdb.var_up_pos]
UP_PDBseq = UPseq[pdb.start-1:pdb.end]
UP_PDBseq = string.replace(UP_PDBseq,'U','X')
# Align UP-PDB and PDB sequences
alignment = pairwise2.align.globalds(UP_PDBseq, PDBseq, matlist.blosum62, -20, -0.5)
print alignment
# Index all AAs in alignment
seqUP_aa_index = [i for i, x in enumerate(alignment[0][0]) if x != '-']
seqPDB_aa_index = [i for i, x in enumerate(alignment[0][1]) if x != '-']
# Retrieve variants positions and AAs in alignment
aln_var_pos = [seqUP_aa_index[i] for i in UP_PDB_pos if alignment[0][1][seqUP_aa_index[i]] != '-']
alnUP_var = [alignment[0][0][i] for i in aln_var_pos]
alnPDB_var = [alignment[0][1][i] for i in aln_var_pos]
if '-' in alnPDB_var:
print "In here"
return None
# Convert variant positions and AAs to PDB sequence
seqPDB_pos = [i for i, x in enumerate(seqPDB_aa_index) if x in aln_var_pos]
seqUP_pos = [i+pdb.start for i, x in enumerate(seqUP_aa_index) if x in aln_var_pos]
seqPDB_var_pos = [pdbPositions[i] for i in seqPDB_pos]
seqPDB_var = [PDBseq[i] for i in seqPDB_pos]
# Sorted variants according to the variant position
sorted_idx = [i[0] for i in sorted(enumerate(pdb.var_up_pos), key=lambda x:x[1])]
sorted_variants = []
sorted_positions = []
sorted_projects = []
sorted_freqs = []
for i in sorted_idx:
if pdb.var_up_pos[i] in seqUP_pos:
sorted_variants += [pdb.variants[i]]
sorted_positions += [pdb.var_up_pos[i]]
sorted_projects += [pdb.project[i]]
sorted_freqs += [pdb.freq[i]]
# Update PDB object with updated variant positions
pdb.var_pdb_pos = seqPDB_var_pos
pdb.var_up_pos = sorted_positions
pdb.variants = sorted_variants
pdb.project = sorted_projects
pdb.freq = sorted_freqs
pdb.num_variants = len(pdb.var_pdb_pos)
pdb.name = structure.id
pdb.chain = chain
# Calculate distances to the updated PDB
distances, wap = pdb_distances(pdb, pdbPositions, outpath, pdbsource)
pdb.distances = distances
pdb.wap = wap
return pdb
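# create_clusters: greedily groups variant positions whose pairwise distances fall within
# the cutoff, keeping parallel lists of sequence positions (var_up_pos), structure
# positions (var_pdb_pos) and summed wap scores per cluster.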
def create_clusters(pdb, cutoff):
# Retrieve list of variants
var_pdb_pos = pdb.var_pdb_pos
var_up_pos = sorted(pdb.var_up_pos)
distances = pdb.distances
waps = pdb.wap
# Number of variants
numvar = len(distances)
# Create cluster
clusters_up = []
clusters_pdb = []
clusters_wap = []
for i in range(0,numvar):
for j in range(i+1,numvar):
added = 0
if distances[i][j] <= cutoff:
if not clusters_up:
clusters_up.append([var_up_pos[i], var_up_pos[j]])
clusters_pdb.append([var_pdb_pos[i], var_pdb_pos[j]])
clusters_wap.append(waps[i][j])
else:
for cl in range(0, len(clusters_up)):
c = clusters_up[cl]
if var_up_pos[i] in c and var_up_pos[j] in c:
added = 1
continue
elif var_up_pos[i] in c:
idx = [id for id, x in enumerate(var_up_pos) if x in c and x is not var_up_pos[j]]
in_cluster = [x for x in idx if distances[j][x] <= cutoff]
if set(in_cluster) == set(idx):
clusters_up[cl].append(var_up_pos[j])
clusters_pdb[cl].append(var_pdb_pos[j])
clusters_wap[cl] += waps[i][j]
added = 1
elif var_up_pos[j] in c:
idx = [id for id, x in enumerate(var_up_pos) if x in c and x is not var_up_pos[i]]
in_cluster = [x for x in idx if distances[i][x] <= cutoff]
if set(in_cluster) == set(idx):
clusters_up[cl].append(var_up_pos[i])
clusters_pdb[cl].append(var_pdb_pos[i])
clusters_wap[cl] += waps[i][j]
added = 1
if not added:
clusters_up.append([var_up_pos[i], var_up_pos[j]])
clusters_pdb.append([var_pdb_pos[i], var_pdb_pos[j]])
clusters_wap.append(waps[i][j])
# Get out PyMol
# pymol.cmd.quit()
return (clusters_up, clusters_pdb, clusters_wap)
|
UTF-8
|
Python
| false | false | 11,622 |
py
| 4 |
variants_functions.py
| 3 | 0.542592 | 0.535192 | 0 | 330 | 34.218182 | 156 |
tominozh/GalleryProject
| 10,746,008,206,728 |
2bbf0f2690faeb33fd4c41399f601bf238de1d30
|
963204c1f74dbcbd843ec216475b9c034d7dac56
|
/gallerydb_V6/tickets/tickets_year_to_mysql.py
|
f9d46621fdd5361d9ff62f2e9a525046cc962ee3
|
[] |
no_license
|
https://github.com/tominozh/GalleryProject
|
0b44c90641fbe059733e82a5518b93bf68352f83
|
6967a3300b4e1c80dfbe2dfa9f625b613912cff7
|
refs/heads/master
| 2017-10-08T08:12:53.330838 | 2017-06-23T09:40:32 | 2017-06-23T09:40:32 | 81,877,313 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 30 22:45:53 2017
@author: Tomas & Aoibh
"""
import csv
import MySQLdb
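# Reads ';'-separated rows from tickets.csv and inserts them into the Ticket table via MySQLdb.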
def read_csv():
    db = MySQLdb.connect(host="127.0.0.1",user="root",passwd="",db="artgallery")
    mCursor = db.cursor()
    insert = '''INSERT INTO Ticket(Ticket_Visitor_ID,Ticket_TimeStamp) VALUES (%s,%s);'''
    tickets ="tickets.csv"
    #order by second column (x[1]) (timestamp)
    data = sorted(tickets,key = lambda x:x[1],reverse = False)
    with open(tickets, 'rb') as csvfile:
        mReader = csv.reader(csvfile, delimiter=';')
        for x in mReader:
            mCursor.execute(insert,x)
db = MySQLdb.connect(host="127.0.0.1",user="root",passwd="",db="artgallery")
mCursor = db.cursor()
insert = '''INSERT INTO Ticket(Ticket_Visitor_ID,Ticket_TimeStamp) VALUES (%s,%s);'''
read_csv()
mCursor.close()
db.commit()
db.close()
|
UTF-8
|
Python
| false | false | 900 |
py
| 24 |
tickets_year_to_mysql.py
| 22 | 0.614444 | 0.584444 | 0 | 34 | 25.117647 | 86 |
Kawser-nerd/CLCDSA
| 9,397,388,464,356 |
8fe6b15aaf6e731b9262760f10ac4ed31509ff9d
|
54f352a242a8ad6ff5516703e91da61e08d9a9e6
|
/Source Codes/AtCoder/agc004/D/4664663.py
|
2e96bed94defbe237a820b51cdbe7bf1bf553e19
|
[] |
no_license
|
https://github.com/Kawser-nerd/CLCDSA
|
5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb
|
aee32551795763b54acb26856ab239370cac4e75
|
refs/heads/master
| 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
sys.setrecursionlimit(10**7)
INF = 10 ** 18
MOD = 10 ** 9 + 7
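# One-line stdin helpers used below: LI reads a line of ints, LI_ the same shifted to
# 0-based, LF floats, LS raw tokens, II a single int, SI a raw line.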
def LI(): return [int(x) for x in sys.stdin.readline().split()]
def LI_(): return [int(x) - 1 for x in sys.stdin.readline().split()]
def LF(): return [float(x) for x in sys.stdin.readline().split()]
def LS(): return sys.stdin.readline().split()
def II(): return int(sys.stdin.readline())
def SI(): return input()
from collections import defaultdict
def main():
    N, K = LI()
    A = LI_()
    edges = defaultdict(list)
    for i, a in enumerate(A[1:], 1):
        edges[a].append(i)
    global ans
    ans = 0
    if A[0] != 0:
        ans += 1
    def DFS(v, parent):
        global ans
        height = 0
        for to in edges[v]:
            height = max(height, DFS(to, v))
        if parent != 0 and height >= K - 1:
            height = -1
            ans += 1
        return height + 1
    DFS(0, 0)
    return ans
print(main())
|
UTF-8
|
Python
| false | false | 952 |
py
| 63,031 |
4664663.py
| 40,589 | 0.528361 | 0.50105 | 0 | 37 | 23.783784 | 68 |
jinxmirror13/bnp-vae
| 9,234,179,722,720 |
6a4a764b096e5f02a7ffa768ade84d7b2983a793
|
b6fc764629f7779e9a0bdb35b377d83db8333f9e
|
/src/var_inf.py
|
09fb3ac5555fb57c435efb9f61210caafd5bd2b6
|
[] |
no_license
|
https://github.com/jinxmirror13/bnp-vae
|
83b15afd67e4483cdbde0e0f96d1d0c4684a2a20
|
b0bcbb02c2cef6faba697583fc09d285547ad524
|
refs/heads/master
| 2022-11-30T00:27:27.947368 | 2020-08-06T15:50:15 | 2020-08-06T15:50:15 | 278,445,959 | 0 | 0 | null | true | 2020-07-09T18:52:28 | 2020-07-09T18:52:27 | 2020-06-18T14:43:28 | 2018-03-09T04:24:16 | 1,592 | 0 | 0 | 0 | null | false | false |
from util import *
from copy import deepcopy
from scipy.special import digamma
from datetime import datetime
import sys
from node import Node
from util import *
import cPickle
from sklearn.cluster import MiniBatchKMeans
from sklearn.metrics import silhouette_samples
#######################################################
### Variational Inference Object
#
#
#######################################################
class VarInf(object):
def __init__(self):
# initialize variational parameters
self.root = None
self.decay_coeff = 0.0
# Frame = x?? FIXME
self.vidid_frameid_to_idx = []
self.vidid_to_idx = []
# kmeans variable
self.kmeans_models = {}
self.kmeans_labels = {}
self.kmeans_ss = {}
@staticmethod
def weighted_kmeans(data, weights, K):
"""
Calculate the centre points (K of them) for the data set
Assumes points surround center [0]*D, relies on given weights
Params:
data: uhh... the data ?
weights: given weights of the points for this procedure
K: Number of centres
Returns:
centers: matrix [K x D] of selected data points that are "centres" ? mediods? FIXME
"""
N = np.size(data, 0) # Number of data points
D = np.size(data, 1) # Number of features
# initialize centers
centers = np.zeros(shape=(K, D)) # Empty array to hold multivariate centers
init_weights = weights / np.sum(weights) # Normalise the initial weights
closest_cluster_dist = np.ones(shape=N) / N # Random decimal initialisation? FIXME
# For K rounds, select a data point based on the max weighted distance to be centre
for k in range(K):
idx = np.argmax(VarInf.normalize(init_weights * closest_cluster_dist))
centers[k, :] = data[idx, :] # Use selected data point as centre
for n in range(N):
# Redefine distances with respect to next cluster center (always [0]*D)
closest_cluster_dist[n] = np.min(np.linalg.norm(data[n, :] - centers[:k+1, :], axis=1))
return centers
def get_phi_leaves(self, idx):
"""
Iterates through the tree to fetch leaf nodes and their phi values (??) FIXME
Params:
idx: An index
Returns:
result: [dictionary] Map of leaf nodes to respective phi[idx][0]
"""
result = {}
unvisited_nodes = [self.root]
while len(unvisited_nodes) > 0:
next_node = unvisited_nodes[0]
unvisited_nodes = unvisited_nodes[1:]
if next_node.isLeafNode:
result[next_node] = next_node.phi[idx][0]
else:
unvisited_nodes = unvisited_nodes + next_node.children
return result
def get_true_path_mean(self, x_annot_batch):
"""
Do standard VAE?? It returns a list [x_annot_batch] of list
[LATENT_CODE_SIZE] of zeros... FIXME
Params:
x_annot_batch: ?? FIXME
Returns:
true_path_mu_batch: List [x_annot_batch] of list
[LATENT_CODE_SIZE] of zeros
"""
true_path_mu_batch = []
indices = []
for x_annot in x_annot_batch:
# vidid: FIXME
# frameid: FIXME
(vidid, frameid) = x_annot
# standard VAE
true_path_mu_batch.append(np.zeros(shape=LATENT_CODE_SIZE))
continue # So this skips the rest of the loop code??
if len(self.vidid_frameid_to_idx) == 0:
# no paths initiated
true_path_mu_batch.append(np.zeros(shape=LATENT_CODE_SIZE))
else:
# Paths initiated
idx = self.vidid_frameid_to_idx.index((vidid, frameid))
phi = self.get_phi_leaves(idx)
keys = phi.keys()
values = VarInf.normalize(phi.values())
sample_path_idx = np.random.choice(len(values), p=values)
indices.append(sample_path_idx)
true_path_mu_batch.append(keys[sample_path_idx].alpha)
print indices
return np.asarray(true_path_mu_batch)
def get_matrix_from_dict(self, latent_codes):
"""
Get matrix from
Params:
latent_codes:
Returns:
result:
"""
# If vidid_to_idx is empty,
# then
if len(self.vidid_to_idx) == 0:
for vidid in latent_codes:
self.vidid_to_idx.append(vidid)
for frameid in latent_codes[vidid]:
self.vidid_frameid_to_idx.append((vidid, frameid))
result = []
for vidid in latent_codes:
for frameid in latent_codes[vidid]:
result.append(latent_codes[vidid][frameid])
return np.asarray(result)
def update_variational_parameters(self, latent_codes):
"""
TODO - Self-editing of object variables
Params:
latent_codes:
Returns:
Nothing
"""
print 'Performing variational inference...'
print 'Memory usage: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
latent_codes_matrix = self.get_matrix_from_dict(latent_codes)
if self.root is None:
# initialize tree
self.root = Node('0', len(self.vidid_to_idx), len(self.vidid_frameid_to_idx), None, \
LATENT_CODE_SIZE, GAMMA)
BF = 4
l1_nodes = []
for n in range(BF):
node_tmp = Node('0-'+str(n), len(self.vidid_to_idx), len(self.vidid_frameid_to_idx), \
self.root, LATENT_CODE_SIZE, GAMMA)
l1_nodes.append(node_tmp)
l2_nodes = []
for n in range(BF*BF):
parent = l1_nodes[n/BF]
node_tmp = Node(parent.node_id+'-'+str(n%BF), len(self.vidid_to_idx), \
len(self.vidid_frameid_to_idx), parent, LATENT_CODE_SIZE, GAMMA)
l2_nodes.append(node_tmp)
l3_nodes = []
for n in range(BF*BF*BF):
parent = l2_nodes[n/BF]
node_tmp = Node(parent.node_id+'-'+str(n%BF), len(self.vidid_to_idx), \
len(self.vidid_frameid_to_idx), parent, LATENT_CODE_SIZE, GAMMA)
l3_nodes.append(node_tmp)
# mark all internal nodes
self.root.isLeafNode = False
self.root.children = l1_nodes
for i, l in enumerate(l1_nodes):
l.isLeafNode = False
l.children = l2_nodes[BF*i: BF*(i+1)]
for i, l in enumerate(l2_nodes):
l.isLeafNode = False
l.children = l3_nodes[BF*i: BF*(i+1)]
# precompute gamma / phi mappings
self.phi_vidid_list = []
for i in range(len(self.vidid_to_idx)):
vidid = self.vidid_to_idx[i]
self.phi_vidid_list.append(filter(lambda idx: \
self.vidid_frameid_to_idx[idx][0]==vidid, range(len(self.vidid_frameid_to_idx))))
#initialize phi to one-hot
alpha_leaves = self.get_alpha_leaves()
alpha_keys = alpha_leaves.keys()
alpha_values = alpha_leaves.values()
phi_init = np.zeros(shape=(len(alpha_keys), len(self.vidid_frameid_to_idx)))
for i, z in enumerate(latent_codes_matrix):
phi_init[np.argmin(np.linalg.norm(z - alpha_values, axis=1))][i] = 1.0
self.initialize_phi(self.root, dict(zip(alpha_keys, phi_init)))
for iteration in range(1):
self.compute_sigma_alpha(latent_codes_matrix)
self.compute_gamma()
self.compute_phi(latent_codes_matrix)
self.print_phi(self.root)
split = self.split_nodes(self.root, latent_codes_matrix, \
STDEV_THR + 100.0 * np.exp(-1.0*self.decay_coeff))
if split:
self.decay_coeff = 0.0
self.compute_phi(latent_codes_matrix)
else:
self.decay_coeff += 1.0
self.merge_nodes(self.root, latent_codes_matrix, \
STDEV_THR + 100.0 * np.exp(-1.0*self.decay_coeff))
def print_stdev(self, node, latent_codes_matrix):
"""
TODO
Params:
node:
latent_codes_matrix:
Returns:
Nothing
"""
if node.isLeafNode:
stdev = np.sqrt(np.linalg.norm(np.sqrt(node.phi) * (latent_codes_matrix - node.alpha)) \
/ np.sum(node.phi))
else:
for c in node.children:
self.print_stdev(c, latent_codes_matrix)
def print_all_param_values(self, node):
print node.node_id
print node.alpha
print node.sigmasqr_inv
print node.phi
print node.gamma
if not(node.isLeafNode):
for c in node.children:
self.print_all_param_values(c)
def initialize_phi(self, node, init_values):
"""
TODO
Params:
node:
init_values:
Returns:
Nothing
"""
if node.isLeafNode:
node.phi = np.reshape(init_values[node.node_id], (len(self.vidid_frameid_to_idx), 1))
else:
for c in node.children:
self.initialize_phi(c, init_values)
def print_phi(self, node):
if node.isLeafNode:
print 'phi', node.node_id, np.mean(node.phi)
else:
for c in node.children:
self.print_phi(c)
def print_gamma(self, node):
if node.isLeafNode:
if node.gamma is None:
print 'gamma', node.node_id, node.gamma, node.gamma
else:
print 'gamma', node.node_id, np.mean(node.gamma, axis=0), node.gamma
else:
for c in node.children:
self.print_gamma(c)
def merge_nodes(self, node, latent_codes_matrix, split_thr):
"""
TODO
Params:
node:
latent_codes_matrix:
split_thr:
Returns:
Nothing
"""
print 'merge_nodes', node.node_id
if node.isLeafNode:
if np.mean(node.phi) < 10**-3:
print 'Removing node ', node.node_id
node.parent.children.remove(node)
else:
for c in node.children:
self.merge_nodes(c, latent_codes_matrix, split_thr)
if len(node.children) == 0:
node.isLeafNode = True
elif len(node.children) == 1:
print 'Merge 1 child'
node.isLeafNode = node.children[0].isLeafNode
node.alpha = deepcopy(node.children[0].alpha)
node.sigmasqr_inv = node.children[0].sigmasqr_inv
node.phi = deepcopy(node.children[0].phi)
node.gamma = deepcopy(node.children[0].gamma)
node.children = deepcopy(node.children[0].children)
for c in node.children:
c.parent = node
def split_nodes(self, node, latent_codes_matrix, split_thr):
"""
TODO
Params:
node:
latent_codes_matrix:
split_thr:
Returns:
result:
"""
print 'split_nodes', node.node_id
if node.isLeafNode:
# compute variance
if np.mean(node.phi) < 10**-2:
return False
stdev = np.sqrt(np.linalg.norm(np.sqrt(node.phi) * (latent_codes_matrix - node.alpha)) \
/ np.sum(node.phi))
print node.node_id, stdev, split_thr
if stdev > split_thr:
best_K = args.num_splits
node.isLeafNode = False
new_centers = VarInf.weighted_kmeans(latent_codes_matrix, node.phi[:, 0], best_K)
for k in range(best_K):
new_node = Node(node.node_id + '-' + str(k), \
len(self.vidid_to_idx), len(self.vidid_frameid_to_idx), node, \
LATENT_CODE_SIZE, GAMMA)
new_node.alpha = new_centers[k, :]
new_node.sigmasqr_inv = node.sigmasqr_inv
new_node.phi = node.phi / best_K
node.children.append(new_node)
return True
else:
result = False
for c in node.children:
result = self.split_nodes(c, latent_codes_matrix, split_thr) or result
return result
def compute_sigma_alpha_node(self, node, latent_codes_matrix):
"""
TODO
Params:
node:
latent_codes_matrix:
Returns:
Nothing
"""
if node.isLeafNode:
sum_phi = np.sum(node.phi)
sum_phi_z = np.sum(node.phi * latent_codes_matrix, axis=0)
node.sigmasqr_inv = SIGMA_B_sqrinv + sum_phi * SIGMA_Z_sqrinv
try:
node.alpha = deepcopy(node.parent.alpha)
except AttributeError:
node.alpha = deepcopy(ALPHA)
node.alpha = (node.alpha * SIGMA_B_sqrinv + sum_phi_z * SIGMA_Z_sqrinv) \
/ node.sigmasqr_inv
else:
# recursively find alpha's and sigma's of children nodes
for c in node.children:
self.compute_sigma_alpha_node(c, latent_codes_matrix)
node.sigmasqr_inv = (1.0 + len(node.children)) * SIGMA_B_sqrinv
try:
node.alpha = deepcopy(node.parent.alpha)
except AttributeError:
node.alpha = deepcopy(ALPHA)
for c in node.children:
node.alpha += c.alpha
node.alpha = node.alpha * SIGMA_B_sqrinv / node.sigmasqr_inv
def compute_sigma_node(self, node, latent_codes_matrix):
"""
TODO
Params:
node:
latent_codes_matrix:
Returns:
Nothing
"""
if node.isLeafNode:
sum_phi = np.sum(node.phi)
sum_phi_z = np.sum(node.phi * latent_codes_matrix, axis=0)
node.sigmasqr_inv = SIGMA_B_sqrinv + sum_phi * SIGMA_Z_sqrinv
print node.node_id, SIGMA_B_sqrinv, sum_phi, SIGMA_Z_sqrinv, node.sigmasqr_inv
else:
# recursively find alpha's and sigma's of children nodes
for c in node.children:
self.compute_sigma_node(c, latent_codes_matrix)
node.sigmasqr_inv = (1.0 + len(node.children)) * SIGMA_B_sqrinv
def compute_alpha_node(self, node, latent_codes_matrix):
"""
TODO
Params:
node:
latent_codes_matrix:
Returns:
Nothing
"""
if node.isLeafNode:
sum_phi = np.sum(node.phi)
sum_phi_z = np.sum(node.phi * latent_codes_matrix, axis=0)
try:
node.alpha = deepcopy(node.parent.alpha)
except AttributeError:
node.alpha = deepcopy(ALPHA)
node.alpha = (node.alpha * SIGMA_B_sqrinv + sum_phi_z * SIGMA_Z_sqrinv) \
/ node.sigmasqr_inv
else:
# recursively find alpha's and sigma's of children nodes
for c in node.children:
self.compute_alpha_node(c, latent_codes_matrix)
try:
node.alpha = deepcopy(node.parent.alpha)
except AttributeError:
node.alpha = deepcopy(ALPHA)
for c in node.children:
node.alpha += c.alpha
node.alpha = node.alpha * SIGMA_B_sqrinv / node.sigmasqr_inv
def compute_sigma_alpha(self, latent_codes_matrix):
"""
TODO
Params:
latent_codes_matrix:
Returns:
Nothing
"""
self.compute_sigma_node(self.root, latent_codes_matrix)
self.compute_alpha_node(self.root, latent_codes_matrix)
@staticmethod
def digamma_add0(gamma_sum, new_gamma):
"""
TODO
Params:
gamma_sum:
new_gamma:
Returns:
calculation... TODO
"""
return gamma_sum + digamma(new_gamma[:, 0]) - digamma(np.sum(new_gamma, axis=1))
@staticmethod
def digamma_add1(gamma_sum, new_gamma):
"""
TODO
Params:
gamma_sum:
new_gamma:
Returns:
calculation... TODO
"""
if gamma_sum is None:
return digamma(new_gamma[:, 1]) - digamma(np.sum(new_gamma, axis=1))
return gamma_sum + digamma(new_gamma[:, 1]) - digamma(np.sum(new_gamma, axis=1))
def compute_phi_node(self, node, latent_codes_matrix, gamma_sum_on, gamma_sum_before):
"""
TODO
Params:
node:
latent_codes_matrix:
gamma_sum_on:
gamma_sum_before:
Returns:
Nothing
"""
if node.isLeafNode:
scaled_dist = 0.5 * SIGMA_Z_sqrinv * \
(np.linalg.norm(latent_codes_matrix - node.alpha, axis=1) + \
1.0 / node.sigmasqr_inv)
for i in range(len(self.vidid_frameid_to_idx)):
vidid, frameid = self.vidid_frameid_to_idx[i]
node.phi[i] = gamma_sum_on[node][self.vidid_to_idx.index(vidid)] + \
gamma_sum_before[node][self.vidid_to_idx.index(vidid)] - \
scaled_dist[i]
else:
for c in node.children:
self.compute_phi_node(c, latent_codes_matrix, gamma_sum_on, gamma_sum_before)
def get_phi_min(self, node):
"""
TODO
Params:
node:
Returns:
phi_min:
"""
if node.isLeafNode:
return node.phi
else:
phi_min = -np.inf * np.ones(shape=(len(self.vidid_frameid_to_idx), 1))
for c in node.children:
phi_min = np.maximum(phi_min, self.get_phi_min(c))
return phi_min
def get_phi_sum(self, node):
"""
Recursively sum phi over all leaf nodes in the subtree rooted at node.
Params:
node:
Returns:
phi_sum:
"""
if node.isLeafNode:
return node.phi
else:
phi_sum = np.zeros(shape=(len(self.vidid_frameid_to_idx), 1))
for c in node.children:
phi_sum += self.get_phi_sum(c)
return phi_sum
def normalize_phi(self, node, phi_sum):
"""
Divide each leaf node's phi by phi_sum so the leaf assignments are normalized.
Params:
node:
phi_sum:
Returns:
Nothing
"""
if node.isLeafNode:
node.phi /= phi_sum
else:
for c in node.children:
self.normalize_phi(c, phi_sum)
def exponentiate_phi(self, node, offset):
"""
TODO
Params:
node:
offset:
Returns:
Nothing
"""
if node.isLeafNode:
node.phi = np.exp((node.phi - offset))
else:
for c in node.children:
self.exponentiate_phi(c, offset)
def compute_gamma_sum_on(self, node, curr_path_sum, result):
"""
TODO
Params:
node:
curr_path_sum:
result:
Returns:
result:
"""
if not(node.parent is None):
curr_path_sum = VarInf.digamma_add0(curr_path_sum, node.gamma)
if node.isLeafNode:
result.update({node: curr_path_sum})
return result
else:
for c in node.children:
result = self.compute_gamma_sum_on(c, curr_path_sum, result)
return result
def compute_gamma_sum_before(self, node, curr_path_before, result):
"""
TODO
Params:
node:
curr_path_sum:
result:
Returns:
digamma update? TODO
"""
if node.isLeafNode:
result.update({node: curr_path_before})
return VarInf.digamma_add1(None, node.gamma), result
else:
sum_children = 0.0
for c in node.children:
sum_c, result = self.compute_gamma_sum_before(c, \
curr_path_before + sum_children, result)
sum_children += sum_c
#return curr_path_before + sum_children, result
return VarInf.digamma_add1(sum_children, node.gamma), result
def compute_phi(self, latent_codes_matrix):
"""
TODO
Params:
latent_codes_matrix:
Returns:
Nothing
"""
gamma_sum_on = self.compute_gamma_sum_on(self.root, \
np.zeros(shape=len(self.vidid_to_idx)), {})
_, gamma_sum_before = self.compute_gamma_sum_before(self.root, \
np.zeros(shape=len(self.vidid_to_idx)), {})
self.compute_phi_node(self.root, latent_codes_matrix, gamma_sum_on, gamma_sum_before)
phi_min = self.get_phi_min(self.root)
self.exponentiate_phi(self.root, phi_min)
phi_sum = self.get_phi_sum(self.root)
self.normalize_phi(self.root, phi_sum)
phi_sum = self.get_phi_sum(self.root)
def compute_gamma_node(self, node, sum_phi_before):
"""
TODO
Params:
node:
sum_phi_before:
Returns:
sum_phi_curr and sum_phi_children:
"""
if node.isLeafNode:
if node.parent is None:
node.gamma = None
return None
sum_phi_curr = np.zeros(shape=len(self.vidid_to_idx))
for i in range(len(self.vidid_to_idx)):
phi_vidid = self.phi_vidid_list[i]
sum_phi_curr[i] = np.sum(node.phi[phi_vidid])
node.gamma[:, 0] = 1.0 + sum_phi_curr
node.gamma[:, 1] = GAMMA + sum_phi_before
return sum_phi_curr
else:
sum_phi_children = np.zeros(shape=len(self.vidid_to_idx))
for c in node.children:
sum_phi_children += self.compute_gamma_node(c, sum_phi_before + sum_phi_children)
node.gamma[:, 0] = 1.0 + sum_phi_children
node.gamma[:, 1] = GAMMA + sum_phi_before
return sum_phi_children
def compute_gamma(self):
"""
TODO
Params:
None
Returns:
Compute Gamma node...
"""
self.compute_gamma_node(self.root, np.zeros(shape=len(self.vidid_to_idx)))
@staticmethod
def normalize(p):
"""
TODO
Params:
p:
Returns:
calculation... TODO
"""
p = np.asarray(p)
s = np.sum(p)
if s > 0:
return p/s
else:
return VarInf.normalize(np.ones(shape=np.shape(p)))
def write_gamma(self, filename):
"""
TODO
Params:
filename:
Returns:
Nothing
"""
with open(filename, 'w') as f:
for vidid in self.gamma:
f.write(vidid + '\t' + unicode(self.gamma[vidid]) + '\n')
def write_assignments(self, filename):
"""
TODO
Params:
filename:
Returns:
Nothing
"""
with open(filename, 'w') as f:
for vidid in self.phi:
for frameid in self.phi[vidid]:
f.write(vidid + '\t' + frameid + '\t' + \
unicode(np.argmax(self.phi[vidid][frameid])) + '\n')
def write_alpha_node(self, node, fileptr):
"""
TODO
Params:
node:
fileptr:
Returns:
Nothing
"""
fileptr.write(node.node_id + ' ' + \
' '.join(map(lambda x: str(np.round(x, 2)), node.alpha)) + \
'\n')
if not(node.isLeafNode):
for c in node.children:
self.write_alpha_node(c, fileptr)
def write_alpha(self, filename):
"""
TODO
Params:
filename:
Returns:
Nothing
"""
fileptr = open(filename, 'w')
self.write_alpha_node(self.root, fileptr)
fileptr.close()
def write_sigma_node(self, node, fileptr):
"""
TODO
Params:
node:
fileptr:
Returns:
Nothing
"""
fileptr.write(node.node_id + ' ' + \
' '.join(map(lambda x: str(np.round(x, 2)), [node.sigmasqr_inv])) + \
'\n')
if not(node.isLeafNode):
for c in node.children:
self.write_sigma_node(c, fileptr)
def write_sigma(self, filename):
"""
TODO
Params:
filename:
Returns:
Nothing
"""
fileptr = open(filename, 'w')
self.write_sigma_node(self.root, fileptr)
fileptr.close()
def get_nodes_list(self, node, result):
"""
TODO
Params:
node:
result:
Returns:
result
"""
if node.isLeafNode:
result.append(node)
else:
result.append(node)
for c in node.children:
result = self.get_nodes_list(c, result)
return result
def write_nodes(self, filename):
"""
TODO
Params:
filename:
Returns:
Nothing
"""
nodes_list = self.get_nodes_list(self.root, [])
fileptr = open(filename, 'wb')
cPickle.dump(nodes_list, fileptr)
fileptr.close()
def get_alpha_leaves_node(self, node, result):
"""
TODO
Params:
node:
result:
Returns:
result
"""
print node.node_id, len(result)
if node.isLeafNode:
result.update({node.node_id: node.alpha})
return result
else:
for c in node.children:
result.update(self.get_alpha_leaves_node(c, result))
return result
def get_alpha_leaves(self):
"""
TODO
Params:
None
Returns:
Leaf node
"""
return self.get_alpha_leaves_node(self.root, {})
|
UTF-8
|
Python
| false | false | 23,081 |
py
| 11 |
var_inf.py
| 9 | 0.585503 | 0.580737 | 0 | 829 | 26.841978 | 95 |
adelineROATTA/ThreaDNA
| 9,363,028,748,444 |
80af646df84309a374279151ae1176884bd1851f
|
15e5b7b0f5cfbf1beb69e6df9416e4f0fe8445f8
|
/seqmotifs.py
|
c5b8224c76f9e06613cb43f58608dd76b4d82ed0
|
[] |
no_license
|
https://github.com/adelineROATTA/ThreaDNA
|
a75d0f8e6fbee68d59520242d53aa596f3d19687
|
3c8e793d443f5b4ce9922eceb273e2d987d4f571
|
refs/heads/master
| 2021-01-11T16:31:58.155440 | 2017-09-02T15:51:23 | 2017-09-02T15:51:23 | 80,102,042 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# coding: utf8
import sys
import argparse
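# ThreaDNA helper (see the argparse description below): for each position along the input
# FASTA sequence, writes a BED-like line indicating whether the query nucleotide occurs at
# the chosen offset of the protein-DNA model.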
def seqpen(seqn,seq,char,pos,l,f,ref):
    i=0
    s=""
    while i<len(seq)-l+1:
        f.write(seqn+"\t"+str(ref+i)+"\t"+str(int(seq[i+pos-1]==char))+"\n")
        i+=1
def main(args):
    if args.reference==None:
        args.reference=(args.l+1)/2
    f=open(args.fi,"r")
    f2=open(args.fi.split("/")[-1].split(".")[0]+"_"+args.char+str(args.pos)+".bed","w")
    f2.write("Energy penalties for "+args.char+" in position "+str(args.pos)+"/"+str(args.l)+"\n")
    i=0
    li=f.read().split("\n")
    f.close()
    seq,seqn,si=[],[],[]
    while i<len(li):
        if li[i] and li[i][0]=='>':
            seqn.append(li[i][1:].replace(" ","_")) #replace whitespaces by underscores for further data utilisation (plot)
            si.append(i)
        i+=1
    si.append(i)
    i=0
    while i<len(si)-1:
        seq.append("".join(li[si[i]+1:si[i+1]]).upper())
        seqpen(seqn[i],seq[i],args.char,args.pos,args.l,f2,args.reference)
        i+=1
    f2.close()
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='ThreaDNA helper program, to identify the protein positions matching given sequence patterns. Penalty is given the chosen nucleotide and its position on the protein, and returns the positions of the protein where this pattern is matched. ') #program parser and help
    parser.add_argument('fi', metavar='fasta_file', type=str,help='FASTA file containing the sequence')
    parser.add_argument('char',metavar='nucleotide',type=str,help='Nucleotide query (A, C, G, or T)')
    parser.add_argument("pos",metavar="position",type=int, help="Position of the nucleotide in the protein-DNA model")
    parser.add_argument("l",metavar="length",type=int,help="Length of the protein-DNA complex (in nucleotides)")
    parser.add_argument("-r","--reference",type=int,help="Reference nucleotide for this protein-DNA model (default (length+1)/2)")
    args=parser.parse_args()
    main(args)
|
UTF-8
|
Python
| false | false | 1,985 |
py
| 35 |
seqmotifs.py
| 25 | 0.634761 | 0.62267 | 0 | 44 | 44.113636 | 314 |
judith600/advent_of_code_2020
| 335,007,467,574 |
604347663f4fdef30f31a45790370ccda5a97ff6
|
4fa8e983f98ad6757aee11dca749ab9a4a09d541
|
/advent/year2020/src/part_two/day_9/day_9.py
|
e0709a350b859ab996b8fdbb6f5c0f6652127ce0
|
[] |
no_license
|
https://github.com/judith600/advent_of_code_2020
|
0d824047338f160895a674c13f42a29e3c5023e6
|
8f78bd2d7e5e7b98c019e6d04357a0c82c58124f
|
refs/heads/main
| 2023-01-28T06:47:26.395377 | 2020-12-11T07:09:46 | 2020-12-11T07:09:46 | 315,248,625 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from copy import deepcopy
from typing import List
from file_opener import getInputFileLinesAsList
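# Advent of Code 2020, day 9 (per the repo path): find the first number that is not a sum
# of two of the preceding 25, then a contiguous run summing to that target (70639851 here);
# the final answer is min + max of that run.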
def getInputNumerical(fileName):
    numberStrings = getInputFileLinesAsList(fileName)
    return [int(line) for line in numberStrings]
def isCurrentNumberValid(current: int, scope: List[int]):
    for index, firstNum in enumerate(scope):
        for secondNum in scope[index + 1:]:
            if firstNum + secondNum == current:
                return True
    return False
def getCurrentScope(currentIndex):
    copy = deepcopy(numbers)
    return copy[currentIndex - 25: currentIndex]
def tryFromIndexOn(numbers, indexToStart):
    myIter = iter(numbers[indexToStart:])
    mySum = 0
    summandList = []
    while mySum < 70639851:
        follower = next(myIter)
        summandList.append(follower)
        mySum += follower
    if mySum == 70639851:
        return summandList
    else:
        return None
def findContiguousSet(numberList):
    for index, number in enumerate(numberList):
        resultList = tryFromIndexOn(numberList, index)
        if resultList is not None:
            return resultList
def verifyNumbers(numbers):
    for index, num in enumerate(numbers):
        if index < 25:
            continue
        myScope = getCurrentScope(index)
        isValid = isCurrentNumberValid(num, myScope)
        if not isValid:
            return num
if __name__ == '__main__':
    numbers = getInputNumerical("src/part_one/day_1/input")
    result = verifyNumbers(numbers)
    print(result)
    mySet = findContiguousSet(numbers)
    print(mySet)
    resultPartTwo = min(mySet) + max(mySet)
    print(resultPartTwo)
|
UTF-8
|
Python
| false | false | 1,653 |
py
| 23 |
day_9.py
| 22 | 0.664852 | 0.650938 | 0 | 64 | 24.828125 | 59 |
emiludenz/liveness_analysis
| 14,869,176,800,805 |
1f3b1f0deef3ee556b7ffaa4a6ff2ac306b89a4b
|
ce02b18411c2bd3fec07ca01fd662292654c462a
|
/main.py
|
075ac82cc9b0eccf86659d82a43da1902aa93ef7
|
[
"MIT"
] |
permissive
|
https://github.com/emiludenz/liveness_analysis
|
fdd0d18c966ac78c21c6611ab5a6e4185ff9923c
|
ce3fdb47be54767eb13eee032091471917b6bfc1
|
refs/heads/master
| 2022-12-02T17:38:17.025691 | 2020-08-20T10:33:29 | 2020-08-20T10:33:29 | 288,209,515 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from lib_live import *
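# Driver for the liveness analysis in lib_live (star-imported): get_kill builds the
# gen/kill-style sets and fp_iteration runs the fixed-point iteration over the example
# programs ex..ex7 below, each written as a numbered instruction list.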
def main():
gen_set = get_kill(ex7,True)
fp_set = fp_iteration(gen_set,True)
ex = ["0: LABEL begin",
"1: z := a+b",
"2: a := z+b",
"3: IF y < x THEN end ELSE begin",
"4: LABEL end"]
ex2 = ["1: z := 0",
"2: LABEL begin",
"3: a := x + 7",
"4: y := y + a",
"5: b := y % 3",
"6: x := x - b",
"7: c := x + b",
"8: z := z - c",
"9: IF y < x THEN end ELSE begin",
"10: LABEL end",
"11: RETURN z",
"12: GOTO end"]
ex3 = [
"1: a := 0",
"2: b := 1",
"3: z := 0",
"4: LABEL loop",
"5: IF n = z THEN end ELSE body",
"6: LABEL body",
"7: t := a + b",
"8: a := b",
"9: b := t",
"10: n := n - 1",
"11: z := 0",
"12: GOTO loop",
"13: LABEL end",
"14: RETURN a"]
ex4 = [
"1: x := CALL f(x,y+5)",
"2: M[x+8] := y+5+x",
"3: a := M[x+5]"
]
ex5 = [
"1: LABEL start",
"2: IF a < b THEN next ELSE swap",
"3: LABEL swap",
"4: t := a",
"5: a := b",
"6: b := t",
"7: LABEL next",
"8: z := 0",
"9: b := b % a",
"10: IF b = z THEN end ELSE start",
"11: LABEL end",
"12: RETURN a"
]
ex6 = [
"1: z := 0",
"2: LABEL begin",
"3: a := x + 7",
"4: y := y + a",
"5: b := y % 3",
"6: x := x - b",
"7: c := x + b",
"8: z := z - c",
"9: IF y < x THEN end ELSE begin",
"10: LABEL end",
"11: RETURN z"
]
ex7 = ["1: LABEL test",
"2: IF a = 0 THEN exit ELSE body",
"3: LABEL body",
"4: x := a - b",
"5: b := x / b",
"6: y := a - 5",
"7: a := y + a",
"8: GOTO test",
"9: LABEL exit",
"10: RETURN b"]
if __name__ == '__main__':
main()
|
UTF-8
|
Python
| false | false | 1,483 |
py
| 4 |
main.py
| 3 | 0.425489 | 0.354012 | 0 | 92 | 15.130435 | 36 |
mario5698/Course-2020-2021
| 6,871,947,719,002 |
b4e9dd5331fdc5b82c0befaeb196d2bd26cae46d
|
e332ab085f575fae376f5b43759c4b5c7fdad075
|
/S2AM/Code/Aprender python/UF-1-Resuelto/ICB0M03U01I03/act4.py
|
2179f8d05a98eb96fb59bb67765f1329980441d6
|
[] |
no_license
|
https://github.com/mario5698/Course-2020-2021
|
9bdfc3b09be56e78a9da17e358ff321c5174f338
|
35333297f4ff6f387296c55897221640ee605e4f
|
refs/heads/main
| 2023-04-26T09:45:29.961490 | 2021-05-28T15:16:42 | 2021-05-28T15:16:42 | 371,736,741 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import math
print ("Transport public :")
preu_transport=float(input ())
print ("KM :")
km=float(input ())
print ("Preu KM :")
preu_KM=float(input ())
preu_kma=km*preu_KM
minim= min([preu_transport,preu_kma ])
print(round(minim,2))
|
UTF-8
|
Python
| false | false | 233 |
py
| 342 |
act4.py
| 14 | 0.678112 | 0.67382 | 0 | 11 | 20.090909 | 38 |
sagarbrptr/library_management
| 4,028,679,324,166 |
4e2b8b288565baa291583dcd0d08d65e1036f67f
|
4d49137de5a8ecc194e9611a754902f90893accf
|
/proj1/app/migrations/0002_auto_20180723_1812.py
|
9aba99c08fd6ed5f311e99f77113ebfb2b1b4807
|
[] |
no_license
|
https://github.com/sagarbrptr/library_management
|
7bf85c856055426cb1784ab92e89aa0adddfe464
|
43971e03aca28ad3ae6a1d9c0112c750a48d992d
|
refs/heads/master
| 2020-03-24T17:29:31.031094 | 2018-07-30T11:06:34 | 2018-07-30T11:06:34 | 142,860,711 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-07-23 18:12
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    dependencies = [
        ('app', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='bookings',
            name='issue_date',
            field=models.DateTimeField(default=datetime.datetime(2018, 7, 23, 18, 12, 53, 572723)),
        ),
        migrations.AddField(
            model_name='bookings',
            name='return_date',
            field=models.DateTimeField(default=datetime.datetime(2018, 7, 30, 18, 12, 53, 572832)),
        ),
    ]
|
UTF-8
|
Python
| false | false | 710 |
py
| 9 |
0002_auto_20180723_1812.py
| 6 | 0.592958 | 0.511268 | 0 | 26 | 26.307692 | 99 |
zhudaxia666/alicomp
| 2,508,260,917,991 |
ed49eecd150c142c646924707597b7d36361ab61
|
86ec0dca6e8c8b642f813f4dbd0a54014d238c30
|
/zhu/xgboost_copy.py
|
4a2069fc4069aa365050fe3d34c60ac82d09ae29
|
[] |
no_license
|
https://github.com/zhudaxia666/alicomp
|
fd890413f669ccec5e4ddbadb73550b773ef9603
|
50e98f9db19837eed9ced1c24b46340ba4b32ee8
|
refs/heads/master
| 2020-04-30T06:47:20.289814 | 2019-03-20T06:52:46 | 2019-03-20T06:52:46 | 176,662,800 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/python
import numpy as np
import xgboost as xgb
import h5py
import os
import operator
# from keras.utils import to_categorical
import csv
import random
base_dir = "./../data"
path_training = os.path.join(base_dir, "training.h5")
path_test = os.path.join(base_dir, "test.h5")
path_validation = os.path.join(base_dir,"validation.h5/validation.h5")
# path_test = os.path.join(base_dir, "sample_test.h5", "sample_test.h5")
fid_training = h5py.File(path_training,'r')
fid_test = h5py.File(path_test,'r')
fid_validation = h5py.File(path_validation,'r')
def Test_data():
s1 = fid_test['sen1']
s2 = fid_test['sen2']
data=[]
for i in range(len(s1)):
part=[]
for chanel1 in range(s1.shape[3]):
for j1 in range(s1.shape[1]):
part.extend(s1[i,j1,:,chanel1])
for chanel2 in range(s2.shape[3]):
for j2 in range(s2.shape[1]):
part.extend(s2[i,j2,:,chanel2])
data.append(part)
print("数据转换成功")
return np.array(data)
def Data_change(train=True):
data=[]
labels=[]
if train:
s1 = fid_training['sen1']
s2 = fid_training['sen2']
label = fid_training['label']
index=random.sample(range(352366),2000)
else:
s1 = fid_validation['sen1']
s2 = fid_validation['sen2']
label = fid_validation['label']
index=random.sample(range(24119),2000)
for i in index:
part=[]
for chanel1 in range(s1.shape[3]):
for j1 in range(s1.shape[1]):
part.extend(s1[i,j1,:,chanel1])
for chanel2 in range(s2.shape[3]):
for j2 in range(s2.shape[1]):
part.extend(s2[i,j2,:,chanel2])
# item=list(label[i])
labels.append(label[i])
data.append(part)
print("数据转换成功")
return np.array(data),np.array(labels)
def onehot_tranIndex(label):
index_set=[]
# label=list(label)
for i in range(len(label)):
item=list(label[i])
index_set.append(item.index(1))
# write_csvfile(index_set)
# print(index_set)
return np.array(index_set)
def write_csvfile(preds):
one_hots = to_categorical(preds)
# print(one_hots)
csvfile=open('csvfile.csv','w',newline='')
write1=csv.writer(csvfile)
for i in one_hots:
write1.writerow(i)
csvfile.close()
def xgboost_class(train_data,train_label,valid_data,valid_label,test_data):
    params = {
        'booster': 'gbtree',
        # this is a multi-class problem (like handwritten digits 0-9), so the multi:softmax classifier is used
        'objective': 'multi:softmax',
        'num_class': 17,  # number of classes, used together with multi:softmax
        'gamma': 0.01,  # minimum loss needed for a further partition at a leaf; larger values make the model more conservative [0:]
        'max_depth': 6,  # depth of the trees [1:], typical values: 3-10
        # 'lambda': 450,  # L2 regularisation weight
        'subsample': 0.5,  # subsample of the training data; 0.5 randomly picks half of the instances (0:1], typical values: 0.5-1
        'colsample_bytree': 0.7,  # column sampling ratio when building each tree (0:1], typical values: 0.5-1
        # 'min_child_weight': 12,  # minimum number of features at a node
        'silent': 1,
        'eta': 0.01,  # works like a learning rate
        'seed': 710,
        'nthread': 4,  # number of CPU threads, adjust to the cores available
    }
plst = list(params.items())
    num_rounds = 20  # number of boosting iterations
    # xgtest = xgb.DMatrix(test)
    xgtrain = xgb.DMatrix(train_data, label=train_label)
    xgval = xgb.DMatrix(valid_data, label=valid_label)
    xgtest = xgb.DMatrix(test_data)
    # split into training and validation sets
    # xgtrain = xgb.DMatrix(train[:offset,:], label=labels[:offset])
    # xgval = xgb.DMatrix(train[offset:,:], label=labels[offset:])
    # return the training and validation error rates
    watchlist = [(xgtrain, 'train'), (xgval, 'val')]
    # train the model
    # when num_rounds is large, early_stopping_rounds stops training once accuracy has not improved for that many rounds
    model = xgb.train(plst, xgtrain, num_rounds, watchlist, early_stopping_rounds=100)
    # model.save_model('./model/xgb.model')  # save the trained model
preds = model.predict(xgtest,ntree_limit=model.best_iteration)
preds=[int(i) for i in preds]
return preds
# print(preds)
# print(preds)
# one_hots = to_categorical(preds).astype(np.int32)
# csvfile=open('accuracy.csv','w',newline='')
# write1=csv.writer(csvfile)
# for i in one_hots:
# write1.writerow(i)
# csvfile.close()
# print("预测完毕!")
# print ('predicting, classification accuracy=%f' % (1-(sum( int(preds[i]) != valid_label[i] for i in range(len(valid_label))) / float(len(valid_label)) )))
if __name__ == "__main__":
train_data,train_label=Data_change(train=True)
valid_data,valid_label=Data_change(train=False)
train_label=onehot_tranIndex(train_label)
valid_label=onehot_tranIndex(valid_label)
test_data=Test_data()
xgboost_class(train_data,train_label,valid_data,valid_label,test_data)
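# The keras import for to_categorical is commented out above, so write_csvfile would raise
# a NameError if it were ever called. Below is a minimal NumPy-based replacement (a sketch,
# not part of the original script); it one-hot encodes integer predictions with num_class
# columns, matching 'num_class': 17 in the params above.
def to_one_hot(preds, num_class=17):
    one_hots = np.zeros((len(preds), num_class), dtype=np.int32)
    one_hots[np.arange(len(preds)), preds] = 1
    return one_hots
# Example: to_one_hot([0, 2], num_class=3) -> [[1, 0, 0], [0, 0, 1]]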
|
UTF-8
|
Python
| false | false | 5,071 |
py
| 29 |
xgboost_copy.py
| 24 | 0.621347 | 0.592336 | 0 | 147 | 30.421769 | 160 |
fedesevle/Ventura
| 15,281,493,655,679 |
f7833d45d4367e69e1ccf489fe220adb6a6591e9
|
995ce02bedb4defdcfa15934dfc374e3c9c3fe85
|
/RK4/RK4_exponencial.py
|
07c1568fa0db877490756241c3a4e270801e2656
|
[] |
no_license
|
https://github.com/fedesevle/Ventura
|
95848894b56ea0ec2fa5e9608e0d61bc1104662d
|
4b75041784245a63884538751f36e4f1d9e26afe
|
refs/heads/master
| 2020-03-10T15:09:47.338319 | 2018-06-05T17:38:27 | 2018-06-05T17:38:27 | 129,442,759 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed May 16 10:44:04 2018
@author: USUARIO
"""
import numpy as np  # import the package that provides mathematical functions
import pylab as plb  # import the package that lets us draw plots
def int_rk4(f,x,dt,params):
k_1 = f(x,params)
k_2 = f(x+dt*0.5*k_1,params)
k_3 = f(x+dt*0.5*k_2,params)
k_4 = f(x+dt*k_3,params)
y=x + dt*(k_1/6 + k_2/3 + k_3/3 + k_4/6)
return y
# We want to integrate dx/dt = a*x, so we build the derivative function to hand to the RK4 integrator
def derivada(x,params):
    a = params  # the parameter is chosen from outside
    dx = a*x  # derivative of the exponential
    return np.array(dx)  # return the derivative
# Now integrate over the desired time interval
# build the time vector
t0 = 0  # initial time
tf = 8  # final time
dt = 0.1  # time step
t = np.arange(t0, tf, dt)  # vector from t0 to tf with step dt
# give the parameter a value
a = np.log(2)  # means the population doubles every unit of time
# build the variable to be integrated
x = np.zeros(len(t))  # x is a vector of zeros that we will fill in
x[0] = 1  # set the initial condition
# integrate using the RK4 method defined above
for i in range(len(t)-1):  # for every i below the length of t
    x[i+1] = int_rk4(derivada, x[i], dt, a)  # use the value at step i to compute the next step
# after the loop the equation is integrated for t between 0 and tf; compare with the analytical solution
xteo = np.exp(a*t)
# plot both curves to check that they agree
plb.plot(t, x, '.')  # '.' draws the curve with dots
plb.plot(t, xteo, '-')  # '-' draws the curve with a solid line
plb.xlabel('tiempo')
plb.ylabel('N')
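# Optional sanity check (not in the original script): the largest deviation from the
# analytical solution should be small for this step size if the RK4 scheme is working.
print('max abs error:', np.max(np.abs(x - xteo)))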
|
UTF-8
|
Python
| false | false | 1,950 |
py
| 9 |
RK4_exponencial.py
| 6 | 0.648425 | 0.623129 | 0 | 60 | 29.633333 | 128 |
nkelava/WebApplications
| 927,712,978,204 |
43991c0b6a3e7384c28619a5d8875410c2cf0661
|
bd7838f6fecce9998eda98dc79c4e24d5607d8bb
|
/UploadFiles/session.py
|
eab58f6c3038b4b7c98b2228bb3e7e2738700609
|
[] |
no_license
|
https://github.com/nkelava/WebApplications
|
5009a99246bf3683ddc6e1a55e670062fcaa58d2
|
44ee28684bc32639e0ddbf4540b23744e63adc11
|
refs/heads/main
| 2023-02-26T12:09:54.633272 | 2021-02-07T12:53:14 | 2021-02-07T12:53:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! C:\Users\Nikola Kelava\AppData\Local\Programs\Python\Python38-32\python.exe
from os import environ
from http import cookies
import database
def create_session():
session_id = database.create_session()
print(f'Set-Cookie: session_id = {session_id}')
return session_id
def get_session_id():
http_cookies_str = environ.get('HTTP_COOKIE', '')
get_all_cookies_object = cookies.SimpleCookie(http_cookies_str)
session_id = get_all_cookies_object.get('session_id').value if (get_all_cookies_object.get('session_id')) else None
return session_id
def destroy_session_id():
print('Set-Cookie: session_id = ""; expires = "Thu, 01 Jan 1970 00:00:00 GMT"')
def destroy_session():
session_id = get_session_id()
destroy_session_id()
database.destroy_session(session_id)
def add_to_session(user_data, session_id=None):
if (session_id is None):
session_id = get_session_id()
for key, value in user_data.items():
user_data[key] = value
database.replace_session(session_id, user_data)
def remove_from_session(dict):
session_id = get_session_id()
_, data = database.get_session(session_id)
for key in dict:
data.pop(key, None)
database.replace_session(session_id, data)
def get_session_data():
session_id = get_session_id()
if (session_id):
_, data = database.get_session(session_id)
else:
return None
return data
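# A minimal usage sketch (an assumption, not part of the original module) of how a CGI
# handler might combine the helpers above: reuse the session named in the cookie if one
# exists, otherwise create a new one, then attach some data to it. The function name and
# the 'username' key are hypothetical.
def example_request_handler():
    session_id = get_session_id() or create_session()
    add_to_session({'username': 'demo'}, session_id)
    return session_id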
|
UTF-8
|
Python
| false | false | 1,455 |
py
| 24 |
session.py
| 23 | 0.662543 | 0.651546 | 0 | 55 | 25.418182 | 119 |
xaviranik/HackerRank
| 12,369,505,824,533 |
269bf8be120710a3d2e631333a005b47d146da37
|
5a60d1717e18e5ca61f7b739ae8222a09620c1ff
|
/python/basic/lists.py
|
6479aba260d17afd1202a3332e22d6e0cb6efe61
|
[] |
no_license
|
https://github.com/xaviranik/HackerRank
|
69b177d4e4586bab990a60930a3f905a9dfc5bea
|
31c999c711e583c9c4d73199930657713daaa6a5
|
refs/heads/master
| 2022-02-23T15:45:16.473865 | 2019-08-14T09:06:42 | 2019-08-14T09:06:42 | 198,211,947 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
L = []
no_of_inp = int(input())
for i in range(no_of_inp):
inputs = input()
lst_of_input = [i for i in inputs.split()]
if lst_of_input[0] == 'insert':
L.insert(int(lst_of_input[1]), int(lst_of_input[2]))
elif lst_of_input[0] == 'print':
print (L)
elif lst_of_input[0] == 'remove':
L.remove(int(lst_of_input[1]))
elif lst_of_input[0] == 'append':
L.append(int(lst_of_input[1]))
elif lst_of_input[0] == 'sort':
L.sort()
elif lst_of_input[0] == 'pop':
L.pop()
elif lst_of_input[0] == 'reverse':
L.reverse()
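# An equivalent, more table-driven variant (a sketch, not part of the original solution):
# every command except 'print' maps directly onto a list method, so the name can be
# dispatched with getattr and the remaining tokens converted to integers.
def run_command(L, tokens):
    cmd, args = tokens[0], [int(a) for a in tokens[1:]]
    if cmd == 'print':
        print(L)
    else:
        getattr(L, cmd)(*args)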
|
UTF-8
|
Python
| false | false | 598 |
py
| 27 |
lists.py
| 26 | 0.523411 | 0.505017 | 0 | 21 | 27.47619 | 60 |
duwei19961021/pythonxx
| 2,551,210,597,413 |
0e4dac24845e3ae89f6b9dbfb2b5e4f74c513419
|
517389abde6df26d9ef8ba57a76b18bccf34a5fc
|
/Mysql.py
|
c32269f1480c7daf0e1137d970324e163bc86f8b
|
[] |
no_license
|
https://github.com/duwei19961021/pythonxx
|
bac34ef8f412a5096e1cf21766972cda289474b1
|
072c8f8d31909ffe2e6c07ec8e219aceb0774095
|
refs/heads/master
| 2021-06-25T02:39:46.651233 | 2021-02-07T03:17:18 | 2021-02-07T03:17:18 | 190,140,601 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pymysql
# Initialise the shop database: create the schema and its users table.
def chushihua():
connection = pymysql.connect(host='',
port=3306,
user='python',
password='123456',
charset='utf8')
cursor = connection.cursor()
sql1 = "create database shop;"
sql2 = "create table shop.users(ID int(11) primary key not null auto_increment,User_name VARCHAR (32) unique not null, Password varchar(30) not NULL,Wage int(11) not null);"
cursor.execute(sql1)
cursor.execute(sql2)
chushihua()
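# A follow-up sketch (not part of the original script) showing how a row could be added
# to the users table created above with a parameterised query; add_user and its sample
# values are hypothetical.
def add_user(connection, name, password, wage):
    with connection.cursor() as cursor:
        cursor.execute(
            "INSERT INTO shop.users (User_name, Password, Wage) VALUES (%s, %s, %s)",
            (name, password, wage))
    connection.commit()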
|
UTF-8
|
Python
| false | false | 577 |
py
| 49 |
Mysql.py
| 49 | 0.544194 | 0.504333 | 0 | 13 | 43.384615 | 178 |
taguz91/python-p
| 9,534,827,425,384 |
b018f0e9650435ff59e094009dec11376fd895b4
|
264f26e174f89da5a9f1b72be779be4f23465c9c
|
/Ejercicios 2/Palabra_repite.py
|
1d2fc47c31ef4fb84c95e99baed9847847c136c7
|
[] |
no_license
|
https://github.com/taguz91/python-p
|
8248485a66867479e97f9c47af4a6cb5fa4017fb
|
a6f772cf924bbf44d42a48c8c4de103c7a5bfcda
|
refs/heads/master
| 2022-03-15T05:01:51.773680 | 2019-11-23T22:53:43 | 2019-11-23T22:53:43 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
capitulo = """Letter 1
TO Mrs. Saville, England
St. Petersburgh, Dec. 11th, 17-
You will rejoice to hear that no disaster has accompanied the
commencement of an enterprise which you have regarded with such
evil forebodings. I arrived here yesterday, and my first task is
to assure my dear sister of my welfare and increasing confidence in
the success of my undertaking.
I am already far north of London, and as I walk in the streets of
Petersburgh, I feel a cold northern breeze play upon my cheeks,
which braces my nerves and fills me with delight. Do you understand
this feeling? This breeze, which has travelled from the regions
towards which I am advancing, gives me a foretaste of those icy climes.
Inspirited by this wind of promise, my daydreams become more fervent
and vivid. I try in vain to be persuaded that the pole is the seat
of frost and desolation; it ever presents itself to my imagination as the
region of beauty and delight. There, Margaret, the sun is forever visible,
its broad disk just skirting the horizon and diffusing a perpetual splendour.
There--for with your leave, my sister, I will put some trust in preceding
navigators--there snow and frost are banished; and, sailing over a calm sea,
we may be wafted to a land surpassing in wonders and in beauty every region
hitherto discovered on the habitable globe. Its productions and features
may be without example, as the phenomena of the heavenly bodies undoubtedly
are in those undiscovered solitudes. What may not be expected in a country
of eternal light? I may there discover the wondrous power which attracts
the needle and may regulate a thousand celestial observations that require
only this voyage to render their seeming eccentricities consistent forever.
I shall satiate my ardent curiosity with the sight of a part of the world
never before visited, and may tread a land never before imprinted by the
foot of man. These are my enticements, and they are sufficient to conquer
all fear of danger or death and to induce me to commence this laborious voyage
with the joy a child feels when he embarks in a little boat, with his holiday
mates, on an expedition of discovery up his native river. But supposing all
these conjectures to be false, you cannot contest the inestimable benefit
which I shall confer on all mankind, to the last generation, by discovering
a passage near the pole to those countries, to reach which at present so many
months are requisite; or by ascertaining the secret of the magnet, which,
if at all possible, can only be effected by an undertaking such as mine.
These reflections have dispelled the agitation with which I began my letter,
and I feel my heart glow with an enthusiasm which elevates me to heaven,
for nothing contributes so much to tranquillize the mind as a steady purpose
--a point on which the soul may fix its intellectual eye. This expedition
has been the favourite dream of my early years. I have read with ardour
the accounts of the various voyages which have been made in the prospect
of arriving at the North Pacific Ocean through the seas which surround
the pole. You may remember that a history of all the voyages made for
purposes of discovery composed the whole of our good Uncle Thomas' library.
My education was neglected, yet I was passionately fond of reading.
These volumes were my study day and night, and my familiarity with them
increased that regret which I had felt, as a child, on learning that my
father's dying injunction had forbidden my uncle to allow me to embark
in a seafaring life.
These visions faded when I perused, for the first time, those poets
whose effusions entranced my soul and lifted it to heaven. I also
became a poet and for one year lived in a paradise of my own creation;
I imagined that I also might obtain a niche in the temple where the
names of Homer and Shakespeare are consecrated. You are well acquainted
with my failure and how heavily I bore the disappointment. But just at
that time I inherited the fortune of my cousin, and my thoughts were
turned into the channel of their earlier bent.
Six years have passed since I resolved on my present undertaking.
I can, even now, remember the hour from which I dedicated myself to
this great enterprise. I commenced by inuring my body to hardship.
I accompanied the whale-fishers on several expeditions to the North Sea;
I voluntarily endured cold, famine, thirst, and want of sleep;
I often worked harder than the common sailors during the day and devoted
my nights to the study of mathematics, the theory of medicine,
and those branches of physical science from which a naval
adventurer might derive the greatest practical advantage.
Twice I actually hired myself as an under-mate in a Greenland whaler,
and acquitted myself to admiration. I must own I felt a little proud
when my captain offered me the second dignity in the vessel and
entreated me to remain with the greatest earnestness, so valuable
did he consider my services. And now, dear Margaret, do I not deserve
to accomplish some great purpose? My life might have been passed in ease
and luxury, but I preferred glory to every enticement that wealth placed
in my path. Oh, that some encouraging voice would answer in the affirmative!
My courage and my resolution is firm; but my hopes fluctuate, and my spirits
are often depressed. I am about to proceed on a long and difficult voyage,
the emergencies of which will demand all my fortitude: I am required
not only to raise the spirits of others, but sometimes to sustain my own,
when theirs are failing.
This is the most favourable period for travelling in Russia. They fly
quickly over the snow in their sledges; the motion is pleasant, and,
in my opinion, far more agreeable than that of an English stagecoach.
The cold is not excessive, if you are wrapped in furs--a dress which
I have already adopted, for there is a great difference between walking
the deck and remaining seated motionless for hours, when no exercise
prevents the blood from actually freezing in your veins. I have no
ambition to lose my life on the post-road between St. Petersburgh
and Archangel. I shall depart for the latter town in a fortnight
or three weeks; and my intention is to hire a ship there, which can
easily be done by paying the insurance for the owner, and to engage
as many sailors as I think necessary among those who are accustomed
to the whale-fishing. I do not intend to sail until the month of June;
and when shall I return? Ah, dear sister, how can I answer this question?
If I succeed, many, many months, perhaps years, will pass before you
and I may meet. If I fail, you will see me again soon, or never.
Farewell, my dear, excellent Margaret. Heaven shower down blessings
on you, and save me, that I may again and again testify my gratitude
for all your love and kindness.
Your affectionate brother,
R. Walton
Letter 2
To Mrs. Saville, England
Archangel, 28th March, 17-
How slowly the time passes here, encompassed as I am by frost and snow!
Yet a second step is taken towards my enterprise. I have hired a vessel
and am occupied in collecting my sailors; those whom I have already
engaged appear to be men on whom I can depend and are certainly possessed
of dauntless courage.
But I have one want which I have never yet been able to satisfy,
and the absence of the object of which I now feel as a most
severe evil, I have no friend, Margaret: when I am glowing with the
enthusiasm of success, there will be none to participate my joy;
if I am assailed by disappointment, no one will endeavour to sustain me
in dejection. I shall commit my thoughts to paper, it is true;
but that is a poor medium for the communication of feeling.
I desire the company of a man who could sympathize with me,
whose eyes would reply to mine. You may deem me romantic,
my dear sister, but I bitterly feel the want of a friend.
I have no one near me, gentle yet courageous, possessed of
a cultivated as well as of a capacious mind, whose tastes are
like my own, to approve or amend my plans. How would such a
friend repair the faults of your poor brother! I am too ardent
in execution and too impatient of difficulties. But it is a still
greater evil to me that I am self-educated: for the first fourteen
years of my life I ran wild on a common and read nothing but our
Uncle Thomas' books of voyages. At that age I became acquainted with
the celebrated poets of our own country; but it was only when it
had ceased to be in my power to derive its most important benefits
from such a conviction that I perceived the necessity of becoming
acquainted with more languages than that of my native country.
Now I am twenty-eight and am in reality more illiterate than
many schoolboys of fifteen. It is true that I have thought more
and that my daydreams are more extended and magnificent, but they
want (as the painters call it) KEEPING; and I greatly need a friend
who would have sense enough not to despise me as romantic,
and affection enough for me to endeavour to regulate my mind.
Well, these are useless complaints; I shall certainly find
no friend on the wide ocean, nor even here in Archangel,
among merchants and seamen. Yet some feelings, unallied to
the dross of human nature, beat even in these rugged bosoms.
My lieutenant, for instance, is a man of wonderful courage
and enterprise; he is madly desirous of glory, or rather, to word
my phrase more characteristically, of advancement in his profession.
He is an Englishman, and in the midst of national and professional
prejudices, unsoftened by cultivation, retains some of the noblest
endowments of humanity. I first became acquainted with him on board
a whale vessel; finding that he was unemployed in this city,
I easily engaged him to assist in my enterprise. The master
is a person of an excellent disposition and is remarkable in
the ship for his gentleness and the mildness of his discipline.
This circumstance, added to his well-known integrity and
dauntless courage, made me very desirous to engage him.
A youth passed in solitude, my best years spent under your
gentle and feminine fosterage, has so refined the groundwork
of my character that I cannot overcome an intense distaste
to the usual brutality exercised on board ship: I have never
believed it to be necessary, and when I heard of a mariner
equally noted for his kindliness of heart and the respect
and obedience paid to him by his crew, I felt myself peculiarly
fortunate in being able to secure his services. I heard of him
first in rather a romantic manner, from a lady who owes to him
the happiness of her life. This, briefly, is his story.
Some years ago he loved a young Russian lady of moderate fortune,
and having amassed a considerable sum in prize-money, the father
of the girl consented to the match. He saw his mistress
once before the destined ceremony; but she was bathed in tears,
and throwing herself at his feet, entreated him to spare her,
confessing at the same time that she loved another, but that he
was poor, and that her father would never consent to the union.
My generous friend reassured the suppliant, and on being informed
of the name of her lover, instantly abandoned his pursuit.
He had already bought a farm with his money, on which he had designed
to pass the remainder of his life; but he bestowed the whole on his rival,
together with the remains of his prize-money to purchase stock,
and then himself solicited the young woman's father to consent
to her marriage with her lover. But the old man decidedly refused,
thinking himself bound in honour to my friend, who, when he found
the father inexorable, quitted his country, nor returned until he heard
that his former mistress was married according to her inclinations.
"What a noble fellow!" you will exclaim. He is so; but then he is
wholly uneducated: he is as silent as a Turk, and a kind of ignorant
carelessness attends him, which, while it renders his conduct the
more astonishing, detracts from the interest and sympathy which
otherwise he would command.
Yet do not suppose, because I complain a little or because I
can conceive a consolation for my toils which I may never know,
that I am wavering in my resolutions. Those are as fixed as fate,
and my voyage is only now delayed until the weather shall permit
my embarkation. The winter has been dreadfully severe, but the spring
promises well, and it is considered as a remarkably early season, so that
perhaps I may sail sooner than I expected. I shall do nothing rashly:
you know me sufficiently to confide in my prudence and considerateness
whenever the safety of others is committed to my care.
I cannot describe to you my sensations on the near prospect
of my undertaking. It is impossible to communicate to you
a conception of the trembling sensation, half pleasurable
and half fearful, with which I am preparing to depart.
I am going to unexplored regions, to "the land of mist and snow,"
but I shall kill no albatross; therefore do not be alarmed
for my safety or if I should come back to you as worn and woeful
as the "Ancient Mariner." You will smile at my allusion,
but I will disclose a secret. I have often attributed my
attachment to, my passionate enthusiasm for, the dangerous
mysteries of ocean to that production of the most imaginative
of modern poets. There is something at work in my soul
which I do not understand. I am practically industrious--
painstaking, a workman to execute with perseverance and labour--
but besides this there is a love for the marvellous, a belief in
the marvellous, intertwined in all my projects, which hurries me
out of the common pathways of men, even to the wild sea and unvisited
regions I am about to explore. But to return to dearer considerations.
Shall I meet you again, after having traversed immense seas, and returned
by the most southern cape of Africa or America? I dare not expect such
success, yet I cannot bear to look on the reverse of the picture.
Continue for the present to write to me by every opportunity: I may
receive your letters on some occasions when I need them most to support
my spirits. I love you very tenderly. Remember me with affection,
should you never hear from me again.
Your affectionate brother,
Robert Walton
Letter 3
To Mrs. Saville, England
July 7th, 17-
My dear Sister,
I write a few lines in haste to say that I am safe--and well advanced
on my voyage. This letter will reach England by a merchantman now
on its homeward voyage from Archangel; more fortunate than I,
who may not see my native land, perhaps, for many years. I am,
however, in good spirits: my men are bold and apparently firm
of purpose, nor do the floating sheets of ice that continually
pass us, indicating the dangers of the region towards which we
are advancing, appear to dismay them. We have already reached
a very high latitude; but it is the height of summer, and although
not so warm as in England, the southern gales, which blow us
speedily towards those shores which I so ardently desire to attain,
breathe a degree of renovating warmth which I had not expected.
No incidents have hitherto befallen us that would make a figure
in a letter. One or two stiff gales and the springing of a leak are
accidents which experienced navigators scarcely remember to record,
and I shall be well content if nothing worse happen to us during
our voyage.
Adieu, my dear Margaret. Be assured that for my own sake, as well
as yours, I will not rashly encounter danger. I will be cool,
persevering, and prudent.
But success SHALL crown my endeavours. Wherefore not? Thus far I
have gone, tracing a secure way over the pathless seas, the very
stars themselves being witnesses and testimonies of my triumph.
Why not still proceed over the untamed yet obedient element? What
can stop the determined heart and resolved will of man?
My swelling heart involuntarily pours itself out thus. But must
finish. Heaven bless my beloved sister!
R.W.
Letter 4
To Mrs. Saville, England
August 5th, 17-
So strange an accident has happened to us that I cannot forbear
recording it, although it is very probable that you will see me
before these papers can come into your possession.
Last Monday (July 3lst) we were nearly surrounded by ice, which closed
in the ship on all sides, scarcely leaving her the sea-room in which
she floated. Our situation was somewhat dangerous, especially as we
were compassed round by a very thick fog. We accordingly lay to,
hoping that some change would take place in the atmosphere and weather.
About two o'clock the mist cleared away, and we beheld, stretched
out in every direction, vast and irregular plains of ice, which
seemed to have no end. Some of my comrades groaned, and my
own mind began to grow watchful with anxious thoughts, when a
strange sight suddenly attracted our attention and diverted our
solicitude from our own situation. We perceived a low carriage,
fixed on a sledge and drawn by dogs, pass on towards the north, at
the distance of half a mile; a being which had the shape of a man,
but apparently of gigantic stature, sat in the sledge and guided
the dogs. We watched the rapid progress of the traveller with our
telescopes until he was lost among the distant inequalities of the ice.
This appearance excited our unqualified wonder. We were, as we believed,
many hundred miles from any land; but this apparition seemed to denote
that it was not, in reality, so distant as we had supposed. Shut in,
however, by ice, it was impossible to follow his track, which we had
observed with the greatest attention. About two hours after this
occurrence we heard the ground sea, and before night the ice broke
and freed our ship. We, however, lay to until the morning,
fearing to encounter in the dark those large loose masses which
float about after the breaking up of the ice. I profited of this
time to rest for a few hours.
In the morning, however, as soon as it was light, I went upon deck
and found all the sailors busy on one side of the vessel, apparently
talking to someone in the sea. It was, in fact, a sledge, like that
we had seen before, which had drifted towards us in the night on a
large fragment of ice. Only one dog remained alive; but there was a
human being within it whom the sailors were persuading to enter the vessel.
He was not, as the other traveller seemed to be, a savage inhabitant
of some undiscovered island, but a European. When I appeared on deck
the master said, "Here is our captain, and he will not allow you to
perish on the open sea."
On perceiving me, the stranger addressed me in English, although with
a foreign accent. "Before I come on board your vessel," said he,
"will you have the kindness to inform me whither you are bound?"
You may conceive my astonishment on hearing such a question
addressed to me from a man on the brink of destruction and to whom
I should have supposed that my vessel would have been a resource
which he would not have exchanged for the most precious wealth the
earth can afford. I replied, however, that we were on a voyage of
discovery towards the northern pole.
Upon hearing this he appeared satisfied and consented to come on board.
Good God! Margaret, if you had seen the man who thus capitulated for
his safety, your surprise would have been boundless. His limbs were
nearly frozen, and his body dreadfully emaciated by fatigue and suffering.
I never saw a man in so wretched a condition. We attempted to carry him
into the cabin, but as soon as he had quitted the fresh air he fainted.
We accordingly brought him back to the deck and restored him to animation
by rubbing him with brandy and forcing him to swallow a small quantity.
As soon as he showed signs of life we wrapped him up in blankets and
placed him near the chimney of the kitchen stove. By slow degrees
he recovered and ate a little soup, which restored him wonderfully.
Two days passed in this manner before he was able to speak, and I
often feared that his sufferings had deprived him of understanding.
When he had in some measure recovered, I removed him to my own cabin
and attended on him as much as my duty would permit. I never saw a more
interesting creature: his eyes have generally an expression of wildness,
and even madness, but there are moments when, if anyone performs an act
of kindness towards him or does him the most trifling service,
his whole countenance is lighted up, as it were, with a beam of
benevolence and sweetness that I never saw equalled. But he is
generally melancholy and despairing, and sometimes he gnashes his teeth,
as if impatient of the weight of woes that oppresses him.
When my guest was a little recovered I had great trouble to keep off the men,
who wished to ask him a thousand questions; but I would not allow him to be
tormented by their idle curiosity, in a state of body and mind whose
restoration evidently depended upon entire repose. Once, however,
the lieutenant asked why he had come so far upon the ice in so
strange a vehicle.
His countenance instantly assumed an aspect of the deepest gloom,
and he replied, "To seek one who fled from me."
"And did the man whom you pursued travel in the same fashion?"
"Yes."
"Then I fancy we have seen him, for the day before we picked you up
we saw some dogs drawing a sledge, with a man in it, across the ice."
This aroused the stranger's attention, and he asked a multitude of
questions concerning the route which the demon, as he called him,
had pursued. Soon after, when he was alone with me, he said,
"I have, doubtless, excited your curiosity, as well as that of
these good people; but you are too considerate to make inquiries."
"Certainly; it would indeed be very impertinent and inhuman in me
to trouble you with any inquisitiveness of mine."
"And yet you rescued me from a strange and perilous situation;
you have benevolently restored me to life."
Soon after this he inquired if I thought that the breaking up of
the ice had destroyed the other sledge. I replied that I could not
answer with any degree of certainty, for the ice had not broken
until near midnight, and the traveller might have arrived at a
place of safety before that time; but of this I could not judge.
From this time a new spirit of life animated the decaying frame of
the stranger. He manifested the greatest eagerness to be upon deck
to watch for the sledge which had before appeared; but I have
persuaded him to remain in the cabin, for he is far too weak to
sustain the rawness of the atmosphere. I have promised that
someone should watch for him and give him instant notice if
any new object should appear in sight.
Such is my journal of what relates to this strange occurrence up to
the present day. The stranger has gradually improved in health but
is very silent and appears uneasy when anyone except myself enters
his cabin. Yet his manners are so conciliating and gentle that the
sailors are all interested in him, although they have had very
little communication with him. For my own part, I begin to love
him as a brother, and his constant and deep grief fills me with
sympathy and compassion. He must have been a noble creature in his
better days, being even now in wreck so attractive and amiable.
I said in one of my letters, my dear Margaret, that I should find
no friend on the wide ocean; yet I have found a man who, before his
spirit had been broken by misery, I should have been happy to have
possessed as the brother of my heart.
I shall continue my journal concerning the stranger at intervals,
should I have any fresh incidents to record.
August 13th, 17-
My affection for my guest increases every day. He excites at once
my admiration and my pity to an astonishing degree. How can I see
so noble a creature destroyed by misery without feeling the most
poignant grief? He is so gentle, yet so wise; his mind is so cultivated,
and when he speaks, although his words are culled with the choicest art,
yet they How with rapidity and unparalleled eloquence. He is now much
recovered from his illness and is continually on the deck, apparently
watching for the sledge that preceded his own. Yet, although unhappy,
he is not so utterly occupied by his own misery but that he interests
himself deeply in the projects of others. He has frequently conversed
with me on mine, which I have communicated to him without disguise.
He entered attentively into all my arguments in favour of my eventual
success and into every minute detail of the measures I had taken to
secure it. I was easily led by the sympathy which he evinced to
use the language of my heart, to give utterance to the burning
ardour of my soul and to say, with all the fervour that warmed me,
how gladly I would sacrifice my fortune, my existence, my every
hope, to the furtherance of my enterprise. One man's life or death
were but a small price to pay for the acquirement of the knowledge
which I sought, for the dominion I should acquire and transmit over
the elemental foes of our race. As I spoke, a dark gloom spread
over my listener's countenance. At first I perceived that he tried
to suppress his emotion; he placed his hands before his eyes, and
my voice quivered and failed me as I beheld tears trickle fast from
between his fingers; a groan burst from his heaving breast. I paused;
at length he spoke, in broken accents: "Unhappy man! Do you share
my madness? Have you drunk also of the intoxicating draught?
Hear me; let me reveal my tale, and you will dash the cup
from your lips!"
Such words, you may imagine, strongly excited my curiosity;
but the paroxysm of grief that had seized the stranger overcame
his weakened powers, and many hours of repose and tranquil
conversation were necessary to restore his composure.
Having conquered the violence of his feelings, he appeared to
despise himself for being the slave of passion; and quelling the
dark tyranny of despair, he led me again to converse concerning
myself personally. He asked me the history of my earlier years.
The tale was quickly told, but it awakened various trains of reflection.
I spoke of my desire of finding a friend, of my thirst for
a more intimate sympathy with a fellow mind than had ever
fallen to my lot, and expressed my conviction that a man could
boast of little happiness who did not enjoy this blessing.
"I agree with you," replied the stranger; "we are unfashioned
creatures, but half made up, if one wiser, better, dearer than
ourselves--such a friend ought to be--do not lend his aid to
perfectionate our weak and faulty natures. I once had a friend,
the most noble of human creatures, and am entitled, therefore,
to judge respecting friendship. You have hope, and the world
before you, and have no cause for despair. But I--I have lost
everything and cannot begin life anew."
As he said this his countenance became expressive of a calm,
settled grief that touched me to the heart. But he was silent
and presently retired to his cabin.
Even broken in spirit as he is, no one can feel more deeply than
he does the beauties of nature. The starry sky, the sea, and every
sight afforded by these wonderful regions seem still to have the
power of elevating his soul from earth. Such a man has a double
existence: he may suffer misery and be overwhelmed by disappointments,
yet when he has retired into himself, he will be like a celestial spirit
that has a halo around him, within whose circle no grief or folly ventures.
Will you smile at the enthusiasm I express concerning this divine
wanderer? You would not if you saw him. You have been tutored and
refined by books and retirement from the world, and you are
therefore somewhat fastidious; but this only renders you the more
fit to appreciate the extraordinary merits of this wonderful man.
Sometimes I have endeavoured to discover what quality it is which
he possesses that elevates him so immeasurably above any other
person I ever knew. I believe it to be an intuitive discernment,
a quick but never-failing power of judgment, a penetration into the
causes of things, unequalled for clearness and precision; add to
this a facility of expression and a voice whose varied intonations
are soul-subduing music.
August l9, 17-
Yesterday the stranger said to me, "You may easily perceive, Captain
Walton, that I have suffered great and unparalleled misfortunes.
I had determined at one time that the memory of these evils
should die with me, but you have won me to alter my determination.
You seek for knowledge and wisdom, as I once did; and I ardently
hope that the gratification of your wishes may not be a serpent
to sting you, as mine has been. I do not know that the relation
of my disasters will be useful to you; yet, when I reflect that
you are pursuing the same course, exposing yourself to the same
dangers which have rendered me what I am, I imagine that you may
deduce an apt moral from my tale, one that may direct you
if you succeed in your undertaking and console you in case of
failure. Prepare to hear of occurrences which are usually deemed
marvellous. Were we among the tamer scenes of nature I might fear
to encounter your unbelief, perhaps your ridicule; but many things
will appear possible in these wild and mysterious regions which
would provoke the laughter of those unacquainted with the ever-
varied powers of nature; nor can I doubt but that my tale conveys
in its series internal evidence of the truth of the events of which
it is composed."
You may easily imagine that I was much gratified by the offered
communication, yet I could not endure that he should renew his grief
by a recital of his misfortunes. I felt the greatest eagerness
to hear the promised narrative, partly from curiosity and partly
from a strong desire to ameliorate his fate if it were in my power.
I expressed these feelings in my answer.
"I thank you," he replied, "for your sympathy, but it is useless;
my fate is nearly fulfilled. I wait but for one event, and then I
shall repose in peace. I understand your feeling," continued he,
perceiving that I wished to interrupt him; "but you are mistaken,
my friend, if thus you will allow me to name you; nothing can alter
my destiny; listen to my history, and you will perceive how
irrevocably it is determined."
He then told me that he would commence his narrative the next day when
I should be at leisure. This promise drew from me the warmest thanks.
I have resolved every night, when I am not imperatively occupied
by my duties, to record, as nearly as possible in his own words,
what he has related during the day. If I should be engaged,
I will at least make notes. This manuscript will doubtless afford
you the greatest pleasure; but to me, who know him, and who hear it
from his own lips--with what interest and sympathy shall I read
it in some future day! Even now, as I commence my task, his full-
toned voice swells in my ears; his lustrous eyes dwell on me with
all their melancholy sweetness; I see his thin hand raised in animation,
while the lineaments of his face are irradiated by the soul within.
Strange and harrowing must be his story, frightful the storm which
embraced the gallant vessel on its course and wrecked it--thus!
Chapter 1
I am by birth a Genevese, and my family is one of the most distinguished
of that republic. My ancestors had been for many years counsellors
and syndics, and my father had filled several public situations
with honour and reputation. He was respected by all who knew him
for his integrity and indefatigable attention to public business.
He passed his younger days perpetually occupied by the affairs of
his country; a variety of circumstances had prevented his marrying
early, nor was it until the decline of life that he became a husband
and the father of a family.
As the circumstances of his marriage illustrate his character, I
cannot refrain from relating them. One of his most intimate
friends was a merchant who, from a flourishing state, fell,
through numerous mischances, into poverty. This man, whose name
was Beaufort, was of a proud and unbending disposition and could not
bear to live in poverty and oblivion in the same country where he
had formerly been distinguished for his rank and magnificence.
Having paid his debts, therefore, in the most honourable manner,
he retreated with his daughter to the town of Lucerne, where he lived
unknown and in wretchedness. My father loved Beaufort with the
truest friendship and was deeply grieved by his retreat in these
unfortunate circumstances. He bitterly deplored the false pride
which led his friend to a conduct so little worthy of the affection
that united them. He lost no time in endeavouring to seek him out,
with the hope of persuading him to begin the world again through
his credit and assistance. Beaufort had taken effectual measures to
conceal himself, and it was ten months before my father discovered
his abode. Overjoyed at this discovery, he hastened to the house,
which was situated in a mean street near the Reuss. But when he entered,
misery and despair alone welcomed him. Beaufort had saved but a
very small sum of money from the wreck of his fortunes, but it
was sufficient to provide him with sustenance for some months,
and in the meantime he hoped to procure some respectable employment
in a merchant's house. The interval was, consequently, spent in
inaction; his grief only became more deep and rankling when he had
leisure for reflection, and at length it took so fast hold of his
mind that at the end of three months he lay on a bed of sickness,
incapable of any exertion.
His daughter attended him with the greatest tenderness, but she saw
with despair that their little fund was rapidly decreasing and that
there was no other prospect of support. But Caroline Beaufort
possessed a mind of an uncommon mould, and her courage rose to
support her in her adversity. She procured plain work; she plaited
straw and by various means contrived to earn a pittance scarcely
sufficient to support life.
Several months passed in this manner. Her father grew worse;
her time was more entirely occupied in attending him; her means of
subsistence decreased; and in the tenth month her father died in
her arms, leaving her an orphan and a beggar. This last blow
overcame her, and she knelt by Beaufort's coffin weeping bitterly,
when my father entered the chamber. He came like a protecting spirit
to the poor girl, who committed herself to his care; and after the
interment of his friend he conducted her to Geneva and placed her
under the protection of a relation. Two years after this event
Caroline became his wife.
There was a considerable difference between the ages of my parents,
but this circumstance seemed to unite them only closer in
bonds of devoted affection. There was a sense of justice
in my father's upright mind which rendered it necessary that
he should approve highly to love strongly. Perhaps during former
years he had suffered from the late-discovered unworthiness of one
beloved and so was disposed to set a greater value on tried worth.
There was a show of gratitude and worship in his attachment to my mother,
differing wholly from the doting fondness of age, for it was inspired
by reverence for her virtues and a desire to be the means of,
in some degree, recompensing her for the sorrows she had endured,
but which gave inexpressible grace to his behaviour to her.
Everything was made to yield to her wishes and her convenience.
He strove to shelter her, as a fair exotic is sheltered by the gardener,
from every rougher wind and to surround her with all that could tend to
excite pleasurable emotion in her soft and benevolent mind. Her health,
and even the tranquillity of her hitherto constant spirit, had been shaken
by what she had gone through. During the two years that had elapsed
previous to their marriage my father had gradually relinquished all
his public functions; and immediately after their union they sought
the pleasant climate of Italy, and the change of scene and interest
attendant on a tour through that land of wonders, as a restorative
for her weakened frame.
From Italy they visited Germany and France. I, their eldest child,
was born at Naples, and as an infant accompanied them in their rambles.
I remained for several years their only child. Much as they were attached
to each other, they seemed to draw inexhaustible stores of affection from
a very mine of love to bestow them upon me. My mother's tender caresses
and my father's smile of benevolent pleasure while regarding me are my
first recollections. I was their plaything and their idol, and something
better--their child, the innocent and helpless creature bestowed on them
by heaven, whom to bring up to good, and whose future lot it was in
their hands to direct to happiness or misery, according as they
fulfilled their duties towards me. With this deep consciousness of
what they owed towards the being to which they had given life,
added to the active spirit of tenderness that animated both, it may
be imagined that while during every hour of my infant life I
received a lesson of patience, of charity, and of self-control,
I was so guided by a silken cord that all seemed but one train of
enjoyment to me. For a long time I was their only care. My mother
had much desired to have a daughter, but I continued their single
offspring. When I was about five years old, while making an
excursion beyond the frontiers of Italy, they passed a week on the
shores of the Lake of Como. Their benevolent disposition often
made them enter the cottages of the poor. This, to my mother, was
more than a duty; it was a necessity, a passion--remembering what
she had suffered, and how she had been relieved--for her to act in
her turn the guardian angel to the afflicted. During one of their
walks a poor cot in the foldings of a vale attracted their notice
as being singularly disconsolate, while the number of half-clothed
children gathered about it spoke of penury in its worst shape.
One day, when my father had gone by himself to Milan, my mother,
accompanied by me, visited this abode. She found a peasant and his
wife, hard working, bent down by care and labour, distributing a
scanty meal to five hungry babes. Among these there was one which
attracted my mother far above all the rest. She appeared of a
different stock. The four others were dark-eyed, hardy little
vagrants; this child was thin and very fair. Her hair was the
brightest living gold, and despite the poverty of her clothing,
seemed to set a crown of distinction on her head. Her brow was
clear and ample, her blue eyes cloudless, and her lips and the
moulding of her face so expressive of sensibility and sweetness
that none could behold her without looking on her as of a distinct
species, a being heaven-sent, and bearing a celestial stamp in all
her features. The peasant woman, perceiving that my mother fixed
eyes of wonder and admiration on this lovely girl, eagerly
communicated her history. She was not her child, but the daughter
of a Milanese nobleman. Her mother was a German and had died on
giving her birth. The infant had been placed with these good
people to nurse: they were better off then. They had not been
long married, and their eldest child was but just born. The father
of their charge was one of those Italians nursed in the memory of the
antique glory of Italy--one among the schiavi ognor frementi,
who exerted himself to obtain the liberty of his country. He became
the victim of its weakness. Whether he had died or still lingered
in the dungeons of Austria was not known. His property was confiscated;
his child became an orphan and a beggar. She continued with her foster
parents and bloomed in their rude abode, fairer than a garden rose among
dark-leaved brambles. When my father returned from Milan, he found
playing with me in the hall of our villa a child fairer than pictured cherub
--a creature who seemed to shed radiance from her looks and whose form and
motions were lighter than the chamois of the hills. The apparition
was soon explained. With his permission my mother prevailed on her
rustic guardians to yield their charge to her. They were fond of
the sweet orphan. Her presence had seemed a blessing to them, but
it would be unfair to her to keep her in poverty and want when Providence
afforded her such powerful protection. They consulted their village priest,
and the result was that Elizabeth Lavenza became the inmate of my parents'
house--my more than sister--the beautiful and adored companion of all
my occupations and my pleasures.
Everyone loved Elizabeth. The passionate and almost reverential
attachment with which all regarded her became, while I shared it,
my pride and my delight. On the evening previous to her being
brought to my home, my mother had said playfully, "I have a pretty
present for my Victor--tomorrow he shall have it." And when,
on the morrow, she presented Elizabeth to me as her promised gift,
I, with childish seriousness, interpreted her words literally and
looked upon Elizabeth as mine--mine to protect, love, and cherish.
All praises bestowed on her I received as made to a possession of
my own. We called each other familiarly by the name of cousin.
No word, no expression could body forth the kind of relation
in which she stood to me--my more than sister, since till death
she was to be mine only. """
palabras = capitulo.split()
# Count how many times each word occurs; palabras.count() rescans the whole list for every word.
lista_frecuencia = []
for palabra in palabras:
    lista_frecuencia.append(palabras.count(palabra))
# The highest count and the word at the matching position.
numero_mayor = max(lista_frecuencia)
ubicacion = lista_frecuencia.index(numero_mayor)
print(lista_frecuencia)
print(numero_mayor)
print(palabras[ubicacion])
"""
numero_repite = palabras.count("the")
print (numero_repite)
"""
|
UTF-8
|
Python
| false | false | 41,812 |
py
| 58 |
Palabra_repite.py
| 58 | 0.792548 | 0.791902 | 0 | 762 | 53.872703 | 78 |
kelly-vacasa/django-jwtauth
| 16,389,595,243,652 |
233009d73805c9ad29a8a2b1121259fde04575ed
|
9a6cf71002d3634dddc0931a057518c81fd3cc95
|
/tests/test_commands.py
|
aabc6a16ed5707693e58e508e22286fc48658836
|
[
"MIT"
] |
permissive
|
https://github.com/kelly-vacasa/django-jwtauth
|
916eb34846d22f445c2db0e726d72a9b6334aded
|
56f658c3c5f15cba184b415c494ca8f76abfe926
|
refs/heads/master
| 2020-04-18T04:56:14.063078 | 2019-03-19T00:51:38 | 2019-03-19T00:51:38 | 167,257,801 | 0 | 0 |
MIT
| true | 2019-03-18T23:40:45 | 2019-01-23T21:35:20 | 2019-02-02T00:09:32 | 2019-03-18T23:40:45 | 56 | 0 | 0 | 0 |
Python
| false | null |
from io import StringIO
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import TestCase
from django.contrib.auth import get_user_model
from django_jwtauth.utils import setup_keys, verify_token, get_private_key
OUT = StringIO()
class GenerateTokenTestCase(TestCase):
def setUp(self):
self.test_user = get_user_model().objects.create(
first_name="test_first_name",
last_name="test_last_name",
email="test_first_name.test_last_name@test_domain.com",
is_active=True,
is_superuser=False
)
def test_command_raises_without_debug(self):
with self.assertRaises(CommandError):
call_command('generate_token', stdout=OUT)
def test_command_raises_error_missing_args(self):
with self.settings(DEBUG=True):
with self.assertRaises(CommandError):
call_command('generate_token', stdout=OUT)
def test_command(self):
with self.settings(DEBUG=True):
call_command('generate_token', user=str(self.test_user.id), stdout=OUT)
def test_command_keys_only_generate_once(self):
with self.settings(DEBUG=True):
token = call_command('generate_token', user=str(self.test_user.id), stdout=OUT)
private_key = setup_keys()
self.assertEqual(private_key, get_private_key())
def test_command_uses_test_user(self):
with self.settings(DEBUG=True):
token = call_command('generate_token', user=str(self.test_user.id), stdout=OUT)
user = verify_token(token)
self.assertEqual(user.id, self.test_user.id)
def test_command_email(self):
with self.settings(DEBUG=True):
token = call_command('generate_token', email=self.test_user.email, stdout=OUT)
user = verify_token(token)
self.assertEqual(user.id, self.test_user.id)
|
UTF-8
|
Python
| false | false | 1,943 |
py
| 5 |
test_commands.py
| 2 | 0.65929 | 0.65929 | 0 | 53 | 35.660377 | 91 |
Ckoelewyn/maplistCod4
| 10,642,928,993,627 |
3761b6cb10f0389cb4d6f415f684ac1dbd2e5475
|
fcd68b72ff7ac3e3953b7b4a621575479f8d74bc
|
/ftpmaplist.py
|
d12fb4d257243cf58bd69cbe15b4b7783fdd76e8
|
[
"MIT"
] |
permissive
|
https://github.com/Ckoelewyn/maplistCod4
|
5f934e56dfd65bc4fde5ad1934dc55346e557dc1
|
da692caab7b7c7e54b11caf60cfb5470515b1c86
|
refs/heads/master
| 2018-12-22T17:18:25.300237 | 2018-10-01T02:30:23 | 2018-10-01T02:30:23 | 151,027,246 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Clanmate wanted to have a way to see what maps were currently in the server,
# A quick python script which pulls the mapfolder directory from ftp and
# returns a json structure to be passed to a website.
# json should have each map and the date the website was updated.
import os
from ftplib import FTP
from datetime import date
import json
# file line format follows:
# key:value
# 1 entry per line
def import_cred(cred_fn):
with open(cred_fn, 'r') as ip_file:
d = {line.split(':')[0].strip() : line.split(':')[1].strip() for line in ip_file}
return d
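# A minimal sketch of the credential file this script expects, inferred from the
# parsing above. The IP, USER and PASS keys are the ones read later by
# default_dir_list_pull; the values shown here are placeholders only:
#
#   IP: 192.0.2.10
#   USER: mapreader
#   PASS: secret
#
# import_cred('map_read_ftp.cred') would then return
# {'IP': '192.0.2.10', 'USER': 'mapreader', 'PASS': 'secret'}.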
def default_dir_list_pull(ftp_creds):
ftp = FTP(ftp_creds['IP'])
ftp.login(user=ftp_creds['USER'], passwd=ftp_creds['PASS'],acct='')
map_list = ftp.nlst()
ftp.quit()
return map_list
def json_map_list(m_list):
jsonName = 'maplist.json'
with open(jsonName, 'w') as jsonOP:
json.dump(m_list, jsonOP)
print(jsonName + " created successfully")
return
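# Shoddy_HTML below copies an HTML template line by line and replaces any line
# containing the placeholder token "!INPUT!" with a <ul> listing of the map
# names. The template layout itself is an assumption inferred from that
# substitution logic; a minimal template could look like:
#
#   <html><body>
#   <h1>Current maps</h1>
#   !INPUT!
#   </body></html>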
def Shoddy_HTML(maplist, HTML_template):
html_t = open(HTML_template, 'r')
html_op = open("index.html",'w')
for line in html_t:
if "!INPUT!" in line:
html_op.write(r'<ul>' + '\n')
for item in maplist:
html_op.write('\t' + r'<li>' + item + r'</li>'+'\n')
            html_op.write(r'</ul>' + '\n')
else:
html_op.write(line)
html_op.close()
html_t.close()
def main():
ftp_creds = import_cred("map_read_ftp.cred")
map_list = default_dir_list_pull(ftp_creds)
Shoddy_HTML(map_list, 'Site_builder_maplist.txt')
#json_map_list(map_list)
if __name__ == '__main__':
main()
|
UTF-8
|
Python
| false | false | 1,712 |
py
| 4 |
ftpmaplist.py
| 1 | 0.587033 | 0.58528 | 0 | 57 | 28 | 89 |
avpan/SFOfficeAssessor-Recorder
| 7,765,300,905,119 |
12cba64b14330a653a46e4b12542d8dd37d9babd
|
d14e1043315995303fcb206881025e8f0c371b84
|
/question2.py
|
2acf7a9075029acbbdfa2885c99e51ef555e533d
|
[] |
no_license
|
https://github.com/avpan/SFOfficeAssessor-Recorder
|
2d2adb428619f11e8354fee3905188bfdb325d84
|
56def99fa009b14e647d8fcb8176ec4d0706503b
|
refs/heads/master
| 2021-01-16T22:15:37.215755 | 2016-07-19T00:21:40 | 2016-07-19T00:21:40 | 63,646,051 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
import pandas as pd
import time
import operator
def getFraction(prop_code):
classcode = {}
N = len(prop_code)
for el in prop_code:
if el in classcode:
classcode[el] = classcode[el] + 1
else:
classcode[el] = 1
maxcode = max(classcode.values())
fraction = maxcode/float(N)
print 'Fraction of assessments are for properties of the most common class: %.12f'%fraction
def makeDict(a,b):
A = {}
for i in range(len(a)):
A[a[i]] = b[i]
return A
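# Note: A is keyed on 'Block and Lot Number', so any duplicate block/lot rows
# overwrite earlier ones and only the last assessed value is kept. Because each
# key therefore appears exactly once, the per-key counts built in getMedian are
# always 1 and the "averages" it compares are simply the stored values.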
def getMedian(a):
data = {}
count = {}
avg = []
sort = []
for k,v in a.items():
if k in data:
data[k] = v
count[k] = count[k]+1
else:
data[k] = v
count[k] = 1
for (k,v), (k2,v2) in zip(data.items(),count.items()):
avg.append(float(v)/float(v2))
#sort = sorted(data.values())
#N = len(data)/2
#median = sort[N]
dif = max(avg)-min(avg)
#print 'The Median: %.12f'%median
print 'Difference: %.12f'%dif
def main():
csv_file ='./Historic_Secured_Property_Tax_Rolls.csv'
df = pd.read_csv(csv_file,low_memory=False)
#prop_code = df['Property Class Code']
a = df['Block and Lot Number']
b = df['Closed Roll Assessed Improvement Value']
A = makeDict(a,b)
#print A
#print A
#getFraction(prop_code)
getMedian(A)
if __name__ == "__main__":
main()
|
UTF-8
|
Python
| false | false | 1,280 |
py
| 1 |
question2.py
| 1 | 0.623438 | 0.6125 | 0 | 60 | 20.166667 | 92 |
PINTO0309/TinyYolo
| 2,954,937,537,065 |
876ffe368d9aaede18488dc83418a1e3a1ec2045
|
415afcfbe0046d9a675c5c2e9361635b6051c094
|
/detectionExample/MultiStick.py
|
f2ccf86af69c58d3ec6272abe3a9806be3217c40
|
[
"MIT"
] |
permissive
|
https://github.com/PINTO0309/TinyYolo
|
2ed5c8e0a2133b8f0a580b6f09343fe44a06244c
|
eaa0ebcba302c1da06b03d475c2dff5d58dc09fe
|
refs/heads/master
| 2020-03-07T12:44:41.793598 | 2018-03-31T13:10:43 | 2018-03-31T13:10:43 | 127,484,726 | 6 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
graph_folder="./"
if sys.version_info.major < 3 or sys.version_info.minor < 4:
print("Please using python3.4 or greater!")
exit(1)
if len(sys.argv) > 1:
graph_folder = sys.argv[1]
from mvnc import mvncapi as mvnc
import numpy as np
import cv2
from os import system
import io, time
from os.path import isfile, join
from queue import Queue
from threading import Thread, Event, Lock
import re
from time import sleep
from Visualize import *
from libpydetector import YoloDetector
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
mvnc.SetGlobalOption(mvnc.GlobalOption.LOG_LEVEL, 2)
devices = mvnc.EnumerateDevices()
if len(devices) == 0:
print("No devices found")
quit()
print(len(devices))
devHandle = []
graphHandle = []
with open(join(graph_folder, "graph"), mode="rb") as f:
graph = f.read()
for devnum in range(len(devices)):
devHandle.append(mvnc.Device(devices[devnum]))
devHandle[devnum].OpenDevice()
graphHandle.append(devHandle[devnum].AllocateGraph(graph))
graphHandle[devnum].SetGraphOption(mvnc.GraphOption.ITERATIONS, 1)
iterations = graphHandle[devnum].GetGraphOption(mvnc.GraphOption.ITERATIONS)
dim = (320,320)
blockwd = 9
targetBlockwd = 9
wh = blockwd*blockwd
classes = 20
threshold = 0.3
nms = 0.4
print("\nLoaded Graphs!!!")
cam = cv2.VideoCapture(0)
#cam = cv2.VideoCapture('/home/pi/TinyYolo/detectionExample/xxxx.mp4')
if cam.isOpened() != True:
print("Camera/Movie Open Error!!!")
quit()
windowWidth = 320
windowHeight = 240
cam.set(cv2.CAP_PROP_FRAME_WIDTH, windowWidth)
cam.set(cv2.CAP_PROP_FRAME_HEIGHT, windowHeight)
lock = Lock()
frameBuffer = []
results = Queue()
lastresults = None
detector = YoloDetector(1)
def init():
glClearColor(0.7, 0.7, 0.7, 0.7)
def idle():
glutPostRedisplay()
def resizeview(w, h):
glViewport(0, 0, w, h)
glLoadIdentity()
glOrtho(-w / 1920, w / 1920, -h / 1080, h / 1080, -1.0, 1.0)
def keyboard(key, x, y):
key = key.decode('utf-8')
if key == 'q':
lock.acquire()
while len(frameBuffer) > 0:
frameBuffer.pop()
lock.release()
for devnum in range(len(devices)):
graphHandle[devnum].DeallocateGraph()
devHandle[devnum].CloseDevice()
print("\n\nFinished\n\n")
sys.exit()
def camThread():
global lastresults
s, img = cam.read()
if not s:
print("Could not get frame")
return 0
lock.acquire()
if len(frameBuffer)>10:
for i in range(10):
del frameBuffer[0]
frameBuffer.append(img)
lock.release()
res = None
if not results.empty():
res = results.get(False)
        if res is None:
            if lastresults is None:
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
h, w = img.shape[:2]
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, w, h, 0, GL_RGB, GL_UNSIGNED_BYTE, img)
else:
imdraw = Visualize(img, lastresults)
imdraw = cv2.cvtColor(imdraw, cv2.COLOR_BGR2RGB)
h, w = imdraw.shape[:2]
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, w, h, 0, GL_RGB, GL_UNSIGNED_BYTE, imdraw)
else:
img = Visualize(img, res)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
h, w = img.shape[:2]
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, w, h, 0, GL_RGB, GL_UNSIGNED_BYTE, img)
lastresults = res
else:
        if lastresults is None:
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
h, w = img.shape[:2]
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, w, h, 0, GL_RGB, GL_UNSIGNED_BYTE, img)
else:
imdraw = Visualize(img, lastresults)
imdraw = cv2.cvtColor(imdraw, cv2.COLOR_BGR2RGB)
h, w = imdraw.shape[:2]
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, w, h, 0, GL_RGB, GL_UNSIGNED_BYTE, imdraw)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
glColor3f(1.0, 1.0, 1.0)
glEnable(GL_TEXTURE_2D)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
glBegin(GL_QUADS)
glTexCoord2d(0.0, 1.0)
glVertex3d(-1.0, -1.0, 0.0)
glTexCoord2d(1.0, 1.0)
glVertex3d( 1.0, -1.0, 0.0)
glTexCoord2d(1.0, 0.0)
glVertex3d( 1.0, 1.0, 0.0)
glTexCoord2d(0.0, 0.0)
glVertex3d(-1.0, 1.0, 0.0)
glEnd()
glFlush()
glutSwapBuffers()
def inferencer(results, lock, frameBuffer, handle):
failure = 0
sleep(1)
while failure < 100:
lock.acquire()
if len(frameBuffer) == 0:
lock.release()
failure += 1
continue
img = frameBuffer[-1].copy()
del frameBuffer[-1]
failure = 0
lock.release()
start = time.time()
imgw = img.shape[1]
imgh = img.shape[0]
now = time.time()
im,offx,offy = PrepareImage(img, dim)
handle.LoadTensor(im.astype(np.float16), 'user object')
out, userobj = handle.GetResult()
out = Reshape(out, dim)
internalresults = detector.Detect(out.astype(np.float32), int(out.shape[0]/wh), blockwd, blockwd, classes, imgw, imgh, threshold, nms, targetBlockwd)
pyresults = [BBox(x) for x in internalresults]
results.put(pyresults)
print("elapsedtime = ", time.time() - now)
def PrepareImage(img, dim):
imgw = img.shape[1]
imgh = img.shape[0]
imgb = np.empty((dim[0], dim[1], 3))
imgb.fill(0.5)
newh = dim[1]
neww = dim[0]
offx = int((dim[0] - neww)/2)
offy = int((dim[1] - newh)/2)
imgb[offy:offy+newh,offx:offx+neww,:] = cv2.resize(img.copy()/255.0,(newh,neww))
im = imgb[:,:,(2,1,0)]
return im,offx,offy
def Reshape(out, dim):
shape = out.shape
out = np.transpose(out.reshape(wh, int(shape[0]/wh)))
out = out.reshape(shape)
return out
class BBox(object):
def __init__(self, bbox):
self.left = bbox.left
self.top = bbox.top
self.right = bbox.right
self.bottom = bbox.bottom
self.confidence = bbox.confidence
self.objType = bbox.objType
self.name = bbox.name
glutInitWindowPosition(0, 0)
glutInit(sys.argv)
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE )
glutCreateWindow("DEMO")
glutFullScreen()
glutDisplayFunc(camThread)
glutReshapeFunc(resizeview)
glutKeyboardFunc(keyboard)
init()
glutIdleFunc(idle)
print("press 'q' to quit!\n")
threads = []
for devnum in range(len(devices)):
t = Thread(target=inferencer, args=(results, lock, frameBuffer, graphHandle[devnum]))
t.start()
threads.append(t)
glutMainLoop()
|
UTF-8
|
Python
| false | false | 6,785 |
py
| 8 |
MultiStick.py
| 6 | 0.611643 | 0.579956 | 0 | 250 | 26.14 | 157 |
AKoushikReddy/Django-WebApplication-2
| 18,184,891,550,854 |
982230ab71494486071eb6a86c41059993c2ac56
|
0de12c35f8a187b40ae7da824d0fe5dd608f6a17
|
/Equipment/migrations/0004_auto_20181002_1729.py
|
fe6fc7c74c01963f6b7b8e7312c45ae6f2537952
|
[] |
no_license
|
https://github.com/AKoushikReddy/Django-WebApplication-2
|
3a59acb7639c630e7ea851616ad1427b69e3f44b
|
079155622c3d41ba1faa63b92213d7f7a4a9eeb2
|
refs/heads/master
| 2020-04-08T02:06:33.510495 | 2018-11-24T10:13:06 | 2018-11-24T10:13:06 | 158,922,817 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Generated by Django 2.0.2 on 2018-10-02 11:59
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('Equipment', '0003_auto_20180930_1038'),
]
operations = [
migrations.RenameField(
model_name='sensoralarm',
old_name='alramtype',
new_name='alarmtype',
),
]
|
UTF-8
|
Python
| false | false | 394 |
py
| 24 |
0004_auto_20181002_1729.py
| 14 | 0.555838 | 0.477157 | 0 | 18 | 19.888889 | 49 |
yumilr/simulacion-de-datos
| 2,439,541,427,441 |
021d5bc18f8bc8f0292571cc777c4a971c1d9c73
|
f845aacbe7ff988633a622a8707f7a5c7d3ab3d2
|
/process/generators.py
|
1bec5cf9217a3ae4879f0a863906bb485963d204
|
[] |
no_license
|
https://github.com/yumilr/simulacion-de-datos
|
092922ece32245cfa57d7f435b33927cbff19517
|
26965aed333a219a2446b96da4a5b7378855d078
|
refs/heads/main
| 2023-02-02T18:49:56.121058 | 2020-12-11T21:43:51 | 2020-12-11T21:43:51 | 320,689,869 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
from random import randint
from random import randrange
from datetime import timedelta
from datetime import datetime
from data import *
import random as rd
def random_date(start, end):
"""
    Return a random date between the two given datetime objects,
    formatted as a 'YYYY/MM/DD' string.
"""
delta = end - start
int_delta = (delta.days * 24 * 60 * 60) + delta.seconds
random_second = randrange(int_delta)
    return datetime.strftime(start + timedelta(seconds=random_second), '%Y/%m/%d')  # 'YYYY/MM/DD' string
# d1 = datetime.strptime('1/1/2008 1:30 PM', '%m/%d/%Y %I:%M %p')
# d2 = datetime.strptime('1/1/2009 4:50 AM', '%m/%d/%Y %I:%M %p')
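# Illustrative use of random_date (the dates and the result are arbitrary examples):
#   d1 = datetime.strptime('1/1/1980', '%m/%d/%Y')
#   d2 = datetime.strptime('11/1/2016', '%m/%d/%Y')
#   random_date(d1, d2)  # -> e.g. '2003/07/21'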
def checkFolders(schema, dataSize):
if os.path.isdir(f'{schema}') == False: os.mkdir(f'{schema}')
if os.path.isdir(f'{schema}/scripts') == False: os.mkdir(f'{schema}/scripts')
if os.path.isdir(f'{schema}/textFiles') == False: os.mkdir(f'{schema}/textFiles')
if os.path.isdir(f'{schema}/scripts/{dataSize}') == False: os.mkdir(f'{schema}/scripts/{dataSize}')
if os.path.isdir(f'{schema}/textFiles/{dataSize}') == False: os.mkdir(f'{schema}/textFiles/{dataSize}')
fNU = None
fNE = None
fNV = None
fC = None
def generateUsers(schema, dataSize):
checkFolders(schema, dataSize)
global fNU
fNU = firstName(dataSize)
fP = password(dataSize)
global fC
fC = email(dataSize)
file1 = open(f"{schema}/textFiles/{dataSize}/usuarios{dataSize}.txt","a")
file2 = open(f"{schema}/scripts/{dataSize}/insert_usuarios{dataSize}.sql","a")
print("a\n")
for i in range(dataSize):
str1 = f"'{fNU[i]}', '{fP[i]}', '{fC[i]}'\n"
str2 = f"INSERT INTO {schema}.usuario (Nombre, Contrasenia, Correo) VALUES ('{fNU[i]}', '{fP[i]}', '{fC[i]}');\n"
file1.write(str1)
file2.write(str2)
file1.close()
file2.close()
def generateEmpresas(schema, dataSize):
global fNE
fNE = companyName(dataSize)
checkFolders(schema, dataSize)
file1 = open(f"{schema}/textFiles/{dataSize}/empresas{dataSize}.txt","a")
file2 = open(f"{schema}/scripts/{dataSize}/insert_empresas{dataSize}.sql","a")
d1 = datetime.strptime('1/1/1980', '%m/%d/%Y')
d2 = datetime.strptime('11/1/2016', '%m/%d/%Y')
for i in range(dataSize):
fecha = random_date(d1,d2)
str1 = f"'{fNE[i]}', '{fecha}'\n"
str2 = f"INSERT INTO {schema}.empresa (Nombre, FechaFundacion) VALUES ('{fNE[i]}', '{fecha}');\n"
file1.write(str1)
file2.write(str2)
file1.close()
file2.close()
# d1 = datetime.strftime('1/1/2008 1:30 PM', '%m/%d/%Y %I:%M %p')
def generateDesarrolladores(schema, dataSize):
file1 = open(f"{schema}/textFiles/{dataSize}/desarrolladores{dataSize}.txt","a")
file2 = open(f"{schema}/scripts/{dataSize}/insert_desarrolladores{dataSize}.sql","a")
print("a\n")
global fNE
rd.shuffle(fNE)
for i in range(dataSize):
str1 = f"'{fNE[i]}'\n"
str2 = f"INSERT INTO {schema}.desarrollador (Nombre) VALUES ('{fNE[i]}');\n"
file1.write(str1)
file2.write(str2)
file1.close()
file2.close()
def generateEditores(schema, dataSize):
file1 = open(f"{schema}/textFiles/{dataSize}/editores{dataSize}.txt","a")
file2 = open(f"{schema}/scripts/{dataSize}/insert_editores{dataSize}.sql","a")
print("a\n")
global fNE
rd.shuffle(fNE)
for i in range(dataSize):
str1 = f"'{fNE[i]}'\n"
str2 = f"INSERT INTO {schema}.editor (Nombre) VALUES ('{fNE[i]}');\n"
file1.write(str1)
file2.write(str2)
file1.close()
file2.close()
def generateGameName(schema, dataSize):
global fNV, fNE
fNV = gameName(dataSize)
fG = generos()
checkFolders(schema, dataSize)
file1 = open(f"{schema}/textFiles/{dataSize}/games{dataSize}.txt","a")
file2 = open(f"{schema}/scripts/{dataSize}/insert_games{dataSize}.sql","a")
rd.shuffle(fNE)
d1 = datetime.strptime('1/1/1981', '%m/%d/%Y')
d2 = datetime.strptime('11/1/2020', '%m/%d/%Y')
for i in range(dataSize):
en = fNE[rd.randint(0,dataSize-1)]
dn = fNE[rd.randint(0,dataSize-1)]
precio = rd.uniform(5.99,499.99)
genero = fG[rd.randint(0,len(fG)-1)]
descargas = rd.randint(0,5000000)
calific = rd.uniform(0,5)
fecha = random_date(d1,d2)
str1 = f"'{en}', '{dn}','{fNV[i]}', {precio}, '{genero}', {descargas}, {calific}, '{fecha}'\n"
str2 = f"INSERT INTO {schema}.videojuego (ENombre, DNombre, Nombre, Precio, Genero, Descargas, Calificacion, Fecha) VALUES ('{en}', '{dn}','{fNV[i]}', {precio}, '{genero}', {descargas}, {calific}, '{fecha}');\n"
file1.write(str1)
file2.write(str2)
file1.close()
file2.close()
def generateEdicionNormal(schema, dataSize):
file1 = open(f"{schema}/textFiles/{dataSize}/edicionesNormales{dataSize}.txt","a")
file2 = open(f"{schema}/scripts/{dataSize}/insert_edicionesNormales{dataSize}.sql","a")
print("a\n")
global fNE,fNV
rd.shuffle(fNE)
rd.shuffle(fNV)
for i in range(dataSize):
en = fNE[rd.randint(0, dataSize - 1)]
dn = fNE[rd.randint(0, dataSize - 1)]
vn = fNV[rd.randint(0, dataSize - 1)]
str1 = f"'{en}', '{dn}', '{vn}'\n"
str2 = f"INSERT INTO {schema}.edicionNormal (ENombre, DNombre, Nombre) VALUES ('{en}', '{dn}', '{vn}');\n"
file1.write(str1)
file2.write(str2)
file1.close()
file2.close()
def generateEdicionEspecial(schema, dataSize):
file1 = open(f"{schema}/textFiles/{dataSize}/edicionesEspeciales{dataSize}.txt","a")
file2 = open(f"{schema}/scripts/{dataSize}/insert_edicionesEspeciales{dataSize}.sql","a")
print("a\n")
global fNE,fNV
rd.shuffle(fNE)
rd.shuffle(fNV)
fR = recompensas()
for i in range(dataSize):
en = fNE[rd.randint(0, dataSize - 1)]
dn = fNE[rd.randint(0, dataSize - 1)]
vn = fNV[rd.randint(0, dataSize - 1)]
recompensa = fR[rd.randint(0, len(fR) - 1)]
str1 = f"'{en}', '{dn}', '{vn}', '{recompensa}'\n"
str2 = f"INSERT INTO {schema}.edicionNormal (ENombre, DNombre, Nombre, Recompensa) VALUES ('{en}', '{dn}', '{vn}', '{recompensa}');\n"
file1.write(str1)
file2.write(str2)
file1.close()
file2.close()
def generateCupon(schema, dataSize):
global fNV, fNE, fC
checkFolders(schema, dataSize)
file1 = open(f"{schema}/textFiles/{dataSize}/cupones{dataSize}.txt","a")
file2 = open(f"{schema}/scripts/{dataSize}/insert_cupones{dataSize}.sql","a")
d1 = datetime.strptime('1/1/2002', '%m/%d/%Y')
d2 = datetime.strptime('11/1/2020', '%m/%d/%Y')
for i in range(dataSize):
en = fNE[rd.randint(0,dataSize-1)]
dn = fNE[rd.randint(0,dataSize-1)]
vn = fNV[rd.randint(0,dataSize-1)]
correo = fC[rd.randint(0,dataSize-1)]
descnt = rd.randint(1,99)
codigo = i+1
fecha_ini = random_date(d1,d2)
fecha_fin = random_date(datetime.strptime(fecha_ini, '%Y/%m/%d'),d2)
str1 = f"{codigo}, '{correo}','{en}', '{dn}', '{vn}', {descnt}, '{fecha_ini}', '{fecha_fin}'\n"
str2 = f"INSERT INTO {schema}.cupon (Codigo, Correo, ENombre, DNombre, Nombre, PorcentajeDesc, Inicio, Fin) VALUES ({codigo}, '{correo}','{en}', '{dn}', '{vn}', {descnt}, '{fecha_ini}', '{fecha_fin}');\n"
file1.write(str1)
file2.write(str2)
file1.close()
file2.close()
def generateCompra(schema, dataSize):
global fNV, fNE, fC
fP = formaDePago()
checkFolders(schema, dataSize)
file1 = open(f"{schema}/textFiles/{dataSize}/compras{dataSize}.txt","a")
file2 = open(f"{schema}/scripts/{dataSize}/insert_compras{dataSize}.sql","a")
d1 = datetime.strptime('1/1/2002', '%m/%d/%Y')
d2 = datetime.strptime('11/1/2020', '%m/%d/%Y')
for i in range(dataSize):
en = fNE[rd.randint(0,dataSize-1)]
dn = fNE[rd.randint(0,dataSize-1)]
vn = fNV[rd.randint(0,dataSize-1)]
correo = fC[rd.randint(0,dataSize-1)]
formap = fP[rd.randint(0,len(fP)-1)]
fecha= random_date(d1,d2)
str1 = f"'{correo}','{en}', '{dn}', '{vn}', '{fecha}', '{formap}'\n"
str2 = f"INSERT INTO {schema}.compra (Correo, ENombre, DNombre, Nombre, Fecha, FormaDePago) VALUES ('{correo}','{en}', '{dn}', '{vn}', '{fecha}', '{formap}');\n"
file1.write(str1)
file2.write(str2)
file1.close()
file2.close()
# BADLY STRUCTURED: the game creation and purchase dates need a constraint, and the coupon (cupon) dates are also wrong
|
UTF-8
|
Python
| false | false | 8,551 |
py
| 9 |
generators.py
| 8 | 0.607439 | 0.577611 | 0 | 219 | 38.041096 | 219 |
tristan-salles/book-python
| 5,111,011,093,197 |
97cc66f155205c9a9a409418499a73f0d593f870
|
3d8ac133cb4908af5ec69e03014dd925c252d2ae
|
/concurrency/solution/threading_timer.py
|
9c2e052e072f4d2ff3855236eb30d09aaaab1a52
|
[
"MIT"
] |
permissive
|
https://github.com/tristan-salles/book-python
|
7b2afaf44d6a7887a802991a211b53b38d46e91f
|
5e1dbbb58fb03fb5c6936ec331ec1f907e42037f
|
refs/heads/master
| 2020-06-22T21:50:29.843279 | 2019-07-18T14:06:51 | 2019-07-18T14:06:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import subprocess
from shlex import split
from queue import Queue
from threading import Timer
TIMEOUT = 2.0
DELAY = 5.0
TODO = [
'ping python.astrotech.io',
'ls -la',
'echo "hello world"',
'cat /etc/passwd',
]
work_queue = Queue()
def run():
while True:
cmd = work_queue.get()
        cmd = split(cmd)  # tokenize the command string into an argv list
        try:
            subprocess.run(cmd, timeout=TIMEOUT)
except subprocess.TimeoutExpired:
print('Timeout')
work_queue.task_done()
t = Timer(DELAY, run)
t.start()
# Fill the queue
for todo in TODO:
work_queue.put(todo)
# wait to complete all tasks
print('before join')
t.join(timeout=TIMEOUT)
print('afer join')
print('done.')
|
UTF-8
|
Python
| false | false | 721 |
py
| 205 |
threading_timer.py
| 129 | 0.624478 | 0.618915 | 0 | 43 | 15.72093 | 60 |
songzhenhe/NSHA2018
| 13,219,909,355,977 |
233127a08c5ad928f6d6d46e48c8df66302b88f0
|
ef354f79c1833a68ce149373b2e20d5f857e63ea
|
/source_models/faults/build_national_fsm_collapsed_rates.py
|
33e63684e537a476d23720d9d2dec7645f9a583a
|
[] |
no_license
|
https://github.com/songzhenhe/NSHA2018
|
29995caf5cc560faa924ad9bfe3d9e2efb72ad9c
|
86df6654318461055b0d36124bf8a91d10a9869f
|
refs/heads/master
| 2021-07-08T20:58:33.660905 | 2017-10-05T04:52:48 | 2017-10-05T04:52:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""Code for building the fault source model with rates from
collapsed logic tree branches
Jonathan Griffin
Geoscience Australia
April 2017
"""
#module use ~access/modules
#module load pythonlib/basemap
import os, sys
import numpy as np
from NSHA2018.source_models.faults.shapefile2nrml import shapefile_2_simplefault, \
shapefile_2_simplefault_CE, shapefile_2_simplefault_MM, shp2nrml
from NSHA2018.source_models.logic_trees import logic_tree
from NSHA2018.source_models.utils.utils import largest_remainder
from NSHA2018.source_models.utils.area_sources import nrml2sourcelist, \
area2pt_source, weighted_pt_source
from NSHA2018.source_models.utils.pt2fault_distance import read_simplefault_source, \
pt2fault_distance, write_combined_faults_points, combine_pt_sources, read_pt_source
#from hmtk.parsers.source_model.nrml04_parser import nrmlSourceModelParser
from openquake.hazardlib.sourcewriter import write_source_model
from openquake.hazardlib.scalerel.leonard2014 import Leonard2014_SCR
from subprocess import call
from openquake.hazardlib.sourceconverter import SourceConverter, \
area_to_point_sources, SourceGroup
# Basic parameters
shapefile = 'FSM/FSD_simple_faults.shp'
shapefile_faultname_attribute = 'Name'
shapefile_dip_attribute = 'Dip'
shapefile_sliprate_attribute = 'SL_RT_LT'
shapefile_uplift_attribute = 'UP_RT_LT'
source_model_name = 'National_Fault_Source_Model_2018_Collapsed_AUS6'
simple_fault_tectonic_region = None # Define based on neotectonic domains
magnitude_scaling_relation = 'Leonard2014_SCR'
rupture_aspect_ratio = 2
upper_depth = 0.001
lower_depth = 20.0
a_value = None
#b_region_shapefile = '../zones/shapefiles/Leonard2008/LEONARD08_NSHA18_MFD.shp'
b_region_shapefile = '../zones/2012_mw_ge_4.0/NSHA13_Background/shapefiles/NSHA13_BACKGROUND_NSHA18_MFD.shp'
default_b = 1.0#None # Get from Leonard 2008 regions
min_mag = 5.5 #4.8
#max_mag = 7.5 #None # Get from scaling
rake = 90
output_dir = source_model_name
combined_output_dir = 'National_Seismotectonic_Source_Model_2018_ASU6'
bin_width = 0.1 # Width of MFD bins in magnitude units
domains_shapefile = '../zones/shapefiles/NSHA13_Background/NSHA13_BACKGROUND_NSHA18.shp'
#area_source_model = '../zones/2012_mw_ge_4.0/NSHA13/input/collapsed/NSHA13_collapsed.xml'
#area_source_model = '../zones/2012_mw_ge_4.0/AUS6/input/collapsed/AUS6_collapsed.xml'
area_source_model = '../zones/2012_mw_ge_4.0/AUS6/input/collapsed/AUS6_collapsed.xml'
area_source_model_name = area_source_model.split('/')[-1].rstrip('.xml')
investigation_time = 50
fault_mesh_spacing = 2 #2 Fault source mesh
rupture_mesh_spacing = 2 #10 # Area source mesh
area_source_discretisation = 10 #20
# Get logic tree information
lt = logic_tree.LogicTree('../../shared/seismic_source_model_weights_rounded_p0.4.csv')
# Get basic information from shapefile
fault_traces, faultnames, dips, sliprates, fault_lengths = \
shp2nrml.parse_line_shapefile(shapefile,
shapefile_faultname_attribute,
shapefile_dip_attribute,
shapefile_sliprate_attribute,
shapefile_uplift_attribute=shapefile_uplift_attribute,
slip_units = 'm/ma')
# Get b-value and trt from domains
trts = shp2nrml.trt_from_domains(fault_traces, domains_shapefile,
default_trt = 'Non-cratonic')
trt_list = list(set(trts)) # unique trt values
b_values = shp2nrml.b_value_from_region(fault_traces,
b_region_shapefile,
default_b = 1.0)
# Output to be appened line by line to this list
output_xml_add = []
output_xml_mb = []
output_xml_geom = []
output_xml_all_methods = []
output_xmls = [output_xml_add, output_xml_mb, output_xml_geom, output_xml_all_methods]
# Append nrml headers
shp2nrml.append_xml_header(output_xml_add, ('%s_additive' % source_model_name))
shp2nrml.append_xml_header(output_xml_mb, ('%s_moment_balanced' % source_model_name))
shp2nrml.append_xml_header(output_xml_geom, ('%s_geom_filtered' % source_model_name))
shp2nrml.append_xml_header(output_xml_all_methods, ('%s_all_methods_collapsed' % source_model_name))
for i, fault_trace in enumerate(fault_traces):
# Get basic parameters
fault_area = fault_lengths[i]*(float(lower_depth)-float(upper_depth))
sliprate = sliprates[i]
trt = trts[i]
faultname = faultnames[i]
b_value = b_values[i]
dip = dips[i]
print 'Calculating rates for %s in domain %s' % (faultname, trt)
# Calculate M_max from scaling relations
scalrel = Leonard2014_SCR()
max_mag = scalrel.get_median_mag(fault_area, float(rake))
# Round to nearest 0.05 mag unit
max_mag = np.round((max_mag-0.05), 1) + 0.05
print 'Maximum magnitude is %.3f' % max_mag
# Append geometry information
for output_xml in output_xmls:
shp2nrml.append_rupture_geometry(output_xml, fault_trace,
dip, i, faultname,
upper_depth, lower_depth,
trt)
# Get truncated Gutenberg-Richter rates
gr_mags, gr_rates, moment_rate = \
shp2nrml.sliprate2GR_incremental(sliprate, fault_area,
b_value, max_mag,
min_mag, bin_width)
gr_mags = np.around(gr_mags, 2)# Rounding to ensure equality of magnitude bins
gr_add_value, gr_add_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'GR_Add')
gr_mb_value, gr_mb_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'GR_MB')
gr_geom_value, gr_geom_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'GR_Geom')
gr_add_weight = gr_add_weight[0]
gr_mb_weight = gr_mb_weight[0]
gr_geom_weight = gr_geom_weight[0]
# Get Youngs and Coppersmith 1985 Characteristic rates
char_mag = gr_mags[-1] - 0.25 + 0.05 # Adding 0.05 avoids round issues
print char_mag, b_value, min_mag, max_mag, moment_rate, bin_width
ce_mags, ce_rates = shp2nrml.momentrate2YC_incremental(char_mag, b_value,
min_mag, max_mag,
moment_rate, bin_width)
ce_mags = np.around(ce_mags, 2)# Rounding to ensure equality of magnitude bins
ce_add_value, ce_add_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'YC_Add')
ce_mb_value, ce_mb_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'YC_MB')
ce_geom_value, ce_geom_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'YC_Geom')
ce_add_weight = ce_add_weight[0]
ce_mb_weight = ce_mb_weight[0]
ce_geom_weight = ce_geom_weight[0]
# Get Maximum Magnitude distirbution
mm_max_mag = gr_mags[-1] + 0.1 # Avoid rounding issues
mm_mags, mm_rates = shp2nrml.momentrate2MM_incremental(mm_max_mag,
moment_rate,
bin_width)
mm_mags = np.around(mm_mags, 2) # Rounding to ensure equality of magnitude bins
mm_add_value, mm_add_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'MM_Add')
mm_mb_value, mm_mb_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'MM_MB')
mm_geom_value, mm_geom_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'MM_Geom')
mm_add_weight = mm_add_weight[0]
mm_mb_weight = mm_mb_weight[0]
mm_geom_weight = mm_geom_weight[0]
# Calculate collapsed weights
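    # For every magnitude bin, the collapsed rate is the weighted sum of the
    # incremental rates from the three MFD branches (truncated Gutenberg-Richter,
    # Youngs & Coppersmith characteristic, and maximum magnitude), using the
    # logic-tree weights of the corresponding method (additive, moment-balanced,
    # geometrically filtered, or all methods combined).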
additive_rates = []
mb_rates = []
geom_rates = []
all_method_rates = []
for mag_bin in gr_mags:
additive_rate = np.sum(gr_add_weight*gr_rates[np.where(gr_mags == mag_bin)]) + \
np.sum(ce_add_weight*ce_rates[np.where(ce_mags == mag_bin)]) + \
np.sum(mm_add_weight*mm_rates[np.where(mm_mags == mag_bin)])
additive_rates.append(additive_rate)
mb_rate = np.sum(gr_mb_weight*gr_rates[np.where(gr_mags == mag_bin)]) + \
np.sum(ce_mb_weight*ce_rates[np.where(ce_mags == mag_bin)]) + \
np.sum(mm_mb_weight*mm_rates[np.where(mm_mags == mag_bin)])
mb_rates.append(mb_rate)
geom_rate = np.sum(gr_geom_weight*gr_rates[np.where(gr_mags == mag_bin)]) + \
np.sum(ce_geom_weight*ce_rates[np.where(ce_mags == mag_bin)]) + \
np.sum(mm_geom_weight*mm_rates[np.where(mm_mags == mag_bin)])
geom_rates.append(geom_rate)
all_method_rate = np.sum(gr_add_weight*gr_rates[np.where(gr_mags == mag_bin)]) + \
np.sum(ce_add_weight*ce_rates[np.where(ce_mags == mag_bin)]) + \
np.sum(mm_add_weight*mm_rates[np.where(mm_mags == mag_bin)]) + \
np.sum(gr_mb_weight*gr_rates[np.where(gr_mags == mag_bin)]) + \
np.sum(ce_mb_weight*ce_rates[np.where(ce_mags == mag_bin)]) + \
np.sum(mm_mb_weight*mm_rates[np.where(mm_mags == mag_bin)]) + \
np.sum(gr_geom_weight*gr_rates[np.where(gr_mags == mag_bin)]) + \
np.sum(ce_geom_weight*ce_rates[np.where(ce_mags == mag_bin)]) + \
np.sum(mm_geom_weight*mm_rates[np.where(mm_mags == mag_bin)])
all_method_rates.append(all_method_rate)
# Append collapsed weights to xml
shp2nrml.append_incremental_mfd(output_xml_add, magnitude_scaling_relation,
rupture_aspect_ratio, rake,
min(gr_mags), bin_width, additive_rates)
shp2nrml.append_incremental_mfd(output_xml_mb, magnitude_scaling_relation,
rupture_aspect_ratio, rake,
min(gr_mags), bin_width, mb_rates)
shp2nrml.append_incremental_mfd(output_xml_geom, magnitude_scaling_relation,
rupture_aspect_ratio, rake,
min(gr_mags), bin_width, geom_rates)
shp2nrml.append_incremental_mfd(output_xml_all_methods, magnitude_scaling_relation,
rupture_aspect_ratio, rake,
min(gr_mags), bin_width, all_method_rates)
# Close xml
for output_xml in output_xmls:
output_xml.append(' </sourceModel>')
output_xml.append('</nrml>')
# Add newlines
output_xml_add = [oxml + '\n' for oxml in output_xml_add]
output_xml_mb = [oxml + '\n' for oxml in output_xml_mb]
output_xml_geom = [oxml + '\n' for oxml in output_xml_geom]
output_xml_all_methods = [oxml + '\n' for oxml in output_xml_all_methods]
# Write to file fault models on their own
try:
os.mkdir(source_model_name)
except:
pass
f = open(os.path.join(source_model_name, source_model_name + '_additive.xml'),
'w')
f.writelines(output_xml_add)
f.close()
f = open(os.path.join(source_model_name, source_model_name + '_moment_balance.xml'),
'w')
f.writelines(output_xml_mb)
f.close()
f = open(os.path.join(source_model_name, source_model_name + '_geom_filtered.xml'),
'w')
f.writelines(output_xml_geom)
f.close()
f = open(os.path.join(source_model_name, source_model_name + '_all_methods_collapsed.xml'),
'w')
f.writelines(output_xml_all_methods)
f.close()
#Free memory
del output_xml_add
del output_xml_mb
del output_xml_geom
del output_xml_all_methods
# Now read in the area source model
print 'Reading area source model %s' % area_source_model
area_sources = nrml2sourcelist(area_source_model,
investigation_time=investigation_time,
rupture_mesh_spacing=rupture_mesh_spacing,
width_of_mfd_bin=bin_width,
area_source_discretisation=area_source_discretisation)
# Convert area sources to point sources for filtering
print 'Converting to point sources'
area_pt_filename = area_source_model[:-4] + '_pts.xml'
#name = area_source_model.split('/')[-1][:-4] + '_pts'
point_sources = area2pt_source(area_source_model, sources=area_sources,
filename=area_pt_filename,
name=source_model_name)
pt_source_list = []
for source_group in point_sources:
for source in source_group:
pt_source_list.append(source)
# Now apply weightings diretly to the point source files
# and write one .xml file for each of the methods
# (add, mb, and geom). Note these are summed as we
# have already collapsed rates for the fault sources
total_add_weight = {}
total_mb_weight = {}
total_geom_weight = {}
for trt in trt_list:
gr_add_value, gr_add_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'GR_Add')
gr_mb_value, gr_mb_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'GR_MB')
gr_geom_value, gr_geom_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'GR_Geom')
ce_add_value, ce_add_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'YC_Add')
ce_mb_value, ce_mb_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'YC_MB')
ce_geom_value, ce_geom_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'YC_Geom')
mm_add_value, mm_add_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'MM_Add')
mm_mb_value, mm_mb_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'MM_MB')
mm_geom_value, mm_geom_weight = lt.get_weights('FSM_MFD', trt, branch_value = 'MM_Geom')
# Get total method weights as these will be applied to zone/pt sources
total_add_weight[trt] = gr_add_weight[0] + ce_add_weight[0] + mm_add_weight[0]
total_mb_weight[trt] = gr_mb_weight[0] + ce_mb_weight[0] + mm_mb_weight[0]
total_geom_weight[trt] = gr_geom_weight[0] + ce_geom_weight[0] + mm_geom_weight[0]
#####################
print 'Not including moment balanced approach for now!'
for trt in trt_list:
print 'Renormalising weights for other methods'
partial_weight_sum = total_add_weight[trt]+total_geom_weight[trt]
total_add_weight[trt] = total_add_weight[trt]*(1/partial_weight_sum)
total_geom_weight[trt] = total_geom_weight[trt]*(1/partial_weight_sum)
total_add_weight[trt], total_geom_weight[trt] = largest_remainder([total_add_weight[trt],
total_geom_weight[trt]],
expected_sum=1,precision=3)
##################
additive_pt_sources_filename = area_source_model[:-4] + '_pts_add_weighted.xml'
model_name = area_source_model.split('/')[-1].rstrip('.xml') + '_additive'
print 'Writing %s' % model_name
additive_pt_sources = weighted_pt_source(pt_source_list, total_add_weight,
model_name, additive_pt_sources_filename,
nrml_version='04')
mb_pt_sources_filename = area_source_model[:-4] + '_pts_mb_weighted.xml'
model_name = area_source_model.split('/')[-1].rstrip('.xml') + '_mb'
print 'Writing %s' % model_name
mb_pt_sources = weighted_pt_source(pt_source_list, total_mb_weight,
model_name, mb_pt_sources_filename,
nrml_version='04')
geom_pt_sources_filename = area_source_model[:-4] + '_pts_geom_weighted.xml'
model_name = area_source_model.split('/')[-1].rstrip('.xml') + '_geom_filter'
print 'Writing %s' % model_name
geom_pt_sources = weighted_pt_source(pt_source_list, total_geom_weight,
model_name, geom_pt_sources_filename,
nrml_version='04')
print 'Exiting here'
sys.exit()
# Apply geometrical filtering
print 'Applying geometrical filtering - this should be pre-calculated using run_geom_filter.sh!'
fsm = os.path.join(source_model_name, source_model_name + '_geom_filtered.xml')
fault_sources = read_simplefault_source(fsm, rupture_mesh_spacing = fault_mesh_spacing)
geom_filtered_pt_source_file = area_source_model[:-4] + '_pts_geom_filtered.xml'
geom_filtered_pt_sources = read_pt_source(geom_filtered_pt_source_file)
#pt2fault_distance(geom_pt_sources, fault_sources, min_distance=5.0,
# filename=geom_filtered_pt_source_file,
# buffer_distance = 100.,
# name=source_model_name)
outfile = os.path.join(source_model_name, source_model_name + '_geom_filtered_zone.xml')
write_combined_faults_points(geom_filtered_pt_sources, fault_sources,
outfile, model_name, nrml_version = '04')
# Apply additive approach
print 'Writing full additive model'
fsm = os.path.join(source_model_name, source_model_name + '_additive.xml')
model_name = source_model_name + '_additive'
outfile = os.path.join(source_model_name, source_model_name + '_additive_zone.xml')
fault_sources = read_simplefault_source(fsm, rupture_mesh_spacing = fault_mesh_spacing)
write_combined_faults_points(additive_pt_sources, fault_sources,
outfile, model_name, nrml_version = '04')
# Merge pt source rates
merged_filename = area_source_model[:-4] + '_pts_geom_add_merged_pts.xml'
model_name = area_source_model.split('/')[-1].rstrip('.xml') + '_add_geom_merged'
combined_pt_sources = combine_pt_sources([additive_pt_sources, geom_filtered_pt_sources],
merged_filename, model_name, nrml_version = '04',
id_location_flag='location')
# Combine merged point sources with merged fault source model
print 'Writing collapsed logic tree seismotectonic model'
fsm = os.path.join(source_model_name, source_model_name + '_all_methods_collapsed.xml')
model_name = source_model_name + '_' + area_source_model_name + '_collapsed'
outfile = os.path.join(source_model_name, source_model_name + '_' + \
area_source_model_name +'_all_methods_collapsed.xml')
fault_sources = read_simplefault_source(fsm, rupture_mesh_spacing = fault_mesh_spacing)
write_combined_faults_points(combined_pt_sources, fault_sources,
outfile, model_name, nrml_version = '04')
|
UTF-8
|
Python
| false | false | 18,482 |
py
| 489 |
build_national_fsm_collapsed_rates.py
| 9 | 0.622768 | 0.608809 | 0 | 349 | 51.95702 | 110 |
sinishadj/dj-txmoney
| 19,670,950,230,068 |
f16edcfac1b552fe9a9bac34b54cf8ec16d81353
|
cf1a898cfba062ba42506e73e4251236032858f1
|
/txmoney/settings.py
|
7f6216c711b8e3a6d9673248c8441416ca2aa8e1
|
[] |
no_license
|
https://github.com/sinishadj/dj-txmoney
|
1e9a92a6a387655bd9ea4ea91b9c85eae69e5789
|
7d0d3e3ef94b0c1a65f2bf8430dd75140de1afdf
|
refs/heads/master
| 2020-03-28T22:59:21.810399 | 2016-07-07T10:29:42 | 2016-07-07T10:29:42 | 149,269,560 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding=utf-8
"""
This module is largely inspired by django-rest-framework's settings handling.
Settings for txmoney are all namespaced in the TXMONEY setting.
For example your project's `settings.py` file might look like this:
TXMONEY = {
    'DEFAULT_BACKEND': 'txmoney.rates.backends.OpenExchangeBackend',
    'BASE_CURRENCY': 'EUR',
    'BACKEND_KEY': '00000000000000'
}
This module provides the `txmoney_settings` object, which is used to access
txmoney settings, checking for user settings first, then falling
back to the defaults.
"""
from __future__ import absolute_import, unicode_literals
import importlib
import six
from django.conf import settings
USER_SETTINGS = getattr(settings, str('TXMONEY'), None)
DEFAULTS = {
'DEFAULT_BACKEND': 'txmoney.rates.backends.OpenExchangeBackend',
'BASE_CURRENCY': 'USD',
'BACKEND_KEY': '',
'SAME_BASE_CURRENCY': True,
'OPENEXCHANGE_NAME': 'openexchangerates.org',
'OPENEXCHANGE_URL': 'https://openexchangerates.org/api/latest.json',
'OPENEXCHANGE_BASE_CURRENCY': 'USD',
}
# List of settings that cannot be empty
MANDATORY = (
'DEFAULT_BACKEND',
'BACKEND_KEY',
)
# List of settings that may be in string import notation.
IMPORT_STRINGS = (
'DEFAULT_BACKEND',
)
def perform_import(val, setting_name):
"""
If the given setting is a string import notation, then perform the necessary import or imports.
:param val: setting value
:param setting_name: setting name
"""
if isinstance(val, six.string_types):
return import_from_string(val, setting_name)
elif isinstance(val, (list, tuple)):
return [import_from_string(item, setting_name) for item in val]
return val
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
:param val: setting value
:param setting_name: setting name
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except ImportError as e:
msg = 'Could not import "{}" for setting "{}". {}: {}.'.format(val, setting_name, e.__class__.__name__, e)
raise ImportError(msg)
class TXMoneySettings:
"""
    A settings object that allows txmoney settings to be accessed as properties.
Any setting with string import paths will be automatically resolved
and return the class, rather than the string literal.
"""
def __init__(self, user_settings=None, defaults=None, import_strings=None, mandatory=None):
self.user_settings = user_settings or {}
self.defaults = defaults or {}
self.import_strings = import_strings or ()
self.mandatory = mandatory or ()
def __getattr__(self, attr):
if attr not in self.defaults.keys():
raise AttributeError('Invalid txmoney setting: "{}"'.format(attr))
try:
# Check if present in user settings
val = self.user_settings[attr]
except KeyError:
# Fall back to defaults
val = self.defaults[attr]
# Coerce import strings into classes
if val and attr in self.import_strings:
val = perform_import(val, attr)
self.validate_setting(attr, val)
# Cache the result
setattr(self, attr, val)
return val
def validate_setting(self, attr, val):
if not val and attr in self.mandatory:
raise AttributeError('txmoney setting: "{}" is mandatory'.format(attr))
txmoney_settings = TXMoneySettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS, MANDATORY)
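# A minimal usage sketch (assuming Django settings are already configured; the
# attribute names come from DEFAULTS above):
#
#   from txmoney.settings import txmoney_settings
#
#   txmoney_settings.BASE_CURRENCY    # 'USD' unless overridden in TXMONEY
#   txmoney_settings.DEFAULT_BACKEND  # resolved to the backend class, because
#                                     # 'DEFAULT_BACKEND' is listed in IMPORT_STRINGS
#   txmoney_settings.BACKEND_KEY      # raises AttributeError while empty, because
#                                     # 'BACKEND_KEY' is listed in MANDATORY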
|
UTF-8
|
Python
| false | false | 3,687 |
py
| 27 |
settings.py
| 23 | 0.664226 | 0.659615 | 0 | 119 | 29.983193 | 114 |
hichamkettani6/computer-sciences
| 6,554,120,094,935 |
c04acdcc18d0474341e603c4b1afe257216ba5a7
|
5be916e4a6e36a204b7a05bfb673fd56e7369010
|
/Python/src/2020-21/20201221_lab11-6_3.py
|
1ddd7d760eaceabe17f83e727b3111bd53d064ea
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
https://github.com/hichamkettani6/computer-sciences
|
ccd27c058f65a0b79950d7d92b64ddbb9ce6d384
|
07c59e4269eae3efbf4f771c36a67e7b1ea09a57
|
refs/heads/master
| 2023-08-16T05:14:51.383989 | 2021-10-22T12:43:47 | 2021-10-22T12:43:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# ###### /=================================================\
# ####### | CLASS EXAMPLE FOR "COMPUTER SCIENCES" (07JCJ**) |
# #### \ | https://github.com/squillero/computer-sciences |
# ###G c\ | |
# ## _\ | © 2020 Giovanni Squillero <squillero@polito.it> |
# | _/ | Free for personal or classroom use. |
# | _/ \=================================================/
FILE_URL = 'https://www.cia.gov/library/publications/the-world-factbook/rankorder/rawdata_2004.txt'
import urllib.request as req
def main():
"""Entry point"""
countries = dict()
try:
for raw_line in req.urlopen(FILE_URL):
line = raw_line.decode('utf-8').rstrip()
first_block, income = line.rsplit(maxsplit=1)
line_number, country = first_block.split(maxsplit=1)
value = int(income[1:].replace(',', ''))
countries[country] = value
except OSError as problem:
print(f"Yeuch, we have a problem: {problem}")
# not sorted (ie. creation order)
for k in countries:
print(f"{k} -> {countries[k]}")
print()
# sorted in alphabetic order
for k in sorted(countries):
print(f"{k} -> {countries[k]}")
print()
# sorted according to the value
from operator import itemgetter
    # itemgetter(1) must see (key, value) pairs, so sort the dict's items, not its keys
    for k, v in sorted(countries.items(), key=itemgetter(1), reverse=True):
        print(f"{k} -> {v}")
print()
# sorted according to the value (but using a lambda)
for k in sorted(countries, key=lambda k: countries[k], reverse=True):
print(f"{k} -> {countries[k]}")
print()
if __name__ == '__main__':
main()
|
UTF-8
|
Python
| false | false | 1,737 |
py
| 68 |
20201221_lab11-6_3.py
| 67 | 0.509793 | 0.501152 | 0 | 50 | 33.72 | 99 |
connordlee/data-533-lab-4-Akash-and-Connor
| 4,277,787,465,195 |
cf7fa22a28441f8c554fded8cf50b4e23d2e7da1
|
4dbd4b0f18720609b124612374852997ed8aabfc
|
/TestMemberTypes.py
|
5f362692cb31f54004fdf776d316e184438166e6
|
[] |
no_license
|
https://github.com/connordlee/data-533-lab-4-Akash-and-Connor
|
9dbc9c7c73b57e4159bf3aeb8575a4e2aa4cab27
|
9bec8e8a1d972a71b84343e2ac1add75700f9e17
|
refs/heads/master
| 2020-09-25T09:00:56.247119 | 2019-12-13T00:12:53 | 2019-12-13T00:12:53 | 225,969,308 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest
from club import club
from club.member.membertypes import player, staff
class TestMemberTypes(unittest.TestCase):
@classmethod
def setUpClass(cls):
# Initializing Staff for testing
cls.zidane = staff('Zinedine Zidane', 'France', 5500000, 9, 'Skipper')
cls.vazquez = staff('Roberto Vazquez', 'Spain', 3200000, 1, 'Goalkeeping Coach')
        # Initializing Players for testing
cls.ramos = player('Sergio Ramos', 'Spain', 1500000, 14, 'Right Back', 15)
cls.benzema = player('Karim Benzema', 'France', 7920000, 10, 'Striker', 9)
def setUp(self):
# Initializing Club Info For Testing
plist = [self.ramos.asList(), self.benzema.asList()]
slist = [self.zidane.asList(), self.vazquez.asList()]
self.madrid = club.club(plist, slist)
@classmethod
def tearDownClass(cls):
# Deleting Staff and Player Objects
del cls.zidane
del cls.vazquez
del cls.ramos
del cls.benzema
def tearDown(self):
# Deleting Club Object
del self.madrid
def test_updatePosition(self):
# Testing Updating Players stored inside a club (2 assertions)
self.madrid.members['players'][0].updatePosition('Center Back')
self.madrid.members['players'][1].updatePosition('Left Right Out')
self.assertEqual(self.madrid.members['players'][0].position,'Center Back')
self.assertEqual(self.madrid.members['players'][1].position, 'Left Right Out')
# Testing updating players independent of club (2 assertions)
self.ramos.updatePosition('Bench Warmer')
self.benzema.updatePosition('Forward')
self.assertEqual(self.ramos.position, 'Bench Warmer')
self.assertEqual(self.benzema.position, 'Forward')
# Testing that assertIs works on the updated players (2 assertions)
self.assertIs(self.ramos.position, 'Bench Warmer')
self.assertIs(self.benzema.position, 'Forward')
def test_updateJerseyNum(self):
# Testing Updating Players stored inside a club (2 assertions)
self.madrid.members['players'][0].updateJerseyNum(0)
self.madrid.members['players'][1].updateJerseyNum(9)
self.assertEqual(self.madrid.members['players'][0].jersey,0)
self.assertEqual(self.madrid.members['players'][1].jersey, 9)
# Testing updating players independent of club (2 assertions)
self.ramos.updateJerseyNum(4000000)
self.benzema.updateJerseyNum(-43)
self.assertEqual(self.ramos.jersey, 4000000)
self.assertEqual(self.benzema.jersey, -43)
# Testing that assertIs works on the updated players (2 assertions)
self.assertIs(self.ramos.jersey, 4000000)
self.assertIs(self.benzema.jersey, -43)
def test_updateTitle(self):
# Testing Updating Players stored inside a club (2 assertions)
self.madrid.members['staff'][0].updateTitle('Manager')
self.madrid.members['staff'][1].updateTitle('Keeper Keeper')
self.assertEqual(self.madrid.members['staff'][0].title,'Manager')
self.assertEqual(self.madrid.members['staff'][1].title, 'Keeper Keeper')
# Testing updating players independent of club (2 assertions)
self.zidane.updateTitle('Bench Warmer')
self.vazquez.updateTitle('Forward')
self.assertEqual(self.zidane.title, 'Bench Warmer')
self.assertEqual(self.vazquez.title, 'Forward')
# Testing that assertIs works on the updated players (2 assertions)
self.assertIs(self.zidane.title, 'Bench Warmer')
self.assertIs(self.vazquez.title, 'Forward')
def test_asList(self):
# Running the asList function on one player and one staff
ramosList = self.ramos.asList()
zidaneList = self.zidane.asList()
# Testing that the output is of type list (2 assertions)
self.assertIsInstance(ramosList, list)
self.assertIsInstance(zidaneList, list)
# Testing that information in lists are correct (4 assertions)
self.assertEqual(ramosList[0], 'Sergio Ramos')
self.assertEqual(ramosList[5], 15)
self.assertEqual(zidaneList[1], 'France')
self.assertEqual(zidaneList[2], 5500000)
unittest.main(argv=[''], verbosity=2, exit=False)
|
UTF-8
|
Python
| false | false | 4,330 |
py
| 27 |
TestMemberTypes.py
| 14 | 0.668129 | 0.64388 | 0 | 91 | 46.593407 | 88 |
parsaroohi/eshop
| 3,358,664,439,785 |
6c41d588802217a25383c6bfa0d66949ce2fcb61
|
f11e23deb3dc2a6fe8cc045d4f55e39f163f5ab5
|
/eshop_product_category/models.py
|
2c1ab2d2851c6d5ebb087094d5366690924fb45a
|
[] |
no_license
|
https://github.com/parsaroohi/eshop
|
2f99ecf7b433f959a8d4978a1e338587bacb0d73
|
139acb9945737a8b3e6e2b147614912074b3eca9
|
refs/heads/main
| 2023-08-12T16:55:48.193121 | 2021-10-17T15:37:00 | 2021-10-17T15:37:00 | 418,174,675 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
# Create your models here.
class ProductCategory(models.Model):
    title = models.CharField(max_length=150, verbose_name='عنوان')
    name = models.CharField(max_length=150, verbose_name='عنوان در url')
    class Meta:
        verbose_name = 'دسته بندی'
        verbose_name_plural = 'دسته بندی ها'
def __str__(self):
return self.title
|
UTF-8
|
Python
| false | false | 401 |
py
| 52 |
models.py
| 45 | 0.687332 | 0.671159 | 0 | 14 | 25.571429 | 69 |
dominicgomez/fighter
| 10,651,518,901,893 |
3ae22bbd2001133e3f9427a04a029547e8dcc45f
|
6a131b6554797f5024afcb330cd80edeb18a3207
|
/src/util.py
|
1b172948dcccb57b85ec8745d115a984580fb9c1
|
[] |
no_license
|
https://github.com/dominicgomez/fighter
|
fb1cb70be97e34850772145b525b4ddfb35b1908
|
aba27551d4be3ed4df8a47cfe14b8475cbbc9c52
|
refs/heads/master
| 2020-05-26T13:33:11.746375 | 2017-03-19T08:41:50 | 2017-03-19T08:41:50 | 85,002,140 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'Dominic Gomez'
__email__ = 'DominicAnthonyGomez@gmail.com'
"""Miscellaneous functions that make life easier."""
import operator
BLACK = (0,0,0)
def tupadd(lhs, rhs):
"""Componentwise addition of tuples.
lhs ((int,int)): The first tuple.
rhs ((int,int)): The second tuple.
"""
return tuple(map(operator.add, lhs, rhs))
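# Example (illustrative): tupadd((1, 2), (3, 4)) == (4, 6)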
def is_in_bounds(elem_sz, pos, cont_sz):
"""Determine if a rectangle is completely inside another rectangle.
elem_sz ((int,int)): The (w,h) dimensions of the inner rectangle.
pos ((int,int)): The (x,y) coords of the inner rectangle.
cont_sz ((int,int)): The (w,h) dimensions of the outer rectangle (container).
"""
(elem_w,elem_h) = elem_sz
(x,y) = pos
(cont_w,cont_h) = cont_sz
# Make sure it's not too high or too far left.
if x < 0 or y < 0: return False
# Make sure it's not too low or too far right.
if x > (cont_w - elem_w) or y > (cont_h - elem_h): return False
return True
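# Example (illustrative): a 10x10 sprite at (5, 5) fits inside a 100x100 window,
# so is_in_bounds((10, 10), (5, 5), (100, 100)) is True, while a position of
# (95, 95) would push it past the right/bottom edge and return False.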
|
UTF-8
|
Python
| false | false | 993 |
py
| 7 |
util.py
| 7 | 0.628399 | 0.623364 | 0 | 33 | 29.090909 | 81 |
maverick2789/Sherlock
| 15,607,911,196,399 |
fdc51aee79be2542787143a1ade3a4d906f76339
|
22ece3d0e592351fe9d8ca7a9c1b20f7be63b960
|
/Manisha/pipeline3.py
|
c602f536958d329dcd0e23d51ee2da7f8af736de
|
[] |
no_license
|
https://github.com/maverick2789/Sherlock
|
0f0fadb7e9784f7cf0667bca0636e18c7a4e5b8d
|
bcc5970ea174c60344b08ad84170a21131a63f04
|
refs/heads/master
| 2016-09-13T17:29:12.383598 | 2016-05-01T11:21:30 | 2016-05-01T11:21:40 | 56,921,648 | 0 | 0 | null | false | 2016-05-01T10:14:34 | 2016-04-23T13:51:46 | 2016-04-23T16:42:07 | 2016-05-01T10:14:34 | 24 | 0 | 0 | 0 |
Python
| null | null |
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 30 02:48:03 2016
@author: Salazar
"""
def leaves(tree):
"""Finds NP (nounphrase) leaf nodes of a chunk tree."""
for subtree in tree.subtrees(filter = lambda t: t.label()=='NP'):
yield subtree.leaves()
def normalise(word):
"""Normalises words to lowercase and stems and lemmatizes it."""
word = word.lower()
    word = stemmer.stem(word)
word = lemmatizer.lemmatize(word)
return word
def acceptable_word(word):
"""Checks conditions for acceptable word: length, stopword."""
accepted = bool(2 <= len(word) <= 40
and word.lower() not in stopwords)
return accepted
def get_terms(tree):
for leaf in leaves(tree):
term = [ normalise(w) for w,t in leaf if acceptable_word(w) ]
print term
def process_sent_insert_db(sent):
toks=sent.split()
postoks = nltk.pos_tag(toks)
print postoks
tree = chunker.parse(postoks)
print tree
tree.draw()
for subtree in tree.subtrees(filter = lambda t: t.label()=='NP'):
#print 'subtree:',subtree
leaves= subtree.leaves()
#print 'leaves ',leaves
if len(leaves)==1: #be sure its only a noun..hence node without property
node_word=wordnet_lemmatizer.lemmatize(leaves[0][0])
print 'will create node ',node_word
#graphAPI.insert_node(client,node_word)
else: #maybe adj/noun followed by noun
#properties=[a[0] for a in leaves if a[1]=='JJ']
properties=[['type',a[0]] for a in leaves[:-1]]
node_word=wordnet_lemmatizer.lemmatize(leaves[-1][0])
print 'will create node ',node_word,properties
#graphAPI.insert_node(client,node_word,properties)
import nltk
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer
#import graphAPI
text="""England was inhabited for many centuries before its written history began.
The earliest races that possessed the country were stunted, brutal savages.
They used pieces of rough flint for tools and weapons. From flint too they produced fire.
They lived by hunting and fishing, and often had no homes but caves and rock shelters."""
lemmatizer = nltk.WordNetLemmatizer()
stemmer = nltk.PorterStemmer()  # needed by normalise(); it was never defined in the original
stopwords = set(stopwords.words('english'))  # acceptable_word() needs a membership-testable set
grammar = r"""
#NBAR:
# {<DT>?<NN.*|JJ>*<NN.*>} # Nouns and Adjectives, terminated with Nouns
TP:
{<DT>|<W.*>|<IN>}
NP:
{<PRP\$>?<JJ.*>*<NN>+}
{<NNP>+}
{<PRP\$>?<JJ.*>*<NNS>+}
{<NN>+}
COMBONP:
{<NP|COMBONP>+<CC><NP|COMBONP>+}
COMBONP2:
{<NP|COMBONP>+<CC><NP|COMBONP>+}
# NP:
# {<NBAR>}
# {<NBAR><IN><NBAR>} # Above, connected with in/of/etc...
VP:
{<TP>*<VB.?>?<TP>*<VB.?>*<TP>*}
adv:
{<RB.*>}
NUM:{<CD>}
NVN:
{<NP|COMBONP|COMBONP2><VP><NP|COMBONP|COMBONP2>}
"""
chunker = nltk.RegexpParser(grammar)
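# Illustrative only: for a POS-tagged sentence such as
#   [('The', 'DT'), ('earliest', 'JJS'), ('races', 'NNS'), ('that', 'WDT'), ...]
# the NP rules above are meant to group adjective+noun runs like
# ('earliest', 'JJS') ('races', 'NNS') into a single NP subtree, which
# process_sent_insert_db then turns into a node word (the lemmatised last word)
# plus 'type' properties taken from the preceding words.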
wordnet_lemmatizer = WordNetLemmatizer()
#client=graphAPI.open_db()
#main work starts here
text=nltk.sent_tokenize(text)
for sent in text:
process_sent_insert_db(sent)
#obj = graphAPI.fetch_thing(client,'animal')
|
UTF-8
|
Python
| false | false | 3,135 |
py
| 11 |
pipeline3.py
| 8 | 0.613078 | 0.604147 | 0 | 102 | 29.617647 | 92 |
kellnett/code_class_project_2020
| 2,491,081,061,917 |
e62fcf58a38e94e45ab816cae01695ed7172ab29
|
2d083340548b283f3eb885472faae331dcd86252
|
/fun.py
|
c3acffeb7f2f80db89eabffb25a4e22725044a5e
|
[] |
no_license
|
https://github.com/kellnett/code_class_project_2020
|
0b2bd1ff2b3f0f1908749a10fdeb39ff3c27d097
|
172e9529e3134772267dd7a76d53b3b2f5ee3b38
|
refs/heads/master
| 2022-05-24T15:04:50.470092 | 2020-05-01T16:57:17 | 2020-05-01T16:57:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#codes the functions necessary to run 'runProject.py'
#define main monitor screen as 'testMonitor' in PsychoPy
from psychopy import visual, event, core, misc
from psychopy.hardware import keyboard
import numpy
import random
#define global variables
win=None
fix=None
rect=None
kb=None
mouse=None
log_file=None
three=None
six=None
twelve=None
color_dir=None
color_wheel=None
color_select=None
rating_dir1=None
rating_dir2=None
myRatingScale=None
d1=None
d2=None
p1=None
p2=None
p3=None
p4=None
p5=None
p6=None
p7=None
#Input: None
#Output:None
#Initialize project intro and practice trial instructions
def InitializeDirections():
global d1, d2
global p1, p2, p3, p4, p5, p6, p7
#d1 and d2: Introduce the Experiment
d1=visual.TextStim(
win,
height=1.5,
wrapWidth=30,
color='blue',
pos=(-1.5,9),
text='''
Welcome to Kelle's coding project!
''')
d2=visual.TextStim(
win,
height=0.6,
wrapWidth=28,
color='black',
pos=(-0.7,1),
text='''
In this task, we will see how well you can time three different intervals.
Without actively counting, you must wait and press any key when you
feel the interval has elapsed.\n\n
But that's not all. Some trials will have a background constantly changing
colors. At the end of those trials, you will have to report the color of the
screen at the end of the interval.\n\n\n\n
Ready to start? Just kidding, let's do a practice trial.
Press any key to continue.
''')
    #p1-p7: Practice Trial Instructions
p1=visual.TextStim(
win,
height=0.6,
wrapWidth=28,
color='black',
pos=(-1,3),
text='''
At the beginning of each trial, a number will be displayed in the center of the screen.
This is the interval (in seconds) you will time. During the actual task, this number will
appear and disappear on its own.\n
For this practice trial, press any key to display the interval.
''')
p2=visual.TextStim(
win,
height=0.6,
wrapWidth=28,
color='black',
pos=(-1,8),
text='''
The interval is 6 seconds but don't start timing until you see a small black square in the center
of the screen. During the task, this square will appear and disappear automatically.
For now, press any key to continue.
''')
p3=visual.TextStim(
win,
height=0.6,
wrapWidth=28,
color='black',
pos=(-1,8),
text='''
When you perceive 6 seconds have passed, press any key.
''')
p4=visual.TextStim(
win,
height=0.6,
wrapWidth=28,
color='black',
pos=(-1,8),
text='''
Here is the interval. This time, we'll wait for it to disappear on its own. Remember to start
timing when you see the black square and press any key at the end of the interval.
''')
p5=visual.TextStim(
win,
height=0.6,
wrapWidth=28,
color='black',
pos=(-1,8),
text='''
**Remember the screen color at the time you press**
''')
p6=visual.TextStim(
win,
height=0.6,
wrapWidth=28,
color='black',
pos=(-1,8),
text='''
Cool! Using the mouse, select the color of the background at the end of the interval.
''')
p7=visual.TextStim(
win,
height=0.6,
wrapWidth=28,
color='black',
pos=(-1,4),
text='''
Each trial will be like one of the two we just practiced: white or rainbow. For all trials, start
timing when you see the black square and press any key when the time has elapsed.
There are 24 trials and three possible intervals lengths: 3, 6 or 12 seconds.\n\n
When you are ready to start, press any key and the task will begin!
''')
#Input: None
#Output:None
#Make color wheel and color wheel directions
def MakeColorWheel():
global color_wheel, color_select, color_dir
#Color wheel directions
color_dir=visual.TextStim(
win,
height=0.7,
wrapWidth=28,
color='black',
pos=(-1,8),
text='''
Use the mouse to select the color of the screen at the end of the interval.
''')
#Create array of colors in hsv
hsv=numpy.ones([256,256,3], dtype=float)
hsv[:,:,0]=numpy.linspace(0,360,256, endpoint=False)
hsv[:,:,1]=1
hsv[:,:,2]=1
#Create color wheel
color_wheel=visual.RadialStim(
win,
tex=misc.hsv2rgb(hsv),
angularCycles=1,
interpolate=True,
texRes=360,
size=(10,10),
pos=(-8,0))
#Create color selection box
color_select=visual.Rect(
win,
width=10,
height=10,
pos=(5,0),
lineColor='black',
fillColor='white')
#Input: None
#Output:None
#Draw rating scale and rating scale directions
def MakeRatingScale():
global myRatingScale, rating_dir1, rating_dir2
#Rating directions (rating_dir1 for practice trials, rating_dir2 for task)
rating_dir1=visual.TextStim(
win,
height=0.6,
wrapWidth=28,
color='black',
pos=(-1,3),
text='''
On the next screen you will use the keyboard arrows to select how well you timed the interval.
Select "Early" if you think you pressed well before the interval elapsed, "Close" if you think
you pressed approximately when the interval elapsed, or "Late" if you think you pressed well
after the interval elapsed. There's no right or wrong answer for this, just your own opinion!
(You will be asked to do this after each trial).\n\n
Press any key to display the rating scale and make your selection.
''')
rating_dir2=visual.TextStim(
win,
height=0.7,
wrapWidth=28,
color='black',
pos=(-1,7),
text='''
Use the arrows to select how well you timed the interval.\n\n
Press enter when done.''')
#Initialize rating scale
myRatingScale=visual.RatingScale(
win,
pos=(0,0),
lineColor='black',
markerStart='Close',
choices=['Early', 'Close', 'Late'])
#Input: None
#Output:None
#Make all other visual stims
def MakeVisualStim():
global rect, fix
global three, six, twelve
three=visual.TextStim(
win,
height=3,
color='black',
pos=(0,0),
text="3")
six = visual.TextStim(
win,
height=3,
color='black',
pos=(0,0),
text="6")
twelve=visual.TextStim(
win,
height=3,
color='black',
pos=(0,0),
text="12")
rect=visual.Rect(
win,
width=200,
height=100,
fillColorSpace='hsv',
fillColor=[0,0,1])
fix=visual.GratingStim(
win,
size=0.5,
pos=[0,0],
sf=0,
color='black')
#input: None
#ouput: None
#Load everything and open data file
##Update monitor name and new subject file name
def Initialize():
global win
global kb, mouse
global log_file
##Change data file for each new subject (sub_01.csv, sub_02.csv, etc.)
log_file=open('test_00.csv', 'a')
#Initialize window
##Define main monitor as 'testMonitor' OR update below
win = visual.Window(
[1024,768],
monitor='testMonitor',
units='deg',
color='white',
fullscr=True)
#initialize directions, color wheel, rating scale and all other visual stim
InitializeDirections()
MakeColorWheel()
MakeRatingScale()
MakeVisualStim()
#initialize mouse and keyboard
mouse=event.Mouse()
kb=keyboard.Keyboard()
#Input: None
#Output:None
#Introduces the task and walks participant through 2 example trials
def ShowInstructions():
global win
    global d1, d2
    global p1, p2, p3, p4, p5, p6, p7
global six, twelve
global myRatingScale, rating_dir1, rating_dir2
global log_file
#Introduce task, press any key to advance
while not event.getKeys():
d1.draw()
d2.draw()
win.flip()
##First Practice Trial ('White' trial)
#Explain interval number display, press any key to advance
while not event.getKeys():
p1.draw()
win.flip()
core.wait(0.5)
six.draw()
win.flip()
core.wait(0.5)
#Display practice interval "6"
while not event.getKeys():
six.draw()
p2.draw()
win.flip()
win.flip()
core.wait(1)
#Show fixation box and explain, press any key to advance
while not event.getKeys():
fix.draw()
p3.draw()
win.flip()
#Show scale and explain, press ENTER to advance
event.clearEvents()
while not event.getKeys():
rating_dir1.draw()
win.flip()
RatingScale()
##Second Practice Trial ('Rainbow' trial)
#Display practice interval '12', will advance automatically
p4.draw()
twelve.draw()
win.flip()
core.wait(10)
win.flip()
core.wait(2)
#Reset keyboard responses
event.clearEvents(eventType='keyboard')
#Show fixation square with color changing background, press any key to advance
color=1
while not event.getKeys():
if color==-1:
fix.draw()
win.flip()
else:
rect.setFillColor([color,1,1])
rect.draw()
fix.draw()
p5.draw()
win.flip()
core.wait(0.05)
color+=2
p6.draw()
win.flip()
ColorSelect()
RatingScale()
#Final directions before task begins
while not event.getKeys():
p7.draw()
win.flip()
win.flip()
core.wait(5)
#Input: background start color: hsv[0]
#Output:selected color [hsv]
#Use mouse to click color from color wheel, display color in box to the right
def ColorSelect(color=0):
global win, rect, fix
global mouse, kb
global color_select, color_dir
mouse_click=True
    #if it's a 'white' trial, assign the new color as -1 and skip color selection
if color==-1:
new_color=[-1,1,1]
else:
mouse_click=False
#until the mouse is clicked, update mouse position and display color mouse is on
while mouse_click==False:
mouse_pos=mouse.getPos() #find mouse position
dx=mouse_pos[0]+8 #assign x value, +8 to adjust for wheel location
dy=mouse_pos[1] #assign y value
#get circle radius (distance from mouse to color wheel center)
r=numpy.sqrt((dx**2)+(dy**2))
            #if the mouse is at the circle's center, set the radius to 1 to avoid dividing by zero
            if r<0.0001:
                r=1
            #find the hue angle (degrees from the positive y-axis) for the mouse location
            hue=numpy.arccos(dy/r)/(numpy.pi/180)
            #mirror the angle for the left half of the wheel
            if dx<0:
                hue=360-hue
#assign value to color mouse is over
new_color=[hue,1,1]
#if mouse is outside the color wheel, display white
if r>5:
new_color=[0,0,1]
#fill display with color mouse is over
color_select.setFillColor(new_color, 'hsv')
#display colorwheel, display, and directions
color_wheel.draw()
color_select.draw()
color_dir.draw()
win.flip()
#don't allow mouse clicks outside of color wheel
if r<5:
buttons=mouse.getPressed()
mouse_click=buttons[0]
return(new_color)
#Input: None
#Output:selected rating, string-'Early', 'Close', or 'Late'
#Display rating scale and directions, press ENTER to select
def RatingScale():
global win
global kb
global myRatingScale, rating_dir2
myRatingScale.noResponse=True
#show and update rating scale until ENTER is pressed
while myRatingScale.noResponse:
myRatingScale.draw()
rating_dir2.draw()
win.flip()
#get selected rating for the output
rating=myRatingScale.getRating()
return(rating)
#Input: None
#Output: 24 combos of [0] start color and [1] interval
#Should have 8 trials of each interval
#Each item in output list is 1 trial
def GenerateTrialSequence():
trials=[]
#start color options: 4 'rainbow' trials, 4 'white' trials (-1)
colors=[0, 120, 180, 300,
-1, -1, -1, -1]
#three interval options (in seconds)
intervals=[3,6,12]
#make all combinations of start colors and trial intervals
for int in intervals:
for col in colors:
t =[col,int]
trials.append(t)
#return list of trials
return trials
#Input: Trial sequence list
#Output:None
#Randomizes order of trial sequence list
def RandomizeTrialSequence(trials):
random.shuffle(trials)
#Input: Randomized trial sequence list: [color[0], interval]...
#Output:None--writes data to .csv file [trial_interval, trial_color, RT, color_end, color_selected, rating]
#If color==-1, it is a colorless/'white' trial
def RunTrial(color=0, interval=30):
global win, rect, fix
global kb
global myRatingScale, rating_dir2
global log_file
response=''
color_start=color
#Add trial info to response line
response+='trial_interval, ' + str(interval) + ', ' + 'trial_color, ' + str(color_start) + ', '
#Display interval for this trial
if interval==3:
three.draw()
win.flip()
core.wait(2)
elif interval==6:
six.draw()
win.flip()
core.wait(2)
elif interval==12:
twelve.draw()
win.flip()
core.wait(2)
win.flip()
core.wait(1)
#reset keyboard responses
kb.clock.reset()
kb.clearEvents()
event.clearEvents(eventType='keyboard')
rainbow=color
#Until key has been pressed, flip fixation square
#If a 'rainbow' trial, also change background color, starting on trial "color"
while not event.getKeys():
if rainbow==-1:
fix.draw()
win.flip()
else:
rect.setFillColor([rainbow,1,1])
rect.draw()
fix.draw()
win.flip()
core.wait(0.05)
rainbow+=2
win.flip()
core.wait(0.5)
    #If color != -1 (i.e. a 'rainbow' trial), display the color wheel and have the participant select a color
color_selected=ColorSelect(color)
#If a key press has been made, find RT for key press and add to responses
ptbKeys=kb.getKeys()
if ptbKeys!=[]:
response+='RT, ' + f"{ptbKeys[0].rt:.4f}" + ", "
else:
        response+='RT, ' + '-1' + ", "
#Add color at key press (timed color) and chosen color from color wheel
response+='color_end, ' + str(rainbow) + ', selected_color, ' + f"{color_selected[0]:.4f}" + ', '
win.flip()
core.wait(1)
#Display rating scale, ENTER to advance
rating=RatingScale()
#Add rating selection to responses
response+=rating
#Add all responses to log file, move to next row
log_file.write(response + "\n")
#Input: List of trials from generating sequence
#Output:None
#For each item in trial list, run task
def RunTask(trials):
for i in range(len(trials)):
RunTrial(trials[i][0], trials[i][1])
#Input: None
#Output:None
#Closes log_file, closes window and quits core
def TerminateTask():
global win
global log_file
log_file.close()
win.close()
core.quit()
|
UTF-8
|
Python
| false | false | 16,025 |
py
| 4 |
fun.py
| 2 | 0.58727 | 0.566115 | 0 | 646 | 23.806502 | 111 |
kws/jekyll-sharepoint
| 549,755,826,671 |
d295cec59e0d7a2cd8e1c41bf8e55244bb30c087
|
a460bb06a9e26e5f000c83e68c898fe2372021ed
|
/jekpoint/commands/show.py
|
360e9537f430b45a8940263cef5f63eab00e7425
|
[
"MIT"
] |
permissive
|
https://github.com/kws/jekyll-sharepoint
|
87361eb21404531e02b4eee731c2aa387d6267aa
|
e2f5a3ff41152476eb2b3735a5308ba302ec62ca
|
refs/heads/main
| 2023-04-16T01:54:17.840976 | 2021-04-25T09:29:18 | 2021-04-25T09:29:18 | 357,247,594 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from bs4 import BeautifulSoup, Comment
from jekpoint.api import SharePointApi
def add_arguments(parser):
parser.add_argument('page_name', type=str, help='Site-relative path to page')
def run(args, config):
api = SharePointApi(config.client, f"{config.site_url}{config.site_prefix}/", f"{config.site_prefix}/{config.site_pages}/")
result = api.get_page_details(args.page_name)
print(result.text)
data = result.value
content_data = data['CanvasContent1']
soup = BeautifulSoup(content_data, 'html.parser')
for div in soup.find_all('div'):
if control_data := div.get('data-sp-controldata'):
div['data-sp-controldata'] = '@data@'
div.insert(0, Comment(control_data))
print(soup.prettify())
|
UTF-8
|
Python
| false | false | 759 |
py
| 15 |
show.py
| 12 | 0.673254 | 0.669302 | 0 | 21 | 35.095238 | 127 |
johnalt/python-project-lvl1
| 1,838,246,021,624 |
239a36993dc7189049f582dfb25a41f6f0ddf625
|
514f279a0357997091303f321728fd7bd727c4d9
|
/brain_games/games/prime_game.py
|
f671074608489dbd8b0ef281b6433e5038a9f078
|
[] |
no_license
|
https://github.com/johnalt/python-project-lvl1
|
044affbb55b864e1305d78a0a0ef986caac0bda3
|
30fbae2d7fd69950c94644042bcb50461f2dc400
|
refs/heads/master
| 2020-09-27T17:02:24.002010 | 2020-04-30T11:15:59 | 2020-04-30T11:15:59 | 226,564,669 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import random
import math
RULE_TEXT = 'Answer "yes" if given number is prime. Otherwise answer "no".'
ANSWER_PATTERN = 'yes|no'
def prime_check(number):
if number == 2:
return 'yes'
divider = int(math.sqrt(number))
while number % divider != 0:
divider -= 1
if divider == 1:
return 'yes'
else:
return 'no'
def generate_round():
number = random.randint(2, 300)
res = prime_check(number)
question = 'Question: {}'.format(number)
return res, question
|
UTF-8
|
Python
| false | false | 521 |
py
| 13 |
prime_game.py
| 12 | 0.606526 | 0.591171 | 0 | 25 | 19.84 | 75 |
SinethG/HDCBIS18.2F-018
| 1,546,188,263,647 |
e6c354071e09a9daeadb0aa209fb384ab0daa56f
|
07a5e91df8e5035501a238ef7bcb7081b54a6aef
|
/01_ceasarDecrypt.py
|
6a0a440c11003680e56343f9b5ed43e68ecd594c
|
[] |
no_license
|
https://github.com/SinethG/HDCBIS18.2F-018
|
056a3244357b8db1e343acff124284a793bae824
|
7b08799131fb7f28ec4d0bc2e750b35192c858c2
|
refs/heads/master
| 2020-04-04T14:30:31.149793 | 2018-11-28T08:40:31 | 2018-11-28T08:40:31 | 156,000,954 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
text=input("Input Text:")
alphabet="abcdefghijklmnopqrstuvwxyz"
k=3
cipher=""
for c in text:
if c in alphabet:
cipher +=alphabet[(alphabet.index(c)-k)%(len(alphabet))]
print('Your Decrypted Message is '+ cipher)
|
UTF-8
|
Python
| false | false | 225 |
py
| 3 |
01_ceasarDecrypt.py
| 3 | 0.693333 | 0.688889 | 0 | 8 | 26.25 | 60 |
arzzon/PythonLearning
| 7,078,106,121,138 |
f4ea00cf6a4ee837c3eae894bb7f03ce11473d74
|
260efc49d90d8117336072e9e8a1e8351aa89199
|
/PythonInbuilts/Collections/Dictionaries/OrderedDict/ordereddict.py
|
d7ea1305cdc79b8a797768d7fc5a2d46d0ac1fba
|
[] |
no_license
|
https://github.com/arzzon/PythonLearning
|
c0af9b9640baf3f155a07df947aef331cc808519
|
2a23d40e0cef91a5547810dd8e45d598f6576391
|
refs/heads/master
| 2022-12-07T23:43:52.688186 | 2020-08-22T16:03:04 | 2020-08-22T16:03:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
DESCRIPTION:
An OrderedDict is a dict that remembers the order that keys were first inserted.
If a new entry overwrites an existing entry, the original insertion position is
left unchanged. Deleting an entry and reinserting it will move it to the end.
NOTE:
1. By default, popitem() deletes in LIFO order: od.popitem() removes the most
recently inserted item. To delete in FIFO order instead, pass last=False
(the default is last=True, i.e. LIFO): od.popitem(last=False) removes the
item that was inserted first.
LAST USED:
LRU Cache (a minimal sketch is appended after the demo code below)
'''
from collections import OrderedDict
if __name__ == "__main__":
od = OrderedDict()
od['first'] = 8
od["second"] = 4
od["third"] = 7
od["fourth"] = 14
od["fifth"] = 3
for key, value in od.items():
print(key, ':', value)
    print("# Insertion order is guaranteed to be maintained")
'''
first: 8
second: 4
third: 7
fourth: 14
fifth: 3
'''
    print("#in a plain dict (before Python 3.7), insertion order was not guaranteed")
print("#Methods available in OrderedDict")
print("#Adding a key, value to the dict")
od = OrderedDict()
od['first'] = 8
print("#Checking whether a value exists or not")
key = "first"
if key in od:
print("Yes")
else:
print("No")
print("#Remove item LIFO")
od['first'] = 8
od["second"] = 4
od["third"] = 7
od["fourth"] = 14
od["fifth"] = 3
# It will remove ("fifth",3) which is the last one
od.popitem(last=True) # od.popitem()
for key, value in od.items():
print(key, ':', value)
print("#Remove item FIFO")
od["fifth"] = 3 # inserted it back
# It will remove ("first",8) which is the first one
od.popitem(last=False)
for key, value in od.items():
print(key, ':', value)
print("#Remove a particular (key,value), pop method is used unlike popitem")
od['first'] = 8
od["second"] = 4
od["third"] = 7
od["fourth"] = 14
od["fifth"] = 3
# It will remove ("fifth",3) which is the last one
print("item removed", od.pop("third")) # od.popitem()
for key, value in od.items():
print(key, ':', value)
print()
print("#Deleting an element")
del od["fourth"]
for key, value in od.items():
print(key, ":", value)
|
UTF-8
|
Python
| false | false | 2,525 |
py
| 15 |
ordereddict.py
| 15 | 0.599208 | 0.587327 | 0 | 81 | 30.185185 | 90 |
EitanRosenzvaig/mio
| 17,463,337,061,353 |
8140bf6eec9a03768943888badc03cf61fba6a7f
|
d3f1a3d6975aa8ee7aa852de0b9a2b75a4cecbe1
|
/saleor/core/models.py
|
b81d57ba040e1cc0d171ebda2c5b95151a81ae4d
|
[
"BSD-3-Clause"
] |
permissive
|
https://github.com/EitanRosenzvaig/mio
|
4ccbca00a270c52c3a7aa06009182d0e70908260
|
5e11b29ccb1724cae6b49ceef7350dcbc986083c
|
refs/heads/master
| 2018-10-16T18:51:53.709289 | 2018-09-03T18:09:55 | 2018-09-03T18:09:55 | 136,364,366 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf import settings
from django.db import models
from django.db.models import F, Max
from django.utils.timezone import now
from django.contrib.postgres.fields import ArrayField
class BaseNote(models.Model):
user = models.ForeignKey(
settings.AUTH_USER_MODEL, blank=True, null=True,
on_delete=models.SET_NULL)
date = models.DateTimeField(db_index=True, auto_now_add=True)
content = models.TextField()
is_public = models.BooleanField(default=True)
class Meta:
abstract = True
class SortableModel(models.Model):
sort_order = models.PositiveIntegerField(editable=False, db_index=True)
class Meta:
abstract = True
def get_ordering_queryset(self):
raise NotImplementedError('Unknown ordering queryset')
def save(self, *args, **kwargs):
if self.sort_order is None:
qs = self.get_ordering_queryset()
existing_max = qs.aggregate(Max('sort_order'))
existing_max = existing_max.get('sort_order__max')
self.sort_order = 0 if existing_max is None else existing_max + 1
super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
qs = self.get_ordering_queryset()
qs.filter(sort_order__gt=self.sort_order).update(
sort_order=F('sort_order') - 1)
super().delete(*args, **kwargs)
class Event(models.Model):
url = models.TextField(blank=False)
referrer = models.TextField(blank=True)
created_at = models.DateTimeField(
default=now, editable=False)
visitor_id = models.CharField(max_length=38, blank=True)
user_agent = models.TextField(blank=True)
query_string = models.TextField(blank=True)
products = ArrayField(models.IntegerField(), blank=True)
# data = models.JSONField()
|
UTF-8
|
Python
| false | false | 1,807 |
py
| 24 |
models.py
| 19 | 0.671278 | 0.668511 | 0 | 53 | 33.113208 | 77 |
bilibileam/BeamPythonBench
| 13,950,053,805,158 |
89b1a9125819f2695e82ef5d2e224befe6b9e492
|
438356f47fc8ce0c625fc8b252298f0949bb564a
|
/src/thinkbayes/priceIsRight.py
|
5f9b6eb02a0277f69d73291d3c688af8e942bbfe
|
[] |
no_license
|
https://github.com/bilibileam/BeamPythonBench
|
d81f84f4b5a274edd57d21a09cfa8ee587959849
|
966f1a6e2ba7f7d2a58b2e3f20591f37ffbaf3a6
|
refs/heads/master
| 2018-12-12T16:14:03.866281 | 2018-11-14T02:37:06 | 2018-11-14T02:37:06 | 142,291,220 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Fragment from the "Price Is Right" example in Allen Downey's Think Bayes.
# Assumption: Pdf / EstimatedPdf come from the thinkbayes module, and ReadData()
# (not shown here) returns the sample of observed showcase prices.
import numpy
import scipy.stats

import thinkbayes
from thinkbayes import Pdf

class EstimatedPdf(Pdf):
    def __init__(self, sample):
        # kernel density estimate of the sample
        self.kde = scipy.stats.gaussian_kde(sample)

    def Density(self, x):
        return self.kde.evaluate(x)

prices = ReadData()  # assumed helper that loads the price data
pdf = thinkbayes.EstimatedPdf(prices)
low, high = 0, 75000
n = 101
xs = numpy.linspace(low, high, n)  # price grid at which to evaluate the PMF
pmf = pdf.MakePmf(xs)
|
UTF-8
|
Python
| false | false | 332 |
py
| 34 |
priceIsRight.py
| 32 | 0.659639 | 0.63253 | 0 | 14 | 22.785714 | 51 |
DanielWeidinger/Skyn
| 3,427,383,937,484 |
24d66c4512c437101abb3ea785a8fda3459a345f
|
bf225e44bce22bf355e28af66faf91e03d918b50
|
/Mask/meta/meta_data.py
|
9d2535eb5f98aa822c93bd41688ac1b67cc08fc9
|
[
"MIT"
] |
permissive
|
https://github.com/DanielWeidinger/Skyn
|
1b0911ed558689f68fb106ee55a9214d63b272be
|
0503a6f07aadf580ba8390818b7cd674e29625ba
|
refs/heads/master
| 2023-02-12T11:16:32.914031 | 2021-01-06T14:38:02 | 2021-01-06T14:38:02 | 297,622,631 | 0 | 0 |
MIT
| true | 2020-10-27T14:57:26 | 2020-09-22T11:06:15 | 2020-10-25T14:32:51 | 2020-10-27T14:57:25 | 1,316 | 0 | 0 | 0 |
Python
| false | false |
class Metadata:
'''
Metadata:
Contain everything about an image
- Mask
- Image
- Description
'''
def __init__(self, meta, dataset, img, mask):
self.meta = meta
self.dataset = dataset
self.img = img
self.type = meta["clinical"]["benign_malignant"] # malignant , benign
self.mask = mask
|
UTF-8
|
Python
| false | false | 375 |
py
| 13 |
meta_data.py
| 11 | 0.536 | 0.536 | 0 | 14 | 25.785714 | 78 |
fulfill2/project2
| 9,216,999,853,614 |
d3bb19cacdbc71245c7cd2bfd42740a936649c69
|
7c008b3d9aa67f106a4afb5b68a06cc648028be3
|
/app.py
|
a2ddc3eef1d642401fa43b33efef03496a14484b
|
[] |
no_license
|
https://github.com/fulfill2/project2
|
f13a63b97cc0348459904ffa529505e32cf289d0
|
5fe9e4c37ffb06aba84e2422d23dafa31d66aa3f
|
refs/heads/master
| 2022-04-22T08:59:44.936545 | 2020-04-24T08:12:53 | 2020-04-24T08:12:53 | 258,447,400 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import pymysql
from datetime import datetime
from flask import Flask, render_template
from flask import request, redirect, abort, session, jsonify
app = Flask(__name__,
static_folder="static",
template_folder="views")
app.config['ENV'] = 'development'
app.config['DEBUG'] = True
app.secret_key = 'sookbun'
db = pymysql.connect(
user='root',
passwd='123456',
host='localhost',
db='songs',
charset='utf8',
cursorclass=pymysql.cursors.DictCursor
)
def get_menu():
cursor = db.cursor()
cursor.execute("select id, songtitle from content")
menu = [f"<li><a href='/{row['id']}'>{row['songtitle']}</a></li>"
for row in cursor.fetchall()]
return '\n'.join(menu)
def get_template(filename):
with open('views/' + filename, 'r', encoding="utf-8") as f:
template = f.read()
return template
@app.route("/")
def index():
if 'user' in session:
title = 'Welcome ' + session['user']['name']
menu = get_menu()
button_name = "logout"
else:
title = 'Welcome'
menu = ""
button_name = "login"
message = '노래 가사집에 오신 것을 환영합니다.'
return render_template('template.html',
id="",
title=title,
lyrics =message,
url = "",
menu=menu,
button_name = button_name)
@app.route("/<id>")
def html(id):
cursor = db.cursor()
cursor.execute(f"select * from content where id = '{id}'")
topic = cursor.fetchone()
if topic is None:
abort(404)
return render_template('template.html',
id=topic['id'],
title=topic['songtitle'],
lyrics=topic['lyrics'],
url =topic['url'],
menu=get_menu(),
button_name = "logout"
)
@app.route("/delete/<id>")
def delete(id):
cursor = db.cursor()
cursor.execute(f"delete from content where songtitle='{id}'")
db.commit()
return redirect("/")
@app.route("/create", methods=['GET', 'POST'])
def create():
if request.method == 'POST':
cursor = db.cursor()
sql = f"""
insert into content (songtitle, lyrics, created, url, author_id)
values ('{request.form['title']}', '{request.form['lyrics']}',
'{datetime.now()}', '{request.form['url']}', '4')
"""
cursor.execute(sql)
db.commit()
return redirect('/')
return render_template('create.html',
title='신규 추가',
menu=get_menu(),
)
@app.route("/login", methods=['GET', 'POST'])
def login():
message = ""
if request.method == 'POST':
cursor = db.cursor()
cursor.execute(f"""
select id, name, password from author
where name = '{request.form['id']}'""")
user = cursor.fetchone()
if user is None:
message = "<p>회원이 아닙니다.</p>"
else:
cursor.execute(f"""
select id, name, password from author
where name = '{request.form['id']}' and
password = SHA2('{request.form['password']}', 256)""")
user = cursor.fetchone()
if user is None:
message = "<p>패스워드를 확인해 주세요</p>"
else:
                # on successful login, redirect to the main page
session['user'] = user
return redirect("/")
return render_template('login.html',
message=message,
menu=get_menu())
@app.route('/logout')
def logout():
session.pop('user', None)
return redirect('/')
@app.route("/favicon.ico")
def favicon():
return abort(404)
######################
## restful API
@app.route("/api/author", methods=['get', 'post'])
def author_list():
cursor = db.cursor()
    if request.method == 'GET':
        cursor.execute("select * from author")
        # return the author list as JSON
        return jsonify(cursor.fetchall())
elif request.method == 'POST':
sql = f"""insert into author (name, password)
values ('{request.form['name']}',
SHA2('{request.form['password']}', 256))"""
cursor.execute(sql)
db.commit()
return render_template('template_members.html',
id="",
title=title,
content=content,
menu=get_menu(),
link = "/",
message ="")
return abort(405)
@app.route("/api/author/<author_id>", methods=['get', 'put', 'delete'])
def author(author_id):
cursor = db.cursor()
if request.method == 'GET':
return render_template('template_members.html',
id="",
title=title,
content=content,
menu=get_menu(),
link = "/",
message ="회원정보수정에 성공하였습니다.")
# cursor.execute(f"select * from author where id = {author_id}")
# author = cursor.fetchone()
# if author:
# return render_template('template_members.html',
# id="",
# title=title,
# content=content,
# menu=get_menu())
# else:
# return abort(404)
elif request.method == 'PUT':
sql = f"""update author set
name = '{request.form['name']}',
password = SHA2('{request.form['password_new']}', 256)
where id = '{author_id}'"""
cursor.execute(sql)
db.commit()
return render_template('template_members.html',
id="",
title=title,
content=content,
menu=get_menu(),
link = "/main",
message ="회원정보수정에 성공하였습니다.")
elif request.method == 'DELETE':
cursor.execute(f"delete from author where id = '{author_id}'")
db.commit()
return render_template('template_members.html',
id="",
title=title,
content=content,
menu=get_menu(),
link = "/main",
message ="회원정보 삭제에 성공하였습니다.")
return abort(405)
app.run(port=8000)
|
UTF-8
|
Python
| false | false | 7,126 |
py
| 4 |
app.py
| 2 | 0.457386 | 0.451627 | 0 | 228 | 29.464912 | 76 |
piyushbhadauriya/WJU_OS
| 9,191,230,044,153 |
465a2d966d43c4e638b92e8d87974b8e86194b18
|
e2eefb2da7f1a113e44bde5c09074bc8c368e94f
|
/mprocess/wprocess2.py
|
8a4df215bfd3421b26a9a41190f81b71111946af
|
[] |
no_license
|
https://github.com/piyushbhadauriya/WJU_OS
|
88c73001c30c116f8f59b587ad27ade33561137d
|
7d70f23bcb0485abe91abceaedb4a41c3e911e4f
|
refs/heads/master
| 2020-03-28T22:10:25.391954 | 2019-01-15T01:00:55 | 2019-01-15T01:00:55 | 149,210,169 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import multiprocessing as mp
def foo(q):
q.put('hello')
q.put([42,None,'hello'])
if __name__ == '__main__':
    # use an explicit start method ('spawn') to create the queue and process
    ctx = mp.get_context('spawn')
    q = ctx.Queue()
    p = ctx.Process(target=foo, args=(q,))
    p.start()
    print(q.get())  # 'hello'
    print(q.get())  # [42, None, 'hello']
    p.join()
|
UTF-8
|
Python
| false | false | 275 |
py
| 48 |
wprocess2.py
| 47 | 0.512727 | 0.505455 | 0 | 15 | 17.4 | 41 |
Vignesh-Nswamy/DCGAN
| 9,363,028,716,828 |
e2da6df104467dd214f47d4c7af3584fcb629aa5
|
9cbf0fbe160663cd4ede3afdf91bee54254f8369
|
/models/generator.py
|
1dc62ccf0b95d5b3c6054d1d966843d8da191910
|
[] |
no_license
|
https://github.com/Vignesh-Nswamy/DCGAN
|
fa8086982e2e875b8efed4ae801064b822f179d1
|
6cb83ecec875453773557273b5cf47848ee5b9e4
|
refs/heads/master
| 2022-09-22T11:58:36.347796 | 2020-05-27T14:12:18 | 2020-05-27T14:12:18 | 241,244,915 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import tensorflow as tf
from tensorflow.keras.layers import Conv2DTranspose
from tensorflow.keras.layers import LeakyReLU
from tensorflow.keras.layers import BatchNormalization
from tensorflow.keras.layers import Reshape
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import load_model
from tensorflow.keras import Sequential
class Generator:
def __init__(self, configs):
self.configs = configs
self.weight_initializer = tf.keras.initializers.TruncatedNormal(stddev=0.02, mean=0.0, seed=42)
self.input_dim = eval(self.configs.noise_dim)
def __add_conv_block(self, model, filters, kernel_size, strides, block_num):
model.add(Conv2DTranspose(filters=filters,
kernel_size=kernel_size,
strides=strides,
padding='same',
use_bias=False,
kernel_initializer=self.weight_initializer,
name=f'conv_T_{block_num}'))
model.add(BatchNormalization(name=f'batch_norm_{block_num}'))
model.add(LeakyReLU())
def __create(self):
model = Sequential()
model.add(Dense(4 * 4 * 1024,
input_shape=self.input_dim,
kernel_initializer=self.weight_initializer,
name='fc_1'))
model.add(BatchNormalization(name='batch_norm'))
model.add(LeakyReLU())
model.add(Reshape((4, 4, 1024),
name='reshape'))
for block_num, num_filters in enumerate([512, 256, 128]):
self.__add_conv_block(model, num_filters, 5, 2, block_num)
model.add(Conv2DTranspose(filters=3,
kernel_size=5,
strides=2,
padding='same',
activation='tanh',
use_bias=False,
kernel_initializer=self.weight_initializer,
name='out_conv'))
return model
def model(self):
ckpt_dir = self.configs.checkpoints.path
if os.path.exists(os.path.join(ckpt_dir, 'generator.ckpt')):
print(f'Found saved generator at {os.path.join(ckpt_dir, "generator.ckpt")}. Loading...')
return load_model(os.path.join(ckpt_dir, "generator.ckpt"))
else:
return self.__create()
|
UTF-8
|
Python
| false | false | 2,555 |
py
| 8 |
generator.py
| 6 | 0.545205 | 0.530724 | 0 | 61 | 40.868852 | 103 |
dfreshreed/iron_bank
| 12,137,577,607,037 |
d9c75ba09843c1ba8f3e4f33c4cc75907f6e4223
|
13c5a979c34158e4f2a387ca0519715c4d4b71a4
|
/bank_app/bank/admin.py
|
0b2e10c122eda20a797a79580a505a1a02ddd07d
|
[] |
no_license
|
https://github.com/dfreshreed/iron_bank
|
bc66dd8ff83110ea9b1e60bfeffd0a659ef41133
|
143b65b083bcbaa95c124291c55b66ce17855b76
|
refs/heads/master
| 2021-06-11T00:56:56.702086 | 2016-10-31T03:30:06 | 2016-10-31T03:30:06 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib import admin
from bank.models import Transaction, Profile
# Register your models here.
admin.site.register([Transaction, Profile])
|
UTF-8
|
Python
| false | false | 152 |
py
| 8 |
admin.py
| 5 | 0.809211 | 0.809211 | 0 | 5 | 29.4 | 44 |
hamidzr/kodi-extras
| 1,864,015,843,533 |
d270ea7341bfd8476d9faaccfe2149e2760008ba
|
669cd831b30f29cd928042ca57d5c65d746cab44
|
/playYtSearch.py
|
f1349f730eb1f4264f36338f8e4494f368d9c768
|
[
"MIT"
] |
permissive
|
https://github.com/hamidzr/kodi-extras
|
cf1128f92217e105b3aad8f5d227e66cf19a0090
|
3c3fd11ddcaa112080878ffc79d195fa88984f4f
|
refs/heads/master
| 2022-12-13T17:06:34.604494 | 2018-08-09T13:59:21 | 2018-08-09T13:59:21 | 92,514,772 | 2 | 0 |
MIT
| false | 2022-12-08T02:24:13 | 2017-05-26T13:45:07 | 2018-11-11T17:29:16 | 2022-12-08T02:24:12 | 25 | 1 | 0 | 3 |
Python
| false | false |
#!/usr/bin/env python3
from utils.kodiBasics import *
from utils.youtube import *
itemCount = int(sys.argv[1]) # number of results to add for each search query
for query in sys.argv[2:]:
# search youtube and create a list of urls
    searchResultsJson = ytSearch(query)
    urls = []
    for idx, res in enumerate(searchResultsJson['items']):
        try:
            video_id = res['id']['videoId']
            if idx < itemCount:
                print(str(idx+1) + '. ' + res['snippet']['title'])
                urls.append("plugin://plugin.video.youtube/?action=play_video&videoid={}".format(video_id))
            else:
                break
        except Exception:
            print('an error occurred when processing item {}'.format(idx))
clearPlaylist()
createPlaylist(urls)
setShuffle(False)
playPlaylist()
|
UTF-8
|
Python
| false | false | 806 |
py
| 13 |
playYtSearch.py
| 11 | 0.609181 | 0.604218 | 0 | 26 | 30 | 101 |
Bkdeets/FOREX-Trading
| 4,200,478,044,869 |
a98dc792bf33011f3bc09d3c4829a48d879f7b0d
|
c9908a61d12ea90c2681199f93e2e14250a3b6ce
|
/Trade.py
|
9b904338451e8a857af090c288ea9288551d1c30
|
[] |
no_license
|
https://github.com/Bkdeets/FOREX-Trading
|
5fc1427dfcea337641b186797ef73fa48f5659b2
|
56add2075b43ca52456c56395cb4ed4e966697b5
|
refs/heads/master
| 2020-04-04T18:03:50.599322 | 2019-01-27T18:48:38 | 2019-01-27T18:48:38 | 156,148,330 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class Trade:
isOpen = False
isLong = False
isWin = False
entryPrice = None
exitPrice = None
startIndex = 0
endIndex = 0
pctChange = 0
def __init__(self, startIndex, endIndex=0, isOpen=False, isLong=False, entryPrice=None, exitPrice=None):
self.isOpen = isOpen
self.isLong = isLong
self.entryPrice = entryPrice
self.exitPrice = exitPrice
self.startIndex = startIndex
self.endIndex = endIndex
def setIsOpen(self,booleanValue):
self.isOpen = booleanValue
def getIsOpen(self):
return self.isOpen
def setIsLong(self,booleanValue):
self.isLong = booleanValue
def getIsLong(self):
return self.isLong
def setEntryPrice(self,doubleValue):
self.entryPrice = doubleValue
def getEntryPrice(self):
return self.entryPrice
def setExitPrice(self,doubleValue):
self.exitPrice = doubleValue
def getExitPrice(self):
return self.exitPrice
def setStartIndex(self,index):
self.startIndex = index
def getStartIndex(self):
return self.startIndex
def setEndIndex(self,index):
self.endIndex = index
def getEndIndex(self):
return self.endIndex
def initPctChange(self):
if self.exitPrice:
self.pctChange = (self.exitPrice-self.entryPrice)/self.entryPrice
def getPctChange(self):
return self.pctChange
def initIsWin(self):
if not self.isOpen:
if self.isLong and self.pctChange > 0:
self.isWin = True
elif not self.isLong and self.pctChange < 0:
self.isWin = True
else:
self.isWin = False
def getIsWin(self):
return self.isWin
def exitProcedures(self, index, exitPrice):
self.setExitPrice(exitPrice)
self.setEndIndex(index)
self.initPctChange()
self.setIsOpen(False)
self.initIsWin()
def entryProcedures(self, index, entryPrice, isLong):
self.setStartIndex(index)
self.setEntryPrice(entryPrice)
self.setIsLong(isLong)
self.setIsOpen(True)
def toList(self):
output = []
if self.isLong:
output.append("Long")
else:
output.append("Short")
if self.isWin:
output.append("Win")
else:
output.append("Loss")
output.append(self.entryPrice)
output.append(self.exitPrice)
output.append(self.pctChange)
output.append(self.startIndex)
output.append(self.endIndex)
output.append(self.isOpen)
return output
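

# A minimal usage sketch added for illustration (not part of the original class):
# open a long trade, close it, and inspect the summary list. Prices and indices
# below are made up.
if __name__ == "__main__":
    trade = Trade(startIndex=0)
    trade.entryProcedures(index=0, entryPrice=1.1000, isLong=True)
    trade.exitProcedures(index=5, exitPrice=1.1050)
    print(trade.toList())  # e.g. ['Long', 'Win', 1.1, 1.105, ~0.0045, 0, 5, False]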
|
UTF-8
|
Python
| false | false | 2,692 |
py
| 30 |
Trade.py
| 27 | 0.608098 | 0.605869 | 0 | 109 | 23.66055 | 108 |
jojojoseph94/lc-practice
| 16,690,242,949,250 |
fe7426fb9946643b46cb0739aa4538c7d3b09f3f
|
31b0a4b3dd26e5efe48950a7ef5a4876f6e5bef2
|
/strings/Candy Crush.py
|
9dcedf9cf5589fb4019e2935bb0b2a27713b0282
|
[] |
no_license
|
https://github.com/jojojoseph94/lc-practice
|
2355a9e21ee353b39a5419593061ec04c10f841b
|
f61d1573c8963b8b813c662a6c1ef4f7dda64146
|
refs/heads/main
| 2023-02-16T06:16:15.912965 | 2021-01-08T07:32:30 | 2021-01-08T07:32:30 | 320,364,262 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
This question is about implementing a basic elimination algorithm for Candy Crush.
Given a 2D integer array board representing the grid of candy, different positive integers board[i][j] represent different types of candies. A value of board[i][j] = 0 represents that the cell at position (i, j) is empty. The given board represents the state of the game following the player's move. Now, you need to restore the board to a stable state by crushing candies according to the following rules:
If three or more candies of the same type are adjacent vertically or horizontally, "crush" them all at the same time - these positions become empty.
After crushing all candies simultaneously, if an empty space on the board has candies on top of itself, then these candies will drop until they hit a candy or bottom at the same time. (No new candies will drop outside the top boundary.)
After the above steps, there may exist more candies that can be crushed. If so, you need to repeat the above steps.
If there does not exist more candies that can be crushed (ie. the board is stable), then return the current board.
You need to perform the above rules until the board becomes stable, then return the current board.
Example:
Input:
board =
[[110,5,112,113,114],[210,211,5,213,214],[310,311,3,313,314],[410,411,412,5,414],[5,1,512,3,3],[610,4,1,613,614],[710,1,2,713,714],[810,1,2,1,1],[1,1,2,2,2],[4,1,4,4,1014]]
Output:
[[0,0,0,0,0],[0,0,0,0,0],[0,0,0,0,0],[110,0,0,0,114],[210,0,0,0,214],[310,0,0,113,314],[410,0,0,213,414],[610,211,112,313,614],[710,311,412,613,714],[810,411,512,713,1014]]
"""
from typing import List


class Solution:
def candyCrush(self, board: List[List[int]]) -> List[List[int]]:
def candy_crush():
change = False
#horizontal
for i in range(0,len(board)):
fl = 0
for j in range(1,len(board[0])):
if board[i][j] == board[i][j-1]:
fl +=1
else:
if fl >= 2:
for k in range(j-fl-1,j):
if board[i][k] > 0:
board[i][k] *=-1
change = True
fl = 0
if fl >=2:
for k in range(len(board[0])-1-fl,len(board[0])):
if board[i][k] > 0:
board[i][k] *=-1
change = True
#vertical
for i in range(0,len(board[0])):
fl = 0
for j in range(1,len(board)):
if abs(board[j][i]) == abs(board[j-1][i]):
fl +=1
else:
if fl >= 2:
for k in range(j-fl-1,j):
if board[k][i] > 0:
board[k][i] *=-1
change = True
fl = 0
if fl >= 2:
for k in range(len(board)-fl-1,len(board)):
if board[k][i] > 0:
board[k][i] *=-1
change = True
#clear
for i in range(0,len(board)):
for j in range(0,len(board[0])):
if board[i][j] < 0:
board[i][j] = 0
return change
def drop():
#vertical drop
#2pointer read head write head
for i in range(0,len(board[0])):
wh = len(board)-1
for j in range(len(board)-1, -1, -1):
if board[j][i]:
board[wh][i]=board[j][i]
wh-=1
for k in range(wh+1):
board[k][i] = 0
        # repeat crush + drop until no more candies can be crushed
        while True:
            if candy_crush():
                drop()
            else:
                break
return board
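

# A minimal usage sketch added for illustration (not part of the original solution);
# it runs the solver on the example board from the problem statement above.
if __name__ == "__main__":
    example_board = [
        [110, 5, 112, 113, 114], [210, 211, 5, 213, 214], [310, 311, 3, 313, 314],
        [410, 411, 412, 5, 414], [5, 1, 512, 3, 3], [610, 4, 1, 613, 614],
        [710, 1, 2, 713, 714], [810, 1, 2, 1, 1], [1, 1, 2, 2, 2], [4, 1, 4, 4, 1014],
    ]
    for row in Solution().candyCrush(example_board):
        print(row)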
|
UTF-8
|
Python
| false | false | 4,070 |
py
| 66 |
Candy Crush.py
| 65 | 0.473464 | 0.411302 | 0 | 90 | 44.233333 | 406 |
acid-n/GeekBrains
| 326,417,547,343 |
3d714565a9f57d8e87deba93fb121294fe3cea68
|
066f2d36f8406212d0ce6adeb78042766c39007e
|
/Messenger/part_2/lesson_2/logs/config_client_log.py
|
7419f7d63fd596c6f6beef971b4fbdb1395ffb9b
|
[] |
no_license
|
https://github.com/acid-n/GeekBrains
|
c919b3d4f7a72718f30f08f6c72b26122193b535
|
bc66f411104ff34dbb912ebcdc383bed9a0a7ba5
|
refs/heads/master
| 2021-07-10T23:03:21.691131 | 2021-03-09T14:00:45 | 2021-03-09T14:00:45 | 233,423,738 | 0 | 0 | null | false | 2021-03-20T03:15:47 | 2020-01-12T16:37:39 | 2021-03-09T14:01:04 | 2021-03-20T03:15:47 | 17,334 | 0 | 0 | 1 |
Python
| false | false |
# Client logger configuration
import sys
import os
import logging
from logging.handlers import TimedRotatingFileHandler
sys.path.append('../')
from common.variables import LOGGING_LEVEL
# Set up the log message format
CLIENT_FORMATTER = logging.Formatter('%(asctime)s - %(levelname)s - %(filename)s - %(message)s')
# Set the file name (path) used for logging
PATH = os.path.dirname(os.path.abspath(__file__))
PATH = os.path.join(PATH, 'client.log')
# Create the log output stream handler
STREAM_HANDLER = logging.StreamHandler(sys.stderr)
STREAM_HANDLER.setFormatter(CLIENT_FORMATTER)
STREAM_HANDLER.setLevel(logging.ERROR)
LOG_FILE = TimedRotatingFileHandler(PATH, encoding='utf-8', interval=1, when='midnight')
LOG_FILE.setFormatter(CLIENT_FORMATTER)
# Create the logger and configure it
LOGGER = logging.getLogger('client')
LOGGER.addHandler(STREAM_HANDLER)
LOGGER.addHandler(LOG_FILE)
LOGGER.setLevel(LOGGING_LEVEL)
# Debugging
if __name__ == '__main__':
    LOGGER.critical('Critical error')
    LOGGER.error('Error')
    LOGGER.debug('Debug information')
    LOGGER.info('Informational message')
|
UTF-8
|
Python
| false | false | 1,327 |
py
| 219 |
config_client_log.py
| 146 | 0.764388 | 0.76259 | 0 | 37 | 29.054054 | 96 |
XiaoxuanHEI/Link_Invalidation
| 17,841,294,160,355 |
77030c85b5fdcedae40c9c6bc49b5225bb79ff4c
|
40126990d5560b6ea2e325bc73e3bb5666cf87e6
|
/main.py
|
23c637bcec30edac80e425bc7e27f90e990250b8
|
[] |
no_license
|
https://github.com/XiaoxuanHEI/Link_Invalidation
|
6d9a5646af9ecc1d943cadb7cc7f8a45724a34f4
|
e9d4742e74e19c14f35a7e5e1621e4e03610ca25
|
refs/heads/master
| 2022-11-04T07:30:57.960346 | 2020-06-15T22:24:28 | 2020-06-15T22:24:28 | 272,555,994 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from ontology import Ontology
from measures import changeBirthDate
from measures import changeGender
from measures import changeReligion
if __name__ == '__main__':
refalignrdfToTsv = True
source = Ontology("data/000/onto.owl")
target = Ontology("data/001/onto.owl")
subjList = source.uniqueSubjects()
    # quick test of how to iterate over the triples exposed by the Ontology class:
    count = 0
    for i, j, k in source.onto:
        print("i", i)
        print("j", j)
        print("k", k)
        #print(source.uniqueProps())
        #print(len(source.uniqueProps()))
        count += 1
        if count == 10:
            break
for i,j,k in target.onto:
print("i",i)
print("j",j)
print("k",k)
#print(source.uniqueProps())
#print(len(source.uniqueProps()))
|
UTF-8
|
Python
| false | false | 795 |
py
| 3 |
main.py
| 3 | 0.605031 | 0.593711 | 0 | 32 | 23.8125 | 73 |
NasSilverBullet/NumPy-training
| 10,814,727,692,187 |
19ffcd42f3840b2fdf99ea2436cfdf9fba8704cd
|
29426a9177d3159e55c7cf31d91a6464f4a4c2be
|
/script/day1.py
|
92088f916a0c23a3c806902ac970677e544fd7dc
|
[] |
no_license
|
https://github.com/NasSilverBullet/NumPy-training
|
22d9543c9f93cfabac2704d6404bbe93c5a1d2b1
|
1946de79dd4c952f47480a8b835152c3d1ace6e5
|
refs/heads/master
| 2020-04-10T00:39:06.816883 | 2018-12-15T12:58:56 | 2018-12-15T12:58:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
print(np.__version__)
np.show_config()
Z = np.zeros(10)
print(Z)
Z = np.zeros((10, 10))
print(f"{Z.size * Z.itemsize} bytes")
Z = np.zeros(10)
Z[4] = 1
print(Z)
Z = np.arange(10, 50)
print(Z)
Z = np.arange(50)
Z = Z[::-1]
print(Z)
nz = np.nonzero([1, 2, 0, 0, 4, 0])
print(nz)
Z = np.eye(3)
print(Z)
Z = np.random.random((3, 3, 3))
print(Z)
Z = np.random.random((10, 10))
Zmin, Zmax = Z.min(), Z.max()
print(Zmin, Zmax)
Z = np.ones((10, 10))
Z[1:-1, 1:-1] = 0
print(Z)
Z = np.ones((5, 5))
Z = np.pad(Z, pad_width=1, mode='constant', constant_values=0)
print(Z)
|
UTF-8
|
Python
| false | false | 593 |
py
| 3 |
day1.py
| 2 | 0.576728 | 0.50253 | 0 | 43 | 12.790698 | 62 |
devilfishcn/tornado
| 16,398,185,144,773 |
ef0ddf75101784ccaae01cf7ef832fdbe3397663
|
b0cd39db083ff345a7959571441806f23f4c5459
|
/redirect.py
|
bb4f12f61f3b770b863d3901a27423d5daa6a1d7
|
[] |
no_license
|
https://github.com/devilfishcn/tornado
|
97d9cf60cc06dbbf6d12d87bb3c0ecd2f4e7adda
|
4321393a66a8aa02b4522c386fba4cf7d6872e51
|
refs/heads/master
| 2021-09-01T09:34:28.647296 | 2017-12-26T08:13:17 | 2017-12-26T08:13:17 | 108,522,971 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
name = self.get_argument('name')
self.write(name)
self.write('\n')
age = self.get_argument('age')
self.write(age)
self.write('\n')
self.redirect('/maint')
class MaintHandler(tornado.web.RequestHandler):
def get(self):
self.write('maint\n')
def make_app():
return tornado.web.Application([
(r"/maint",MaintHandler),
(r"/",MainHandler),
])
if __name__=="__main__":
app=make_app()
app.listen(8001,'127.0.0.1')
tornado.ioloop.IOLoop.current().start()
|
UTF-8
|
Python
| false | false | 681 |
py
| 11 |
redirect.py
| 10 | 0.578561 | 0.563877 | 0 | 27 | 24.259259 | 47 |
kjaeschwartz/pygubuapp
| 12,841,952,262,758 |
69013ef1d6214d5e41b12f39bd6a7cec071e6261
|
2aed28602825c7254e9f59c4a0e1d1d7a60944ed
|
/pyautoguiADVANCEDv3.py
|
e2250303fc77e33b52ed49ccfa615ba0346b905b
|
[] |
no_license
|
https://github.com/kjaeschwartz/pygubuapp
|
a52a4c9aa3f89db44511e593d053025f1dba390a
|
4d5b6a687daf61a758702b2c7af10ffdabe02397
|
refs/heads/main
| 2023-07-26T19:32:43.059351 | 2021-09-08T15:07:01 | 2021-09-08T15:07:01 | 404,389,737 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import time
import tkinter as tk
import tkinter.ttk as ttk
import webbrowser
from tkinter.scrolledtext import ScrolledText
from datetime import date
import datetime
import shutil
import openpyxl
import pyautogui
#start by running pygubu-designer.exe
import pygubu
PROJECT_PATH = os.path.abspath(os.path.dirname(__file__))
PROJECT_UI = os.path.join(PROJECT_PATH, "uiADVANCEDBACKUP.ui")
id_list = []
globalColumnstartEnd = []
#TODO, FIND A WAY TO MAKE THE MESSAGE DYNAMIC.
class UiadvancedbackupApp:
def __init__(self,master= None):
# build ui
# self.builder = builder = pygubu.Builder()
self.mainFrame = ttk.Frame(master)
self.notebook4 = ttk.Notebook(self.mainFrame)
self.tab1frame = ttk.Frame(self.notebook4)
self.exporterLabel = ttk.Label(self.tab1frame)
        self.mainLabel = tk.StringVar(value='ID Exporter')
self.exporterLabel.configure(anchor='ne', borderwidth='2', compound='top', font='{Arial CYR} 14 {bold}')
self.exporterLabel.configure(foreground='#276a70', relief='flat', state='disabled', style='Toolbutton')
self.exporterLabel.configure(takefocus=True, text='ID Exporter', textvariable=self.mainLabel)
self.exporterLabel.grid(column='0', row='0')
self.leftFrame = ttk.Frame(self.tab1frame)
self.ID_entry = ScrolledText(self.leftFrame)
self.ID_entry.configure(autoseparators='true', background='#f7fcfd', blockcursor='true', borderwidth='1')
self.ID_entry.configure(height='12', highlightbackground='#69c4cb', highlightthickness='1', setgrid='false')
self.ID_entry.configure(state='normal', tabstyle='wordprocessor', takefocus=False, undo='true')
self.ID_entry.configure(width='20')
self.ID_entry.pack(side='top')
self.enterButton = tk.Button(self.leftFrame)
self.enterButton.configure(background='#8ceaea', justify='left', relief='raised', state='normal')
self.enterButton.configure(text='Enter',command=self.retrieve_input)
self.enterButton.pack(expand='true', fill='x', ipadx='13', padx='55', pady='10', side='top')
self.leftFrame.configure(height='200', padding='10', relief='flat', width='200')
self.leftFrame.grid(column='0', row='1')
self.rightFrame = tk.Frame(self.tab1frame)
self.inputATMbutton = tk.Button(self.rightFrame)
self.inputATMbutton.configure(background='#8ceaea', compound='top', foreground='#030a07', justify='left')
self.inputATMbutton.configure(padx='22', relief='raised', text='Input to ATM',command=self.open_ids_ATM)
self.inputATMbutton.pack(expand='true', fill='x', pady='15', side='top')
self.sendGSTDBsheetButton = tk.Button(self.rightFrame)
self.sendGSTDBsheetButton.configure(background='#8ceaea', compound='top', text='Send to GSTDB sheet',command=self.send2GSTDB_func)
self.sendGSTDBsheetButton.pack(anchor='s', expand='true', fill='x', pady='15', side='top')
self.makeGSTDBfoldersButton = tk.Button(self.rightFrame)
self.makeGSTDBfoldersButton.configure(background='#8ceaea', justify='left', text='Make GSTDB folders',command=self.make_GSTDB_folders_func)
self.makeGSTDBfoldersButton.pack(fill='x', pady='15', side='top')
self.CTsearchSetupButton = tk.Button(self.rightFrame)
self.CTsearchSetupButton.configure(background='#8ceaea', cursor='arrow', justify='left', padx='13')
self.CTsearchSetupButton.configure(relief='raised', text='CT search setup')
self.CTsearchSetupButton.pack(fill='x', pady='15', side='top')
self.rightFrame.configure(height='300', highlightbackground='#bcb5e6',
highlightcolor='#a8bef2')
self.rightFrame.configure(padx='20', takefocus=False, width='200')
self.rightFrame.grid(column='1', row='1', sticky='n')
self.rightFrame.grid_propagate(0)
self.tab1frame.configure(borderwidth='2', height='400',padding='14 0', relief='flat', takefocus=True)
self.tab1frame.configure(width='600')
self.tab1frame.pack()
self.notebook4.add(self.tab1frame, compound='center', state='normal', sticky='nw', text='Main')
self.tab2frame = tk.Frame(self.notebook4)
self.send2label = ttk.Label(self.tab2frame)
self.send2label.configure(anchor='center', background='#0d0b02', borderwidth='2', compound='top')
self.send2label.configure(cursor='arrow', font='{Arial CYR} 20 {bold}', foreground='#63c1c9', justify='center')
self.send2label.configure(relief='flat', state='disabled', takefocus=True, text='Send to')
self.send2label.place(anchor='center', relx='0.5', rely='.07', width='350', x='0', y='0')
self.frame7 = tk.Frame(self.tab2frame)
self.Send2idList = ScrolledText(self.frame7)
self.Send2idList.configure(state='normal')
_text_ = ''''''
self.Send2idList.insert('0.0', _text_)
self.Send2idList.pack(expand='false', padx='15', pady='10', side='top')
self.frame7.configure(background='#e8f7f7', height='200', width='200')
self.frame7.place(anchor='nw', height='225', relx='.06', rely='.16', width='175', x='0', y='0')
self.frame8 = tk.Frame(self.tab2frame)
self.send2_FMsheet = tk.Button(self.frame8)
self.send2_FMsheet.configure(background='#8ceaea', foreground='#030a07', justify='left', padx='5')
self.send2_FMsheet.configure(relief='raised', text='Send to FM sheet', width='10')
self.send2_FMsheet.pack(anchor='center', expand='false', fill='x', padx='20', pady='15', side='top')
self.send2followup = tk.Button(self.frame8)
self.send2followup.configure(background='#8ceaea', cursor='arrow', relief='raised',
text='Send to follow up sheet')
self.send2followup.pack(fill='x', padx='20', pady='15', side='top')
self.send2idarchive = tk.Button(self.frame8)
self.send2idarchive.configure(background='#8ceaea', justify='center', takefocus=False,
text='Send to ID archive')
self.send2idarchive.pack(fill='x', padx='20', pady='15', side='top')
self.send2GSTDBbutton = tk.Button(self.frame8)
self.send2GSTDBbutton.configure(background='#8ceaea', cursor='arrow', justify='center', overrelief='raised')
self.send2GSTDBbutton.configure(padx='20', relief='raised', state='normal', text='Send to GSTDB sheet',command= self.show_ids)
self.send2GSTDBbutton.configure(width='10')
self.send2GSTDBbutton.pack(fill='x', padx='20', pady='15', side='top')
self.frame8.configure(background='#e8f7f7', height='200', width='200')
self.frame8.place(anchor='nw', height='225', relx='.5', rely='.16', width='175', x='0', y='0')
self.tab2frame.configure(height='200', pady='5', width='200')
self.tab2frame.pack(side='top')
self.notebook4.add(self.tab2frame, text='Send to')
self.tab3frame = ttk.Frame(self.notebook4)
self.LeftFrame3 = tk.Frame(self.tab3frame, container='false')
self.frame11 = tk.Frame(self.LeftFrame3, container='false')
self.CommonNameLabel = tk.Label(self.frame11)
self.CommonNameLabel.configure(background='#d5dcba', borderwidth='1',
font='{Bahnschrift SemiCondensed} 14 {bold}', foreground='#69763d')
self.CommonNameLabel.configure(highlightbackground='#020605', highlightcolor='#061213', highlightthickness='3',
text='Common Name Tool')
self.CommonNameLabel.pack(side='top')
self.LastEntry = tk.Entry(self.frame11)
_text_ = ''''''
self.LastEntry.delete('0', 'end')
self.LastEntry.insert('0', _text_)
self.LastEntry.place(anchor='nw', x='35', y='50')
self.FirstEntry = tk.Entry(self.frame11)
_text_ = ''''''
self.FirstEntry.delete('0', 'end')
self.FirstEntry.insert('0', _text_)
self.FirstEntry.place(anchor='nw', x='35', y='75')
self.YOBentry = tk.Entry(self.frame11)
_text_ = ''''''
self.YOBentry.delete('0', 'end')
self.YOBentry.insert('0', _text_)
self.YOBentry.place(anchor='nw', x='35', y='100')
self.LastLabel = tk.Label(self.frame11)
self.LastLabel.configure(font='{Bahnschrift SemiCondensed} 12 {}', text='Last')
self.LastLabel.place(anchor='center', x='10', y='60')
self.FirstLabel = tk.Label(self.frame11)
self.FirstLabel.configure(font='{Bahnschrift SemiCondensed} 12 {}', text='First')
self.FirstLabel.place(anchor='center', x='10', y='85')
self.YOBlabel = tk.Label(self.frame11)
self.YOBlabel.configure(font='{Bahnschrift SemiCondensed} 12 {}', text='Y.O.B')
self.YOBlabel.place(anchor='center', x='10', y='110')
self.CommNameEnter = tk.Button(self.frame11)
self.CommNameEnter.configure(background='#a7b56f', text='Enter')
self.CommNameEnter.place(anchor='center', relx='.5', width='60', y='150')
self.ComNameMsgOutput = tk.Message(self.frame11)
self.ComNameMsgOutput.configure(background='#60e63e', font='{Bahnschrift SemiCondensed} 12 {}',
highlightbackground='#071603', highlightcolor='#071603')
self.ComNameMsgOutput.configure(highlightthickness='1', relief='flat', takefocus=False,
text='MEETS IDENTIFIER POLICY')
self.ComNameMsgOutput.configure(width='130')
self.ComNameMsgOutput.place(anchor='center', height='60', relx='0.5', width='150', y='210')
self.frame11.configure(borderwidth='1', highlightbackground='#69763d', highlightcolor='#b9dfb3',
highlightthickness='2')
self.frame11.configure(padx='10', pady='10', relief='groove', takefocus=False)
self.frame11.configure(width='200')
self.frame11.pack(anchor='center', expand='true', fill='both', padx='10', pady='10', side='left')
self.LeftFrame3.configure(background='#dadada', borderwidth='1', highlightbackground='#bcb5e6',
highlightcolor='#a8bef2')
self.LeftFrame3.configure(relief='groove', takefocus=False, width='200')
self.LeftFrame3.pack(anchor='e', expand='false', fill='both', side='left')
self.frame10 = tk.Frame(self.tab3frame)
self.frame10.configure(height='300', width='200')
self.frame10.pack(side='top')
self.tab3frame.configure(height='400', relief='sunken', width='600')
self.tab3frame.pack(side='right')
self.tab3frame.pack_propagate(0)
self.notebook4.add(self.tab3frame, text='Tools')
self.tab4frame = ttk.Frame(self.notebook4)
self.LeftFrame4 = tk.Frame(self.tab4frame, container='false')
self.frame5 = tk.Frame(self.LeftFrame4)
self.GASTDB_ETA_label = tk.Label(self.frame5)
self.GASTDB_ETA_label.configure(background='#fce7e0', font='{Bahnschrift SemiCondensed} 14 {bold}',
foreground='#eb825f', highlightbackground='#f0f9f7')
self.GASTDB_ETA_label.configure(highlightcolor='#276a70', highlightthickness='3', text='GASTDB ETA')
self.GASTDB_ETA_label.pack(anchor='s', fill='x', padx='1', pady='1', side='top')
self.frame5.configure(background='#fbfdfd', height='7', highlightbackground='#060f0d', highlightthickness='1')
self.frame5.configure(relief='groove', width='200')
self.frame5.pack(fill='x', side='top')
self.GASTDB_ETA_text = tk.Text(self.LeftFrame4)
self.GASTDB_ETA_text.configure(font='{Calibri} 30 {}', foreground='#000000', height='1',
highlightbackground='#eb825f')
self.GASTDB_ETA_text.configure(highlightcolor='#83b6be', highlightthickness='1', insertborderwidth='0',
width='5')
_text_ = '''ETA'''
self.GASTDB_ETA_text.insert('0.0', _text_)
self.GASTDB_ETA_text.pack(anchor='center', expand='true', fill='x', side='top')
self.button2 = tk.Button(self.LeftFrame4)
self.button2.configure(background='#eb825f', text='Get ETA')
self.button2.pack(ipadx='11', pady='0 40', side='top')
self.LeftFrame4.configure(borderwidth='1', highlightbackground='#eb825f', highlightcolor='#a8bef2',
highlightthickness='2')
self.LeftFrame4.configure(padx='20', pady='20', relief='groove', takefocus=False)
self.LeftFrame4.configure(width='200')
self.LeftFrame4.pack(anchor='center', expand='true', fill='both', side='left')
self.RightFrame4 = tk.Frame(self.tab4frame, container='false')
self.frame6 = tk.Frame(self.RightFrame4)
self.label5 = tk.Label(self.frame6)
self.label5.configure(background='#f9f4e6', font='{Bahnschrift SemiCondensed} 14 {bold}', foreground='#ddbb31',
highlightbackground='#ddbb31')
self.label5.configure(highlightcolor='#276a70', highlightthickness='3', relief='flat', text='CT ETA')
self.label5.pack(anchor='s', fill='x', padx='1', pady='1', side='top')
self.frame6.configure(background='#010203', height='7', highlightbackground='#8a9bc4', highlightthickness='1')
self.frame6.configure(relief='groove', width='200')
self.frame6.pack(fill='x', side='top')
self.text4 = tk.Text(self.RightFrame4)
self.text4.configure(font='{Calibri} 30 {}', foreground='#180303', height='1', highlightbackground='#ddbb31')
self.text4.configure(highlightthickness='1', insertborderwidth='0', width='5')
_text_ = '''ETA'''
self.text4.insert('0.0', _text_)
self.text4.pack(anchor='center', expand='true', fill='x', side='top')
self.button4 = tk.Button(self.RightFrame4)
self.button4.configure(background='#ddbb31', text='Get ETA')
self.button4.pack(ipadx='11', pady='0 43', side='top')
self.RightFrame4.configure(highlightbackground='#ddbb31', highlightcolor='#a8bef2', highlightthickness='2',
padx='20')
self.RightFrame4.configure(pady='20', relief='groove', takefocus=False, width='200')
self.RightFrame4.pack(anchor='center', expand='true', fill='both', side='left')
self.tab4frame.configure(height='400', relief='sunken', width='600')
self.tab4frame.pack(side='top')
self.tab4frame.pack_propagate(0)
self.notebook4.add(self.tab4frame, text='ETA dates')
self.frame2 = ttk.Frame(self.notebook4)
self.leftFrame5 = tk.Frame(self.frame2, container='false')
self.frame22 = tk.Frame(self.leftFrame5)
self.DOBlabel = tk.Label(self.frame22)
self.DOBlabel.configure(font='{Bahnschrift} 12 {bold}', foreground='#051212', relief='flat', text='DOB')
self.DOBlabel.place(anchor='nw', rely='0.0', x='7', y='60')
self.AgeLabel = tk.Label(self.frame22)
self.AgeLabel.configure(font='{Bahnschrift} 12 {bold}', foreground='#030c0c', relief='flat', text='AGE')
self.AgeLabel.place(anchor='nw', x='7', y='155')
self.frame22.configure(height='280', width='40')
self.frame22.grid(column='1', ipady='10', row='0')
self.leftFrame5.rowconfigure('0', minsize='300')
self.leftFrame5.columnconfigure('1', minsize='40')
self.frame25 = tk.Frame(self.leftFrame5)
self.DOBentrybox = tk.Entry(self.frame25)
self.DOBentrybox.configure(font='{Arial} 24 {}', highlightbackground='#00ffff', highlightthickness='1',
justify='left')
self.DOBentrybox.configure(takefocus=False, width='160')
_text_ = '''DOB'''
self.DOBentrybox.delete('0', 'end')
self.DOBentrybox.insert('0', _text_)
self.DOBentrybox.place(anchor='center', relwidth='.8', x='80', y='65')
self.AGEMESSAGE = tk.Message(self.frame25)
self.AGEMESSAGE.configure(anchor='w', background='#ffffff', font='{Ariel} 24 {}', highlightbackground='#00ffff')
self.AGEMESSAGE.configure(highlightthickness='1', relief='sunken')
self.AGEMESSAGE.place(anchor='center', relwidth='.8', x='80', y='160')
self.getageButton = tk.Button(self.frame25)
self.getageButton.configure(background='#78dcdc', text='Get Age')
self.getageButton.place(anchor='nw', x='43', y='205')
self.frame25.configure(height='280', relief='sunken', width='160')
self.frame25.grid(column='2', ipady='10', row='0')
self.leftFrame5.rowconfigure('0', minsize='300')
self.leftFrame5.columnconfigure('2', minsize='160')
self.label29 = tk.Label(self.leftFrame5)
self.label29.configure(font='{Times New Roman} 12 {bold underline}', foreground='#2b6a80', justify='center',
text='AGE TOOL')
self.label29.place(anchor='nw', height='20', width='200', x='0', y='3')
self.canvas3 = tk.Canvas(self.leftFrame5)
self.canvas3.configure(background='#2b6a80', borderwidth='1', closeenough='0', confine='false')
self.canvas3.configure(height='40', relief='flat', takefocus=False, width='200')
self.canvas3.place(anchor='n', height='6', width='400', y='20')
self.leftFrame5.configure(background='#fcfefd', borderwidth='1', height='270', relief='sunken')
self.leftFrame5.configure(width='200')
self.leftFrame5.pack(side='left')
self.rightFrame5 = tk.Frame(self.frame2, container='false')
self.entry8 = tk.Entry(self.rightFrame5)
self.entry8.configure(highlightbackground='#00ffff', highlightthickness='1')
_text_ = '''LAST'''
self.entry8.delete('0', 'end')
self.entry8.insert('0', _text_)
self.entry8.place(anchor='nw', bordermode='outside', relx='.3', rely='0.15', x='0', y='0')
self.entry9 = tk.Entry(self.rightFrame5)
self.entry9.configure(highlightbackground='#00ffff', highlightthickness='1')
_text_ = '''FIRST'''
self.entry9.delete('0', 'end')
self.entry9.insert('0', _text_)
self.entry9.place(anchor='nw', relx='0.3', rely='.3', x='0', y='0')
self.entry11 = tk.Entry(self.rightFrame5)
self.entry11.configure(highlightbackground='#00ffff', highlightthickness='1')
_text_ = '''YOB'''
self.entry11.delete('0', 'end')
self.entry11.insert('0', _text_)
self.entry11.place(anchor='nw', relx='.3', rely='.45', y='0')
self.COMnameLabelFrame = tk.LabelFrame(self.rightFrame5)
self.comNameLast = tk.Label(self.COMnameLabelFrame)
self.comNameLast.configure(font='{Bahnschrift Condensed} 12 {}', justify='left', text='Last')
self.comNameLast.place(anchor='nw', relx='.05', rely='0.01', x='1', y='0')
self.ComNameFirst = tk.Label(self.COMnameLabelFrame)
self.ComNameFirst.configure(font='{Bahnschrift Condensed} 12 {}', text='First')
self.ComNameFirst.place(anchor='nw', relx='.05', rely='.37', x='1', y='0')
self.ComNameYOB = tk.Label(self.COMnameLabelFrame)
self.ComNameYOB.configure(font='{Bahnschrift Condensed} 12 {}', padx='4', text='YOB')
self.ComNameYOB.place(anchor='nw', relx='.05', rely='.8', x='0', y='0')
self.COMnameLabelFrame.configure(borderwidth='1', height='100', relief='flat', width='200')
self.COMnameLabelFrame.place(anchor='nw', height='110', width='40', x='0', y='40')
self.commnameANSWER = tk.Text(self.rightFrame5)
self.commnameANSWER.configure(background='#e1f7d5', blockcursor='false', borderwidth='2',
font='{Cambria} 14 {bold}')
self.commnameANSWER.configure(foreground='#091801', height='2', insertofftime='0', padx='20')
self.commnameANSWER.configure(pady='15', relief='sunken', selectborderwidth='0', setgrid='false')
self.commnameANSWER.configure(tabs='1', tabstyle='tabular', takefocus=False, width='10')
self.commnameANSWER.configure(wrap='word')
_text_ = '''MEETS ID POLICY'''
self.commnameANSWER.insert('0.0', _text_)
self.commnameANSWER.place(anchor='nw', bordermode='outside', x='20', y='199')
self.ComNameEnter = tk.Button(self.rightFrame5)
self.ComNameEnter.configure(background='#00c1c1', foreground='#020202', text='ENTER')
self.ComNameEnter.place(anchor='nw', x='70', y='160')
self.frame27 = tk.Frame(self.rightFrame5, container='false')
self.ComNameLabel = tk.Label(self.frame27)
self.ComNameLabel.configure(font='{Times New Roman} 12 {bold underline}', foreground='#2b6a80',
justify='center', text='COMMON NAME TOOL')
self.ComNameLabel.pack(expand='true', fill='both', padx='5', side='top')
self.frame27.configure(height='30', width='200')
self.frame27.place(anchor='nw', x='0', y='0')
self.rightFrame5.configure(borderwidth='2', height='200', highlightbackground='#2b6a80',
highlightcolor='#2b6a80')
self.rightFrame5.configure(highlightthickness='1', relief='raised', width='200')
self.rightFrame5.pack(expand='true', fill='both', side='top')
self.frame2.configure(height='400', relief='sunken', width='400')
self.frame2.pack(side='top')
self.notebook4.add(self.frame2, text='Name/Age')
self.notebook4.configure(height='290', width='400')
self.notebook4.pack(side='top')
self.mainFrame.configure(height='200', width='200')
self.mainFrame.pack(side='top')
# Main widget
self.mainwindow = self.mainFrame
def retrieve_input(self):
id = self.ID_entry.get("1.0", 'end-1c')
idListprototype = id.splitlines()
# print(idListprototype)
if len(id_list) != 0:
id_list.clear()
# print(f"id_list =={id_list}")
for j in idListprototype:
id_list.append(j)
print(id_list)
return id_list
class open_ids():
def make_chrome_window2(self):
fw = pyautogui.getWindowsWithTitle('ATM - Google Chrome')
pyautogui.scroll(200)
if len(fw) == 0:
print("l is 0")
webbrowser.open('https://atm.accuratebackground.com/atm/login.jsp')
fw = pyautogui.getWindowsWithTitle('Vendor Login | Accurate Background - Google Chrome')
fw = fw[0]
fw.width = 974
fw.topleft = (953, 0)
def get_new_tab(self):
webbrowser.open('https://atm.accuratebackground.com/atm/findSearch.html')
time.sleep(1.1)
def search_id_fetch(self, ids_list):
self.make_chrome_window2()
# for h in range(0,len(ids_list)):
for h in id_list:
time.sleep(.05)
self.get_new_tab()
# findsearch = ["enter_id_box.png", "search_press_box.png"]
time.sleep(.05)
enter_id_box = (1340, 405)
press_search_button = (1625, 405)
pyautogui.click(enter_id_box)
time.sleep(.05)
pyautogui.typewrite(h)
time.sleep(.15)
pyautogui.click(press_search_button)
# auto_start()
def open_ids_ATM(self):
ids_list = self.retrieve_input()
self.open_ids().search_id_fetch(ids_list)
pyautogui.hotkey('ctrl', '2', interval=.07)
# Testimport = "test succeeded"
#
def make_GSTDB_folders_func(self):
exec(open('C:\\Users\kschwartz\PycharmProjects\pythonProject\make_GSCDB_folders.py').read())
def send2pendingArchiveFunc(self):
if len(id_list) != 0:
pendingIDspath = 'C:\\Users\kschwartz\Documents\CT_pending_IDS.xlsx'
pendIDsWB = openpyxl.load_workbook(pendingIDspath)
main_sheet= pendIDsWB['main_pending_ids']
idcolumnList = main_sheet['B2':'B1001']
def count_filled_entries():
i = 0
for rowOfCellObjects in idcolumnList:
for cellObj in rowOfCellObjects:
if cellObj.value != None:
i +=1
return i
def addNewEntries():
filledCells = count_filled_entries()
start_point = filledCells + 1
globalColumnstartEnd.append(str(start_point))
end_point = str(len(id_list) + filledCells)
globalColumnstartEnd.append(str(end_point))
print(f"GlobalColumnsStartEnd is {globalColumnstartEnd}")
print(f"start point is {start_point}")
print(f"end point is {end_point}")
columnStart = "B" + str(start_point)
columnEnd = "B" + str(end_point)
print(f"Column start is {columnStart}")
print(f"Column end is {columnEnd}")
filledIDcolumn = main_sheet[columnStart:columnEnd]
for rowOfCellObjects in filledIDcolumn:
for cellObj in rowOfCellObjects:
# print(cellObj.coordinate, cellObj.value)
cellObj.value = str(id_list[filledIDcolumn.index(rowOfCellObjects)])
pendIDsWB.save("C:\\Users\kschwartz\Documents\CT_pending_IDS.xls")
pendIDsWB.close()
os.startfile("C:\\Users\kschwartz\Documents\CT_pending_IDS.xls")
addNewEntries()
print("sent to CT_pending_IDS workbook")
def sendnames2pendingArchiveFunc(self):
pendingIDspath = 'C:\\Users\kschwartz\Documents\CT_pending_IDS.xlsx'
pendIDsWB = openpyxl.load_workbook(pendingIDspath)
nameColStart = "C" + (globalColumnstartEnd[0] +1)
nameColEnd = "C" + globalColumnstartEnd[1]
main_sheet = pendIDsWB['main_pending_ids']
nameColList = main_sheet[nameColStart:nameColEnd]
for rowOfCellObjects in nameColList:
for cellObj in rowOfCellObjects:
# print(cellObj.coordinate, cellObj.value)
cellObj.value = str(id_list[nameColList.index(rowOfCellObjects)])
pendIDsWB.save("C:\\Users\kschwartz\Documents\CT_pending_IDS.xls")
pendIDsWB.close()
os.startfile("C:\\Users\kschwartz\Documents\CT_pending_IDS.xls")
print("sent names to pendIDsWB")
def send2GSTDB_func(self):
if len(id_list) != 0:
GSTDBsheetPath = "C:\\Users\kschwartz\Documents\GA-SCDB-Search-helper_realTEST.xlsm"
GA_wb = openpyxl.load_workbook(GSTDBsheetPath)
GA_sheet = GA_wb['main_sheet']
def wipePreviousEntries():
for rowOfCellObjects in GA_sheet['B2':'B51']:
for cellObj in rowOfCellObjects:
if cellObj.value != None:
cellObj.value = ''
for rowOfCellObjects in GA_sheet['D2':'D51']:
for cellObj in rowOfCellObjects:
if cellObj.value != None:
cellObj.value = ''
for rowOfCellObjects in GA_sheet['F2':'F51']:
for cellObj in rowOfCellObjects:
if cellObj.value != None:
cellObj.value = ''
for rowOfCellObjects in GA_sheet['J2':'J12']:
for cellObj in rowOfCellObjects:
if cellObj.value != None:
cellObj.value = ''
for rowOfCellObjects in GA_sheet['L2':'L12']:
for cellObj in rowOfCellObjects:
if cellObj.value != None:
cellObj.value = ''
def addNewEntries():
end_point = str(len(id_list) + 1)
print(f"end point is {end_point}")
columnEnd = "B" + end_point
print(f"Column end is {columnEnd}")
sheet2list = GA_sheet['B2':columnEnd]
for rowOfCellObjects in sheet2list:
for cellObj in rowOfCellObjects:
# print(cellObj.coordinate, cellObj.value)
cellObj.value = str(id_list[sheet2list.index(rowOfCellObjects)])
wipePreviousEntries()
addNewEntries()
GA_wb.save("C:\\Users\kschwartz\Documents\GA-SCDB-Search-helper_realTEST.xls")
GA_wb.close()
print("sent to sheet")
os.startfile("C:\\Users\kschwartz\Documents\GA-SCDB-Search-helper_realTEST.xls")
return
def sendIDS2junkWorkbook(self):
genjunkWBpath = 'C:\\Users\kschwartz\Documents\CT_pending_IDS.xlsx'
        # load the junk workbook defined just above
        pendIDsWB = openpyxl.load_workbook(genjunkWBpath)
        # the stored column markers are strings, so convert before the arithmetic
        nameColStart = "C" + str(int(globalColumnstartEnd[0]) + 1)
        nameColEnd = "C" + globalColumnstartEnd[1]
main_sheet = pendIDsWB['main_pending_ids']
# def cleartable():
# for a in sheet['A1':'A2']
    def get_age(self, birthdate):
        days_in_year = 365.2425
        # approximate age in whole years from a datetime.date birthdate
        age = int((date.today() - birthdate).days / days_in_year)
        messagebox.showinfo('Message', str(age))
        return age
def show_ids(self):
self.Send2idList.configure(state='normal')
_text_ = "hi"
print("show_ids_func")
def run(self):
self.mainwindow.mainloop()
def callback(self, event=None):
pass
if __name__ == '__main__':
root = tk.Tk()
app = UiadvancedbackupApp(root)
app.run()
msgOUTputbox = UiadvancedbackupApp.send2idlist
|
UTF-8
|
Python
| false | false | 30,387 |
py
| 17 |
pyautoguiADVANCEDv3.py
| 13 | 0.601408 | 0.565011 | 0 | 516 | 56.893411 | 147 |
songyw0517/Diary
| 19,069,654,814,345 |
c3da3b352d2c2837e15664d4edb56159ccf433d2
|
7bb4404a7f55e390980da44bcf6db357f3e65b9a
|
/DB/database_management.py
|
efef70b5a1db3ae70a40bc7b7e2b3779336869bf
|
[] |
no_license
|
https://github.com/songyw0517/Diary
|
98444f1913131fa7a718172a1aaf98b1335e5afb
|
8ab9b9ac00becb2c2a9dd320d9ab978b6b411860
|
refs/heads/master
| 2021-01-05T02:58:50.988962 | 2020-07-22T07:58:16 | 2020-07-22T07:58:16 | 240,853,010 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from flask import request
import pymysql
def login_validation(db, value):
with db.cursor() as cursor:
query = "SELECT ID FROM test_login WHERE ID=%s and PW=%s"
cursor.execute(query, value)
data = (cursor.fetchall())
cursor.close()
for row in data:
data = row[0]
if data:
print("성공")
else:
print("실패")
|
UTF-8
|
Python
| false | false | 406 |
py
| 9 |
database_management.py
| 6 | 0.542714 | 0.540201 | 0 | 15 | 25.533333 | 65 |
djzwns/single-bungle-hwagok-imageprocessing
| 6,038,724,031,156 |
97b1d8b049ff422aeaedd75e6c237d2946b31409
|
31c160311463302f3dd714af3ff746e5f1c576de
|
/practice code/Code04-02 붓꽃 머신러닝 2(기학습된 모데 사용).py
|
94fcf5cf7336f8005b517469ddfd5b7a58cf4755
|
[] |
no_license
|
https://github.com/djzwns/single-bungle-hwagok-imageprocessing
|
3c86efef5d94426db20b74bc4e1349e1e9dfb416
|
3a7fe002f7336c25dd6a26a5c53c2b243afd7e58
|
refs/heads/master
| 2023-09-04T06:54:42.263919 | 2021-10-18T10:19:18 | 2021-10-18T10:19:18 | 415,910,164 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Load a pre-trained model and use it for prediction
import joblib
clf = joblib.load("iris_150_KNN3_96.dmp")
myData = [4.8, 3.3, 1.3, 0.3]
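# Added note (assumption): if the model was trained on scikit-learn's iris dataset,
# these four values are sepal length, sepal width, petal length and petal width in cm.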
result = clf.predict([myData])
print("이 꽃은 %s 입니다." % (result[0]))
|
UTF-8
|
Python
| false | false | 237 |
py
| 43 |
Code04-02 붓꽃 머신러닝 2(기학습된 모데 사용).py
| 40 | 0.646154 | 0.569231 | 0 | 7 | 26.857143 | 41 |
JoseRivas1998/Snake-pygame
| 16,982,300,707,782 |
95a47481ee5e3b1134d5d1e19de7118f5e032bdb
|
e1a6082207c7b16637479ffea69ad7074ec36f69
|
/app.py
|
73cc6166d6a07083f6144394ec3443d4252286af
|
[] |
no_license
|
https://github.com/JoseRivas1998/Snake-pygame
|
d6d8ac247016b91ad2accfbe1dcbd811a283c8e3
|
e22dee003c0c1f52aa8b0a9cfe4658431c1599e0
|
refs/heads/master
| 2020-07-01T10:44:35.903962 | 2019-08-08T05:20:28 | 2019-08-08T05:20:28 | 201,151,104 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from typing import Tuple
import pygame
from pygame.surface import Surface
from gamestatemanager import GameStateManager
from gamestatetype import GameStateType
from input import MyInput, MyInputProcessor
# CONSTANTS
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
class App:
size: Tuple[float, float]
running: bool
window_title: str
screen: Surface
font: pygame.font.Font
last_time: float
delta_time: float
gsm: GameStateManager
def __init__(self, width, height, title):
self.size = (width, height)
self.running = False
self.window_title = title
def begin(self):
pygame.init()
self.screen = pygame.display.set_mode(self.size)
pygame.display.set_caption(self.window_title)
self.gsm = GameStateManager(GameStateType.TITLE)
self.last_time = 0
self.running = True
self.game_loop()
def poll_event(self):
for event in pygame.event.get():
if event.type == pygame.QUIT:
self.running = False
if event.type == pygame.KEYDOWN:
MyInputProcessor.key_down(event.key)
if event.type == pygame.KEYUP:
MyInputProcessor.key_up(event.key)
pass
def step(self):
self.screen.fill((0xD2, 0xD2, 0xD2))
self.poll_event()
self.gsm.step(self.screen, self.size, self.delta_time)
MyInput.update()
pygame.display.flip()
if self.gsm.should_exit:
self.exit()
def calculate_delta_time(self):
current_time = pygame.time.get_ticks()
self.delta_time = (current_time - self.last_time) / 1000.0
self.last_time = current_time
def game_loop(self):
while self.running:
self.calculate_delta_time()
self.step()
def exit(self):
self.running = False
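# Hedged usage sketch (added for illustration): the window size and title are
# placeholders, not values taken from the original project.
if __name__ == '__main__':
    App(800, 600, 'Snake').begin()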
|
UTF-8
|
Python
| false | false | 1,873 |
py
| 17 |
app.py
| 16 | 0.609717 | 0.596903 | 0 | 68 | 26.544118 | 66 |
maxvonhippel/S3CAMX
| 15,307,263,477,307 |
fa80bc176c8ff96360925bd71290da0b51a15cd8
|
e4fb5d5b74926250f6730bfa3995e52ea55e4dc4
|
/src/configparser.py
|
5f39d39366a51bf9f8984998f999f82b4e06b5ce
|
[
"BSD-2-Clause"
] |
permissive
|
https://github.com/maxvonhippel/S3CAMX
|
7629225410eaae455148e7ec0512c9575829b17c
|
03f63f84e6e1e18598edff52a901f669c2fee342
|
refs/heads/master
| 2022-12-07T09:44:14.519479 | 2016-06-21T05:21:27 | 2016-06-21T05:21:27 | 292,167,603 | 0 | 0 | null | true | 2020-09-02T03:20:25 | 2020-09-02T03:20:24 | 2020-01-22T06:05:39 | 2017-05-16T14:52:06 | 3,969 | 0 | 0 | 0 | null | false | false |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import logging
logger = logging.getLogger(__name__)
def parse_config(description_str):
output_dict = {}
# ##!!##logger.debug('parsing abstraction parameters')
# split the description into lines by breaking at "\n"
description_arr = description_str.splitlines()
for l in description_arr:
        # skip blank lines and comment lines starting with '#'
        if not l or l[0] == '#':
            continue
# split each line on "="
parameter_def = l.split('=')
# strip leading and trailing whitespaces
parameter_name = parameter_def[0].strip()
parameter_val = parameter_def[1].strip()
output_dict[parameter_name] = parameter_val
# store the RHS type of the output dict, i.e., all RHS are strings
output_dict['type'] = 'string'
return output_dict
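# Hedged usage sketch (added for illustration; the parameter names are invented):
if __name__ == '__main__':
    demo = parse_config('# abstraction parameters\neps = 0.1\ngrid = 16')
    print(demo)  # {'eps': '0.1', 'grid': '16', 'type': 'string'}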
|
UTF-8
|
Python
| false | false | 853 |
py
| 130 |
configparser.py
| 77 | 0.61313 | 0.608441 | 0 | 37 | 22.054054 | 70 |
haxmanster/Hornet2
| 1,099,511,676,427 |
bef202eb186d2802b7360317a26641a5e8c3a298
|
0263bd70831758133ee317de39f59221d1338844
|
/functions.py
|
1f9b92d36b2828ab462da75dc1e118a06053f6d6
|
[] |
no_license
|
https://github.com/haxmanster/Hornet2
|
c752748105c1a6743e0d773b165bcfb0dba210a1
|
e2638749d46127f5043c6aeac66c2b503e2aadcc
|
refs/heads/master
| 2022-12-11T05:30:15.613383 | 2019-06-14T15:05:57 | 2019-06-14T15:05:57 | 146,297,914 | 0 | 0 | null | false | 2022-12-08T02:52:35 | 2018-08-27T12:59:34 | 2019-06-14T15:13:39 | 2022-12-08T02:52:32 | 105,001 | 0 | 0 | 9 |
HTML
| false | false |
import sqlite3
import hashlib
from flask import flash
ALLOWED_EXTENSIONS = {'txt', 'pdf', 'jpg', 'png', 'jpeg', 'gif', 'doc', 'rar'}
def db_connect():
with sqlite3.connect("static/user.db") as db:
cursor = db.cursor()
cursor.execute('SELECT * FROM users, dzieci')
data = cursor.fetchall()
return data
def check_username():
find = db_connect()
return find
def find_child(pesel):
    data = db_connect()
    result = None
    for rows in data:
        # only rows whose PESEL column matches the requested value
        if rows[5] != pesel:
            continue
        name = rows[6]
        surname = rows[7]
        date_of_birth = rows[8]
        group = rows[9]
        result = ("PESEL: " + rows[5], "FIRST NAME: " + name, "SURNAME: " + surname,
                  "DATE OF BIRTH: " + date_of_birth, "PRESCHOOL GROUP: " + group)
    return result
def check_grupa(username):
with sqlite3.connect("static/user.db") as db:
cur = db.cursor()
cur.execute("SELECT * FROM users")
rows = cur.fetchall()
for row in rows:
db_grupa = row[1]
db_user = row[2]
            if db_user == username:
return db_grupa
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
def hash_passwd(hashed_password):
hash_pass = hashlib.sha224(hashed_password.encode()).hexdigest()
return hash_pass
def check_password(hashed_password, user_password):
return hashed_password == hashlib.sha224(user_password.encode()).hexdigest()
def validate(username, password):
con = sqlite3.connect('static/user.db')
completion = False
with con:
cur = con.cursor()
cur.execute("SELECT * FROM Users")
rows = cur.fetchall()
for row in rows:
db_user = row[2]
db_pass = row[3]
if db_user == username:
completion = check_password(db_pass, password)
return completion
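# Hedged usage sketch (added for illustration; the password is a placeholder):
if __name__ == '__main__':
    hashed = hash_passwd('example-password')
    print(check_password(hashed, 'example-password'))  # True
    print(allowed_file('photo.jpg'))                   # True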
|
UTF-8
|
Python
| false | false | 2,067 |
py
| 13 |
functions.py
| 7 | 0.549855 | 0.53969 | 0 | 72 | 27.694444 | 100 |
Yuanmu93/ImgSpider
| 18,107,582,140,755 |
05be7363e3830d86313bc33d9461a2a0f932eca0
|
6582678251e0430b55468afeda7c204dbfb6da43
|
/jandanspiderB.py
|
b3dcce7e58dc68f06a3e13e2462612a52b39fed5
|
[] |
no_license
|
https://github.com/Yuanmu93/ImgSpider
|
7883ba058081daaef28c5fa613993ed7b0440472
|
1c7cc7a9fddb455dbd8f9a166af604abd4af5ea5
|
refs/heads/master
| 2021-06-20T21:17:54.463642 | 2017-08-03T11:39:10 | 2017-08-03T11:39:10 | 94,325,322 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#CrawJandanSpider
import re
import os
import time
import threading
from multiprocessing import Pool, cpu_count
import requests
from bs4 import BeautifulSoup
import random
headers = {'X-Requested-With': 'XMLHttpRequest',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/56.0.2924.87 Safari/537.36'}
path = r"D:\mztue"
def get_ip_list(url, headers):
web_data = requests.get(url, headers=headers)
soup = BeautifulSoup(web_data.text, 'lxml')
ips = soup.find_all('tr')
ip_list = []
for i in range(1, len(ips)):
ip_info = ips[i]
tds = ip_info.find_all('td')
ip_list.append(tds[1].text + ':' + tds[2].text)
return ip_list
def get_random_ip(ip_list):
proxy_list = []
for ip in ip_list:
proxy_list.append('http://' + ip)
proxy_ip = random.choice(proxy_list)
proxies = {'http': proxy_ip}
return proxies
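# Added note: the proxies built here are never passed to the download requests below;
# if proxy rotation is wanted, requests accepts them via
# requests.get(url, headers=headers, proxies=proxies).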
def get_img_url():
g = ['http://jandan.net/ooxx/page-' + str(num) for num in range(90,120)]
img_lists = []
for i in g:
html = requests.get(i, headers=headers, timeout = 30)
page = html.text
soup = BeautifulSoup(page, 'html.parser')
img_list = soup.find_all('img')
img_lists.append(img_list)
return(img_lists)
def get_img_url_lists(img_lists):
imglist = []
for i in range(0, img_lists.__len__()):
img_lists[i] = str(img_lists[i])
imglist.append(img_lists[i])
imgstr = ''.join(imglist)
img_url_list = re.findall(r'src="(.*?.jpg)"', imgstr)
img_str = 'https:'.join(img_url_list)
img_url_lists = re.findall(r'(https://.*?.jpg)', img_str)
return img_url_lists
def save_imgs(img_url_lists):
try:
if not os.path.exists(path):
os.mkdir(path)
os.chdir(path)
x = 0
for i in img_url_lists:
r = requests.get(i)
imgname = "{}.jpg".format(x)
x+=1
with open(imgname, 'ab') as f:
f.write(r.content)
print(imgname)
else:
print('file exist')
except:
print('fail')
if __name__=='__main__':
url = 'http://www.xicidaili.com/nn/'
ip_list = get_ip_list(url, headers=headers)
proxies = get_random_ip(ip_list)
img_lists = get_img_url()
img_url_lists = get_img_url_lists(img_lists)
save_imgs(img_url_lists)
print(proxies)
|
UTF-8
|
Python
| false | false | 2,517 |
py
| 2 |
jandanspiderB.py
| 1 | 0.558602 | 0.54271 | 0 | 101 | 23.861386 | 103 |
frank038/MFM2
| 10,909,216,957,270 |
bba9163fd11dc19fd4f78a0fa949b8193763e3df
|
15724a67370f79534311959b5a99657cf4ad94c4
|
/mfm2/modules_menu/sz_extract.py
|
5f5a3dc473693040cf8ef96bf562a80be29f53f4
|
[] |
no_license
|
https://github.com/frank038/MFM2
|
0b3209cfcc9a9c72fadd1b0f702733fc0a6642f4
|
2506095a967352f3d4ea49997ad110c308653e5d
|
refs/heads/main
| 2023-07-01T12:42:48.006163 | 2021-08-03T09:54:31 | 2021-08-03T09:54:31 | 335,981,794 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python3
"""
module to extract compressed archives, including password-protected ones
"""
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Gdk, Gio
import subprocess
import os
# 1 = on selected item(s); 2 = on background; 3 = both
mtype = 1
# this name appears in the menu
name = "Extract here..."
## not implemented
#icon = "icon"
# the command to launch
command = ""
# position in the menu: int -1 means append only
# or in this string form: "pN"
# p: 0 or 1 or 2 or 3: 0 no separators, 1 separator above, 2 separator below, 3 both separator
# N: position, suggested > 4
# some menu items could be hidden so the position must consider them
position = -1
# enabled: if return is equal to 1 this script will be enabled
# one item at time
def enabled(fpath):
if len(fpath) == 1:
# if the mimetype is supported
file = Gio.File.new_for_path(fpath[0])
file_info = file.query_info('standard::content-type', Gio.FileQueryInfoFlags.NONE, None)
ftype = Gio.FileInfo.get_content_type(file_info)
#
if ftype in ["application/x-compressed-tar", "application/zip", "application/x-cd-image", "application/x-tar",
"application/vnd.comicbook+zip", "application/x-7z-compressed", "application/x-bzip-compressed-tar"]:
# the folder must be writable
if os.access(os.path.dirname(fpath[0]), os.R_OK):
return 1
else:
return 0
else:
return 0
def nname():
return name
# if this returns "" the ModuleClass will be executed
def ccommand(IV):
return command
def ttype():
return mtype
def pposition():
return position
class ModuleClass(Gtk.Window):
def __init__(self, wiconview):
self.wiconview = wiconview
# the main dialog
Gtk.Window.__init__(self, title="")
# center this window to parent
self.set_transient_for(self.wiconview.window)
self.set_position(Gtk.WindowPosition.CENTER_ON_PARENT)
# skip the taskbar
self.set_skip_taskbar_hint(True)
#
self.set_default_size(500, 100)
self.set_border_width(10)
self.connect ('delete-event', lambda w,e: self.destroy())
########
self.mainbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
self.add(self.mainbox)
# get the password
self.entry = Gtk.Entry()
self.entry.set_visibility(False)
self.mainbox.add(self.entry)
self.ckb = Gtk.CheckButton.new_with_label("Hide/Show the password")
self.ckb.connect("toggled", self.on_ckb_toggle)
self.mainbox.add(self.ckb)
self.btnbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
self.mainbox.add(self.btnbox)
self.btncancel = Gtk.Button(label="Cancel")
self.btncancel.connect("clicked", lambda w: self.destroy())
self.btnbox.add(self.btncancel)
self.btnok = Gtk.Button(label="Ok")
self.btnbox.add(self.btnok)
self.btnbox.set_homogeneous(True)
########
iterpath = self.wiconview.IV.get_selected_items()[0]
model = self.wiconview.IV.get_model()
# file name
fname = model[iterpath][1]
# path
dname = model[iterpath][3]
# full path and file name
item_path = os.path.join(dname, fname)
(nroot, suffix) = os.path.splitext(fname)
# the folder only name in which extract the archive
ndir_name = nroot
if os.path.exists(os.path.join(dname, ndir_name)):
i = 1
while i:
if os.path.exists(os.path.join(dname, ndir_name+"_("+str(i)+")")):
i += 1
else:
ndir_name = ndir_name+"_("+str(i)+")"
i = 0
#
# test the archive for password
ret = self.test_archive(item_path)
if ret == 1:
try:
ret = subprocess.check_output('7z x "-o{}" -y -aou -- "{}"'.format(os.path.join(dname, ndir_name), item_path), shell=True)
if "Everything is Ok" in ret.decode():
self.generic_dialog1("Info", "Archive extracted.")
else:
self.generic_dialog1("ERROR", "Issues while extracting the archive.")
except Exception as E:
self.generic_dialog1("ERROR", "Issues while extracting the archive:\n{}.".format(str(E)))
self.destroy()
self.destroy()
# ask for the password
elif ret == 2:
self.btnok.connect("clicked", self.on_btn_ok, dname, ndir_name, item_path)
self.show_all()
#
elif ret == 0:
self.generic_dialog1("ERROR", "Issues while checking the archive.")
self.destroy()
#
def test_archive(self, path):
szdata = None
try:
szdata = subprocess.check_output('7z l -slt -bso0 -- "{}"'.format(path), shell=True)
except:
return 0
if szdata != None:
szdata_decoded = szdata.decode()
ddata = szdata_decoded.splitlines()
if "Encrypted = +" in ddata:
return 2
else:
return 1
#
def on_btn_ok(self, button, dname, ndir_name, item_path):
self.spswd = self.entry.get_text()
self.hide()
if not self.spswd:
self.destroy()
try:
ret = subprocess.check_output('7z x "-p{}" "-o{}" -y -aou -- "{}"'.format(self.spswd, os.path.join(dname, ndir_name), item_path), shell=True)
if "Everything is Ok" in ret.decode():
self.generic_dialog1("Info", "Archive extracted.")
else:
self.generic_dialog1("ERROR", "Issues while extracting the archive.")
except Exception as E:
self.generic_dialog1("ERROR", "Issues while extracting the archive:\n{}.".format(str(E)))
self.destroy()
self.destroy()
#
def on_ckb_toggle(self, widget):
if widget.get_active():
self.entry.set_visibility(True)
else:
self.entry.set_visibility(False)
# generic ok dialog
def generic_dialog1(self, message1, message2):
dialog = Gtk.MessageDialog(parent=self.wiconview.window, flags=0, message_type=Gtk.MessageType.ERROR,
buttons=(Gtk.STOCK_OK, Gtk.ButtonsType.OK), text=message1)
dialog.format_secondary_text("{}".format(message2))
dialog.run()
dialog.destroy()
|
UTF-8
|
Python
| false | false | 6,596 |
py
| 29 |
sz_extract.py
| 25 | 0.57641 | 0.566859 | 0 | 181 | 35.441989 | 153 |
archeranimesh/python_devu_in
| 8,461,085,620,902 |
ec614fd33e69be6bb0cfaeebf90a8f872f40a3db
|
180ed6c8ff26b365e56c63d878a69c2e9d9b8c54
|
/code/practice/12_square.py
|
79bb348be9fc9bbe64dad2602f01c3c130cf3a4f
|
[] |
no_license
|
https://github.com/archeranimesh/python_devu_in
|
9af46503b07f82a137d4b820920fa72f96a5067b
|
3d77e9e7b150aae15375b236e3de7f573da4bdce
|
refs/heads/master
| 2020-06-08T21:22:24.329530 | 2019-07-14T11:22:55 | 2019-07-14T11:22:55 | 193,308,932 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# WAP to compute the square of a given number.
def squares(sides):
return sides ** 2
print(squares(30))
|
UTF-8
|
Python
| false | false | 131 |
py
| 41 |
12_square.py
| 35 | 0.687023 | 0.664122 | 0 | 8 | 15.375 | 65 |
MaddTheSane/dim3
| 3,487,513,477,853 |
9fbce5bda8a2bbd0bf3204cb5809b5f379b7821f
|
828214bbbfacf7e8d9702db2a9da1aafdff5d285
|
/unixBuild/SConstruct
|
dcabf90c7d9e58ac47f047709ee37848162cb5eb
|
[] |
no_license
|
https://github.com/MaddTheSane/dim3
|
fb347c09e025decd702b89e826b48723fdcd67ca
|
17d8f06b0e554fbb2b4e927231938a3412b97823
|
refs/heads/master
| 2020-04-13T22:21:22.353678 | 2018-12-29T05:24:35 | 2018-12-29T05:24:35 | 163,477,051 | 1 | 0 | null | true | 2018-12-29T04:54:16 | 2018-12-29T04:54:16 | 2018-10-11T09:17:34 | 2009-07-10T17:14:24 | 26,688 | 0 | 0 | 0 | null | false | null |
import os, string
env = Environment()
env.Append(CCFLAGS = ['-Wall', '-W', '-Wno-unused-parameter'])
env.Append(CPPPATH = ['/usr/local/include/SDL2','/usr/include/webkitgtk-1.0','/usr/include/freetype2','/usr/include/webkitgtk-1.0/JavaScriptCore'])
env.Append(CPPDEFINES = {'D3_PCH': 1})
env.Append(LIBS = [
'SDL2', 'GLEW', 'GL', 'GLU', 'X11', 'openal', 'webkitgtk-1.0', 'png', 'mpg123', 'fontconfig', 'freetype', 'libm', 'libz', 'libjavascriptcoregtk-1.0'])
if ARGUMENTS.get('release', 0) == 0:
env.Append(CCFLAGS = ['-g'])
else:
env.Append(CPPDEFINES = {'NDEBUG': 1})
Export('env')
sources = []
for codepath in ['../dim3Common','../dim3BaseUtility','../dim3MapUtility','../dim3ModelUtility','../dim3Engine']:
for root, directories, files in os.walk(codepath):
for directory in directories:
if string.count(directory, 'Headers'):
env.Append(CPPPATH = [root + '/' + directory])
for file in files:
if os.path.splitext(file)[1] == '.c':
sources += [root + '/' + file]
env.Program('dim3', sources)
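# Added note: given the ARGUMENTS check above, plain "scons" produces a debug build
# with -g, while "scons release=1" defines NDEBUG instead.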
|
UTF-8
|
Python
| false | false | 1,097 | 606 |
SConstruct
| 1 | 0.604376 | 0.578851 | 0 | 28 | 38.178571 | 154 |
|
ryan22s/Python3
| 15,685,220,599,769 |
aad3e8b64f1e28e7767c7104fbccac314a04593a
|
fadac33e94923f35229805c9b87fbcb837a5565c
|
/5.2.2.py
|
d58d734f254fed4e73f6e2d41b7095213d5f2858
|
[] |
no_license
|
https://github.com/ryan22s/Python3
|
9c4e5fafbff973e67ab414317139bd8655284624
|
8b2d7ecb22ec520ea0ea21bffd03572fadf3ab66
|
refs/heads/master
| 2020-03-24T06:31:23.006978 | 2019-07-24T18:11:19 | 2019-07-24T18:11:19 | 142,531,725 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
user_num = -1
while not user_num < 1:
user_num = user_num/2
print(user_num)
|
UTF-8
|
Python
| false | false | 90 |
py
| 38 |
5.2.2.py
| 38 | 0.566667 | 0.533333 | 0 | 5 | 16 | 25 |
tytx/BusFareCalculator
| 15,693,810,533,367 |
02efd8a00bd4d8a1be9f926e78cd3c6ad2a315d1
|
43ac688e2c5754c9c0a6e2b24acf2dfa08b0ddfb
|
/admin.py
|
b39b2398b6cbb102d436440010fd7cc2392adab0
|
[] |
no_license
|
https://github.com/tytx/BusFareCalculator
|
26abe59f30350932536938459f296d2a693b4fd3
|
cf9388a55a73473482ec4b666e264dfa7e160b18
|
refs/heads/master
| 2020-06-29T05:28:18.502821 | 2017-12-12T10:32:48 | 2017-12-12T10:32:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from config import *
from sync_database import *
def admin():
while(True):
print("//----------admin----------//")
print("1.Ticket\n2.Discount\n3.Age Limits\n4.Exit")
admin_opt = check_input("enter the choice :", 3)
if (admin_opt == 1):
ret=get_source_dest()
if (ret == 0):
print("1.child cost\n2.adult cost")
cost_opt = check_input("enter the choice :", 1)
if (cost_opt == 1):
print("1.remove\n2.add")
child_opt = check_input("enter the option :", 1)
if (child_opt == 1):
if "child" in globals.ticket_dic[globals.stop].keys():
del(globals.ticket_dic[globals.stop]["child"])
else:
print("there is no ticket cost for child ")
elif(child_opt == 2):
cost = check_input("enter the ticket cost for child :", 0)
if "child" in globals.ticket_dic[globals.stop].keys():
globals.ticket_dic[globals.stop]["child"]=cost
else:
globals.ticket_dic[globals.stop].update({"child":cost})
elif(cost_opt == 2):
cost = check_input("enter adult cost", 0)
globals.ticket_dic[globals.stop]["adult"]=cost
elif(admin_opt == 2):
ret=get_source_dest()
if (ret == 0):
print("1.Trip Discount\n2.Passenger Discount")
discount_opt = check_input("enter the choice :", 1)
if(discount_opt == 1):
print("1.remove\n2.add")
trip_opt=check_input("enter your choice :",1)
if(trip_opt ==1):
if "t_disc" in globals.ticket_dic[globals.stop].keys():
del(globals.ticket_dic[globals.stop]["t_disc"])
del(globals.ticket_dic[globals.stop]["t_count"])
del(globals.ticket_dic[globals.stop]["t_flag"])
else:
print("there is no discount to get removed")
else:
t_disc=check_input("enter trip discount :",2)
t_count=check_input("enter number of trips discount to be given :",0)
t_flag=check_input("enter on whom discount to be given :",3)
if "t_disc" in globals.ticket_dic[globals.stop].keys():
globals.ticket_dic[globals.stop]["t_disc"] = t_disc
globals.ticket_dic[globals.stop]["t_count"] = t_count
globals.ticket_dic[globals.stop]["t_flag"] = t_flag
else:
globals.ticket_dic[globals.stop].update({"t_disc":t_disc})
globals.ticket_dic[globals.stop].update({"t_count":t_count})
globals.ticket_dic[globals.stop].update({"t_flag":t_flag})
elif(discount_opt == 2):
print("1.remove\n2.add")
passenger_opt=check_input("enter your choice :",1)
if(passenger_opt ==1):
if "p_disc" in globals.ticket_dic[globals.stop].keys():
del(globals.ticket_dic[globals.stop]["p_disc"])
del(globals.ticket_dic[globals.stop]["p_count"])
del(globals.ticket_dic[globals.stop]["p_flag"])
else:
print("there is no discount to get removed")
else:
p_disc=check_input("enter trip discount :",2)
p_count=check_input("enter number of trips discount to be given :",0)
p_flag=check_input("enter on whom discount to be given :",3)
if "p_disc" in globals.ticket_dic[globals.stop].keys():
globals.ticket_dic[globals.stop]["p_disc"] = p_disc
globals.ticket_dic[globals.stop]["p_count"] = p_count
globals.ticket_dic[globals.stop]["p_flag"] = p_flag
else:
globals.ticket_dic[globals.stop].update({"p_disc":p_disc})
globals.ticket_dic[globals.stop].update({"p_count":p_count})
globals.ticket_dic[globals.stop].update({"p_flag":p_flag})
elif admin_opt == 3:
verify_age_limit()
sync_from_age()
else:
break
|
UTF-8
|
Python
| false | false | 4,810 |
py
| 6 |
admin.py
| 5 | 0.454886 | 0.446362 | 0 | 93 | 50.612903 | 93 |
excellencemichel/progrk
| 6,055,903,933,760 |
84c163a407c4b6d975acd28a61d9b51421410be0
|
b0365a11976fc19e350ba3c448b2bc3720c3eb73
|
/project/qt3/printerDialog.py
|
24ba9a26fa56cd9b446c96e673cb62a49f9900ea
|
[] |
no_license
|
https://github.com/excellencemichel/progrk
|
bd4e6797c21ed921ce4a3d75378ca752cece459d
|
e3144f78d9313ca9e2c836dcf53cf1bc4b3f10b8
|
refs/heads/master
| 2021-04-15T07:54:06.240231 | 2018-12-29T03:42:10 | 2018-12-29T03:42:10 | 116,013,431 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/python
#-*-coding:utf-8-*-
from PyQt5 import QtGui
from PyQt5.QtPrintSupport import (
QPrintDialog,
QPrinter,
QPrintPreviewDialog,
)
from PyQt5.QtWidgets import (
QApplication,
QMainWindow,
QPushButton,
QTextEdit,
)
class Window(QMainWindow):
"""
    This class shows that we can set the window dimensions by hand ourselves
"""
def __init__(self):
        super().__init__()  # call the QMainWindow base-class constructor
self.title = "PyQt5 Window "
self.top = 100
self.left = 100
self.width = 680
self.height = 500
self.setWindowIcon(QtGui.QIcon("icons/line.png")) #ça na pas marché
self.init_window()
def init_window(self):
self.button = QPushButton("Print", self)
self.button.setGeometry(100,100,100, 50)
self.button.clicked.connect(self.createPrintDialog)
self.button1 = QPushButton("Preview", self)
self.button1.setGeometry(203, 100,100, 50)
self.button1.clicked.connect(self.printPreviewDialog)
self.textEdit = QTextEdit(self)
self.textEdit.setGeometry(100, 150, 200, 200 )
self.setWindowTitle(self.title)
self.setGeometry(self.top, self.left, self.width, self.height)
self.show()
def createPrintDialog(self):
printer = QPrinter(QPrinter.HighResolution)
dialog = QPrintDialog(printer, self)
if dialog.exec_() == QPrintDialog.Accepted:
self.textEdit.print_(printer)
def printPreviewDialog(self):
printer = QPrinter(QPrinter.HighResolution)
previewDialog = QPrintPreviewDialog(printer, self)
previewDialog.paintRequested.connect(self.printPreview)
previewDialog.exec_()
def printPreview(self, printer):
self.textEdit.print_(printer)
if __name__ == '__main__':
import sys
app = QApplication(sys.argv)
window = Window()
sys.exit(app.exec_())
|
UTF-8
|
Python
| false | false | 1,800 |
py
| 216 |
printerDialog.py
| 135 | 0.712535 | 0.682451 | 0 | 97 | 17.474227 | 69 |
fsundstedt-ff/ATP-JSON-Reader
| 13,477,607,415,684 |
4a99732b0e96f93653fc0040843359f83de14306
|
8cff5800097926f5a08264ac92a800678ad97727
|
/script-csv.py
|
3b1f47f7b1a528ca7d24a194222c1154e3bb1889
|
[] |
no_license
|
https://github.com/fsundstedt-ff/ATP-JSON-Reader
|
60c6b4cc4f6b90db7c6cf1a7067a6fbac9e4a482
|
7a8d3c539cf687f28343be7e311a7e3eb76e838c
|
refs/heads/main
| 2023-03-03T13:51:34.099199 | 2021-02-10T22:52:26 | 2021-02-10T22:52:26 | 337,240,918 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import csv
from parse_json import parseJSON
from utilities import dateString
# Opens input file selection window, parses JSON, and outputs as python array of arrays.
# Results are as follows: [[column titles],[data]]
results = parseJSON()
# Creates and writes output file as csv
with open(f"output-{dateString()}.csv", "w", newline="") as c:
write = csv.writer(c)
write.writerow(results[0])
write.writerows(results[1])
|
UTF-8
|
Python
| false | false | 442 |
py
| 4 |
script-csv.py
| 3 | 0.71267 | 0.708145 | 0 | 15 | 28.533333 | 88 |
BITCS-Information-Retrieval-2020/search-rattailcollagen1
| 18,124,762,006,064 |
fa685bf5c0dcec5d2e6da78163bdbb5dd394349a
|
4023900dfbebff3ebf9422472393c87ad5006188
|
/ScienceSearcher/DatabaseAccess.py
|
c4731dfed476bea3f79256d1f278b890ba3ea6b5
|
[
"MIT"
] |
permissive
|
https://github.com/BITCS-Information-Retrieval-2020/search-rattailcollagen1
|
b6a1b9af8e777b880699c25f394c4ad9d31999d0
|
2f8e52cde2b1ec825e6bac56428e3abd3e297b0e
|
refs/heads/master
| 2023-02-13T08:21:59.826726 | 2021-01-17T02:11:12 | 2021-01-17T02:11:12 | 321,301,480 | 10 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pymongo
import json
from pymongo.errors import BulkWriteError
class DatabaseAccess:
"""
DatabaseAccess
"""
DEFAULT_SERVICE_PATH = 'mongodb://127.0.0.1:27017'
DEFAULT_SERVICE_NAME = 'mongodb'
DEFAULT_COLLECTION_NAME = 'papers'
def __init__(self,
service_path=DEFAULT_SERVICE_PATH,
service_name=DEFAULT_SERVICE_NAME,
collection_name=DEFAULT_COLLECTION_NAME,
increment_beginning_pointer=-1,
increment_ending_pointer=None):
"""
read section: (increment_beginning_pointer, increment_ending_pointer]
:param service_path: 'mongodb://user:password@address:port/service_name'
:param service_name: 'crawler'
:param collection_name: 'papers'
:param increment_beginning_pointer: -1 if default. maximum _id of the last increment data if specified.
:param increment_ending_pointer: None if default, restrict the maximum _id
"""
self.service_path = service_path
self.service_name = service_name
self.collection_name = collection_name
self.client = pymongo.MongoClient(self.service_path)
self.database = self.client[self.service_name]
self.collection = self.database[self.collection_name]
        # starting pointer (exclusive) of this incremental read
        self.increment_beginning_pointer = increment_beginning_pointer
        # ending pointer (inclusive) of this incremental read
        self.increment_ending_pointer = increment_ending_pointer
        self.default_query_object = None
        # ending pointer of the current batch read
        self.current_ending_pointer = self.increment_beginning_pointer
        # flag marking the end of this incremental read
        self.end_flag = False
# self.batch_pointer = 0
self.batch_size = 1
def build_query_object(self):
if self.increment_ending_pointer is not None:
if not isinstance(self.increment_ending_pointer, int):
raise ValueError('int expected instead of {}'.format(self.increment_ending_pointer))
query_object = {'_id': {'$gt': self.current_ending_pointer,
'$lte': self.increment_ending_pointer}}
else:
query_object = {'_id': {'$gt': self.current_ending_pointer}}
self.default_query_object = query_object
def read_batch(self, batch_size=1):
"""
        Read the database batch_size documents at a time and return them as a list, where each element is a dict.
:param batch_size: int
:return: a list of dict, list = [] or len(list) < batch_size means no more data
"""
self.batch_size = batch_size
        # if the primary keys in the collection are contiguous starting from zero, then:
# batch_cursor = self.collection.find(
# {'_id': {'$gt': self.increment_beginning_pointer + self.batch_pointer,
# '$lt': self.increment_beginning_pointer + self.batch_pointer + self.batch_size}})
        # if there are gaps in the collection's primary keys, then:
# batch_cursor = self.collection.find({'_id': {'$gt': self.increment_beginning_pointer}}).skip(
# self.batch_pointer).limit(self.batch_size)
# after delete batch_pointer
self.build_query_object()
batch_cursor = self.collection.find(self.default_query_object).limit(self.batch_size)
batch_list, batch_length = self.build_batch_list(batch_cursor=batch_cursor)
# if batch_length == self.batch_size:
# self.batch_pointer += self.batch_size
# elif 0 < batch_length < self.batch_size:
# self.batch_pointer += batch_length
# self.end_flag = True
# elif batch_length == 0:
# # self.batch_pointer += 0
# self.end_flag = True
if batch_length == 0:
self.end_flag = True
return batch_list
elif 0 < batch_length < self.batch_size:
self.end_flag = True
try:
assert self.current_ending_pointer + batch_length == batch_list[-1]['_id']
            except AssertionError:
                # a gap in the collection's primary keys is tolerated and only reported
                print('gap found in the collection primary keys: ({},{}]'.format(self.current_ending_pointer, batch_list[-1]['_id']))
self.current_ending_pointer = batch_list[-1]['_id']
return batch_list
@staticmethod
def build_batch_list(batch_cursor):
batch_list = []
for item in batch_cursor:
batch_list.append(item)
return batch_list, len(batch_list)
def import_json_db(self, db_path='./data/papers.json', drop_flag=False):
"""
导入json格式的mongodb数据库
# TODO 由于内存限制,可能有导入上限,如果文件过大,改用*mongoimport*工具
examples:
dba.import_json_db(db_path='./searcher/data/papers.json')
mongo import:
mongoimport --port 27030 -u sa -p Expressin@0618 -d mapdb -c bike_bak --type=json --file bike.csv
:param db_path: json文件路径
:param drop_flag: drop the collection if flag is true, keep the collection otherwise.
:return:
"""
if drop_flag:
self.safe_drop()
try:
with open(file=db_path, mode='r', encoding='utf-8')as fp:
paper_list = json.load(fp=fp)
print('import {} papers.'.format(len(paper_list)))
self.collection.insert_many(paper_list)
except FileNotFoundError as fe:
print(fe.strerror)
except UnicodeDecodeError as de:
print(de.reason)
except BulkWriteError as be:
            print('primary key conflict occurred and partial insertion was successful:{}'.format(be.details))
def safe_drop(self):
"""
        Safety check so that a remote database is never cleared by accident.
:return:
"""
if self.service_path == DatabaseAccess.DEFAULT_SERVICE_PATH:
self.collection.drop()
print('drop successfully.')
else:
print('Cannot drop any collection of remote database!:{}'.format(self.service_path))
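# Hedged usage sketch (added for illustration; uses the class defaults and an
# arbitrary batch size):
if __name__ == '__main__':
    dba = DatabaseAccess()
    while not dba.end_flag:
        for paper in dba.read_batch(batch_size=10):
            print(paper.get('_id'))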
|
UTF-8
|
Python
| false | false | 6,222 |
py
| 19 |
DatabaseAccess.py
| 12 | 0.594881 | 0.588908 | 0 | 153 | 37.300654 | 114 |
ndmarinin/2021-1-MAILRU-SDET-Python-N-MARININ
| 15,126,874,829,073 |
31389007ffad7ef39b5c18c4d67be4312e87a6e0
|
ff9082f081e2aa737c53c3ed764bf99d50e1d2cb
|
/SDET-Python-homework-4/code/ui/locators/locators_android.py
|
69007694a002427b71c831ee33e57653e3531537
|
[] |
no_license
|
https://github.com/ndmarinin/2021-1-MAILRU-SDET-Python-N-MARININ
|
d301873489f1d2a51dab263fee173365f82c83aa
|
c0b4605fac4512f1ae36a5a06892789b31139f72
|
refs/heads/main
| 2023-07-16T03:29:13.924090 | 2021-08-17T14:55:31 | 2021-08-17T14:55:31 | 351,865,567 | 0 | 0 | null | false | 2021-08-17T14:55:32 | 2021-03-26T17:45:19 | 2021-08-02T10:33:03 | 2021-08-17T14:55:31 | 24,891 | 0 | 0 | 0 |
Python
| false | false |
from selenium.webdriver.common.by import By
from appium.webdriver.common.mobileby import MobileBy
class BasePageANDROIDLocators:
pass
class MainPageANDROIDLocators(BasePageANDROIDLocators):
ALLOW_BUTTON = (By.ID, 'com.android.packageinstaller:id/permission_allow_button')
KEYBOARD = (MobileBy.ID, 'ru.mail.search.electroscope:id/keyboard')
INPUT_TEXT = (MobileBy.ID, 'ru.mail.search.electroscope:id/input_text')
MENU = (MobileBy.ID, 'ru.mail.search.electroscope:id/assistant_menu_bottom')
class SettingsPageANDROIDLocators(BasePageANDROIDLocators):
MENU = (MobileBy.ID, 'ru.mail.search.electroscope:id/assistant_menu_bottom')
NEWS = (MobileBy.ID, 'ru.mail.search.electroscope:id/user_settings_field_news_sources')
VESTI = (MobileBy.XPATH, "//android.widget.TextView[contains(@text, 'Вести FM')]")
CHECK = (MobileBy.ID, 'ru.mail.search.electroscope:id/news_sources_item_selected')
BACK = (MobileBy.CLASS_NAME, 'android.widget.ImageButton')
ABOUT = (MobileBy.ID, 'ru.mail.search.electroscope:id/user_settings_about')
VERISON = (MobileBy.ID, 'ru.mail.search.electroscope:id/about_version')
COPY_RIGHT = (MobileBy.ID, 'ru.mail.search.electroscope:id/about_copyright')
class SearchPageANDROIDLocators(BasePageANDROIDLocators):
KEYBOARD = (MobileBy.ID, 'ru.mail.search.electroscope:id/keyboard')
INPUT_TEXT = (MobileBy.ID, 'ru.mail.search.electroscope:id/input_text')
SEARCH_BUTTON = (MobileBy.ID, 'ru.mail.search.electroscope:id/text_input_send')
CARD = (MobileBy.ID, 'ru.mail.search.electroscope:id/item_dialog_fact_card_content_text')
CARD_TITLE = (MobileBy.ID, 'ru.mail.search.electroscope:id/item_dialog_fact_card_title')
DIALOG_ITEM = (MobileBy.ID, 'ru.mail.search.electroscope:id/dialog_item')
NUMBERS = (MobileBy.XPATH, "//android.widget.TextView[contains(@text, 'население россии')]")
|
UTF-8
|
Python
| false | false | 1,898 |
py
| 53 |
locators_android.py
| 41 | 0.744409 | 0.744409 | 0 | 33 | 55.878788 | 96 |
nathanle89/geocoding-service
| 4,303,557,256,862 |
e21df12fba464c48ae88a966b1f6beb8007e7a6f
|
ae5ba5f9566ea8455a25f61794324a4cbfcd728b
|
/geocoding/app/views/helpers/custom_exceptions.py
|
772758a5a1010978b013de23f1596c51987f27ae
|
[] |
no_license
|
https://github.com/nathanle89/geocoding-service
|
7290c08076b797574296b68b8d5d8d86a46ebcd5
|
6ded8175734bd4ff05ea93a2327e3550f7f35d70
|
refs/heads/master
| 2021-08-22T04:11:34.526244 | 2018-11-04T01:29:05 | 2018-11-04T01:29:05 | 155,804,387 | 0 | 0 | null | false | 2021-06-10T20:56:49 | 2018-11-02T02:38:06 | 2018-11-04T01:29:27 | 2021-06-10T20:56:49 | 37 | 0 | 0 | 4 |
Python
| false | false |
from rest_framework.exceptions import APIException
class ServiceUnavailable(APIException):
status_code = 503
default_detail = 'Service temporarily unavailable, try again later.'
default_code = 'service_unavailable'
class ServerError(APIException):
status_code = 500
default_detail = 'Oops something went wrong'
default_code = 'server_error'
class ParseError(APIException):
status_code = 400
default_detail = 'Bad Request'
default_code = 'bad_request'
class ValidationError(APIException):
status_code = 422
default_detail = 'Validation failed'
default_code = 'validation_error'
class NotFound(APIException):
status_code = 404
default_detail = 'Not Found'
default_code = 'not_found'
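# Added note: these subclasses are raised directly inside DRF views (e.g. raise NotFound()),
# and rest_framework's default exception handler converts them into responses carrying
# the status codes declared above.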
|
UTF-8
|
Python
| false | false | 746 |
py
| 19 |
custom_exceptions.py
| 16 | 0.719839 | 0.699732 | 0 | 26 | 27.692308 | 72 |
neilmacintyre/Trends-in-Higher-Education
| 18,339,510,365,758 |
713ff8ee3352b6042f52ce8ede430c759a58dee9
|
faec5ac1ace7fd162faa10bc5b905c580de207f6
|
/Parser/ClemsonDownloader.py
|
fdb46f9813dcbd3aab6f59ff9008e6ca318aeb24
|
[] |
no_license
|
https://github.com/neilmacintyre/Trends-in-Higher-Education
|
f7b33ce7ea79db225a3a63a226dec52dd2f65ba9
|
565373359142c0e04de50997a063bfc074234ef2
|
refs/heads/master
| 2020-07-18T19:58:12.527341 | 2019-09-22T18:29:39 | 2019-09-22T18:29:39 | 206,303,984 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import requests
url_base = 'https://career.sites.clemson.edu/data_analytics/table_cmd.php?action=getCollege&year='
url_terminal = '&college=All&mode=false&deg_bac=true&deg_mas=true&deg_doc=true&csv=true'
start_year = 2009
end_year = 2016
for year in range(start_year, end_year):
    # urls are of the format url_base + XX_YY + url_terminal where XX and YY are the academic years
XX = str(year)[2:4]
YY = str(year + 1)[2:4]
res = requests.get('%s%s_%s%s' % (url_base,XX,YY,url_terminal))
with open('../Data/CSV/Clemson/%d-%d.csv' % (year, year + 1), 'w') as csv_file:
csv_file.write(res.text)
|
UTF-8
|
Python
| false | false | 639 |
py
| 28 |
ClemsonDownloader.py
| 24 | 0.643192 | 0.621283 | 0 | 17 | 35.705882 | 99 |
cavieres/Documents
| 11,166,914,975,247 |
c1705c631fe354e0a7165d3b0e28bdf72296eb79
|
d8cfd1271434e943befe33a25c1f526fee953f99
|
/ADP/adp.latamautomation.chile.expert/expert/pages/login_page.py
|
bb3b19be329c85f306803ea2abcf0104e4c442a6
|
[] |
no_license
|
https://github.com/cavieres/Documents
|
6fa3249d5db5b140602b354b159b652563e454fc
|
4d8e8547af738c89d3b9945b34c7a05531b8c303
|
refs/heads/master
| 2021-01-20T08:10:18.445173 | 2019-03-20T02:06:05 | 2019-03-20T02:06:05 | 90,109,077 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from expert.catalogs import login_catalog
from scriptize.components.page_element import PageObjects
class LoginPage(PageObjects):
"""
    The 'LoginPage' class is responsible for interacting with the components of the login_action page.
"""
def click_desplegar_cuenta(self):
"""
        Click the link that expands the connection details
"""
self.button.click(login_catalog.INPUT_MUESTRA_CUENTA)
def select_cuenta(self, cuenta):
"""
        Select the account on the login_action page
        :param cuenta: account
"""
self.select.select_by_value(login_catalog.SELECT_CUENTA, cuenta)
def wait_fields_load(self):
"""
        Wait for the elements required for login to load
"""
self.wait_for_element_visible(login_catalog.INPUT_CUENTA)
self.wait_for_element_visible(login_catalog.INPUT_JUEGO_DATOS)
self.wait_for_element_visible(login_catalog.INPUT_DOMAIN)
self.wait_for_element_visible(login_catalog.INPUT_USUARIO)
self.wait_for_element_visible(login_catalog.INPUT_CLAVE)
self.wait_for_element_visible(login_catalog.BUTTON_INGRESAR)
def set_cuenta(self, cuenta):
"""
        Fill in the account name field on the login_action page
        :param cuenta: account name
"""
self.input.fill(login_catalog.INPUT_CUENTA, cuenta)
def set_jdd(self, jdd):
"""
        Fill in the data set (Juego de Datos) field on the login_action page
        :param jdd: data set name
"""
self.input.fill(login_catalog.INPUT_JUEGO_DATOS, jdd)
def set_dominio(self, dominio):
"""
        Fill in the domain field on the login_action page
        :param dominio: domain base
"""
self.input.fill(login_catalog.INPUT_DOMAIN, dominio)
def set_usuario(self, usuario):
"""
        Fill in the user field on the login_action page
        :param usuario: username
"""
self.input.fill(login_catalog.INPUT_USUARIO, usuario)
def set_password(self, password):
"""
        Fill in the password field on the login_action page
        :param password: password
"""
self.input.fill(login_catalog.INPUT_CLAVE, password)
def click_login(self):
"""
        Click login to validate the entered credentials
"""
self.button.click(login_catalog.BUTTON_INGRESAR)
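# Hedged usage sketch (added as comments; the constructor wiring and the credential
# values below are assumptions, not taken from this repository):
#   page = LoginPage(driver)
#   page.click_desplegar_cuenta()
#   page.wait_fields_load()
#   page.set_cuenta('account')
#   page.set_jdd('dataset')
#   page.set_usuario('user')
#   page.set_password('password')
#   page.click_login()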
|
UTF-8
|
Python
| false | false | 2,490 |
py
| 242 |
login_page.py
| 143 | 0.640468 | 0.640468 | 0 | 73 | 32.986301 | 108 |
OutlierLi/Python_Mooc_Code
| 12,678,743,474,948 |
1b0473d40a1da3659a20e0ffd591d3bf28dcc7f8
|
7bc4186d790ec4402063ee5cf0678290f4c3d6d3
|
/8、正则表达式与JSON/c10.py
|
bb74b261b247d3819da79f811886fb6262e41d0a
|
[] |
no_license
|
https://github.com/OutlierLi/Python_Mooc_Code
|
240be1547aa02987cf8b22f198b25af21d5a8f6b
|
4cff0cb70b3aef023ddcbfeee9d640cb6d6c43f8
|
refs/heads/master
| 2023-03-04T02:53:00.442464 | 2021-02-16T10:02:27 | 2021-02-16T10:02:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import re
s = 'life is short, i use python'
r = re.search('life(.*)pyth(\w)n', s)
print(r.group(0))
print(r.group(1))
print(r.group(0,1)) # 返回是一个元组
print(r.groups())
# r = re.findall('life(.*)pyth(.)n', s)
# print(r) # 列表中的一个元组
|
UTF-8
|
Python
| false | false | 263 |
py
| 68 |
c10.py
| 67 | 0.600858 | 0.583691 | 0 | 11 | 19.909091 | 39 |
kailashbuki/predator
| 8,933,532,015,428 |
5e0635278a2372edea77c8ff9bb01228b4cf6e4d
|
be3c008ee5ab918efa5cccee6a55e3c37e7ce370
|
/installed/webserver/views/contrib/validating.py
|
f7aa5e2ddac3cc3900b82935f6b12a63f2d19ea3
|
[
"MIT"
] |
permissive
|
https://github.com/kailashbuki/predator
|
611526f0ca3fd0826ac1af05bba8751fe53e5042
|
f9e702e821858ed1cb3f53dd47d01c0c6979e8bc
|
refs/heads/master
| 2022-02-15T01:33:06.703315 | 2022-02-01T18:48:27 | 2022-02-01T18:48:27 | 2,400,498 | 3 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def is_valid_file(filename):
if filename.endswith('.pdf'):
return True
return False
|
UTF-8
|
Python
| false | false | 101 |
py
| 52 |
validating.py
| 40 | 0.633663 | 0.633663 | 0 | 4 | 24 | 33 |
Elnurhan/python-oop
| 5,944,234,787,211 |
3b124963e22a6a4d5f8b93c03424700b47682924
|
8ef5c5248b9c50443f8be944449f1b968ba3809e
|
/week4/chain_of_responsobility/ex1.py
|
4f365deaae1c98a65d481518c0e1776b898ae083
|
[] |
no_license
|
https://github.com/Elnurhan/python-oop
|
b8c8a0a08f3209a303425f76b73e358a135164a9
|
2b8c807c1f4d57b9065b1f80c734df87197dbab4
|
refs/heads/master
| 2022-06-13T20:57:26.165056 | 2020-05-09T17:47:50 | 2020-05-09T17:47:50 | 261,805,968 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class Character:
def __init__(self):
self.name = "Arg"
self.xp = 0
self.passed_quests = set()
self.taken_quests = set()
def add_quest_speak(char):
    quest_name = "Talk to the farmer"
    xp = 100
    if quest_name not in (char.passed_quests | char.taken_quests):
        print(f'Quest accepted: "{quest_name}"')
        char.taken_quests.add(quest_name)
    elif quest_name in char.taken_quests:
        print(f'Quest turned in: "{quest_name}"')
        char.passed_quests.add(quest_name)
        char.taken_quests.remove(quest_name)
        char.xp += xp
def add_quest_hunt(char):
    quest_name = "Rat hunt"
    xp = 300
    if quest_name not in (char.passed_quests | char.taken_quests):
        print(f'Quest accepted: "{quest_name}"')
        char.taken_quests.add(quest_name)
    elif quest_name in char.taken_quests:
        print(f'Quest turned in: "{quest_name}"')
        char.passed_quests.add(quest_name)
        char.taken_quests.remove(quest_name)
        char.xp += xp
def add_quest_carry(char):
    quest_name = "Bring boards from the barn"
    xp = 200
    if quest_name not in (char.passed_quests | char.taken_quests):
        print(f'Quest accepted: "{quest_name}"')
        char.taken_quests.add(quest_name)
    elif quest_name in char.taken_quests:
        print(f'Quest turned in: "{quest_name}"')
        char.passed_quests.add(quest_name)
        char.taken_quests.remove(quest_name)
        char.xp += xp
class QuestGiver:
def __init__(self):
self.quests = []
def add_quest(self, quest):
self.quests.append(quest)
def handle_quests(self, character):
for quest in self.quests:
quest(character)
all_quests = [add_quest_carry, add_quest_hunt, add_quest_speak]
quest_giver = QuestGiver()
for quest in all_quests:
quest_giver.add_quest(quest)
player = Character()
quest_giver.handle_quests(player)
print("Получено: ", player.taken_quests)
print("Сдано: ", player.passed_quests)
player.taken_quests = {"Принести доски с сарая", "Поговорить с фермером"}
quest_giver.handle_quests(player)
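# Added note: QuestGiver.handle_quests simply walks every registered quest function,
# so each function acts as a handler in a small chain of responsibility. On this
# second pass the two quests placed back into taken_quests are reported as turned in,
# while "Rat hunt" is offered again.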
|
UTF-8
|
Python
| false | false | 2,268 |
py
| 2 |
ex1.py
| 2 | 0.631779 | 0.627022 | 0 | 75 | 27.026667 | 73 |
RumenKotsew/PythonPlayground
| 2,774,548,921,250 |
68de7e9cd0b3fea74d852f0639aaefba5b7961cb
|
514cf12e403ca2dadb1dd9c6166576329ac98eed
|
/MoneyInTheBank/money_in_the_bank/sql_manager.py
|
cbb941e1d76d18988786c269bea41a189121d95c
|
[] |
no_license
|
https://github.com/RumenKotsew/PythonPlayground
|
7b2331a740048e3162209c6826880a0469a17ee1
|
81086d56b27d4c6b7efb3470502f173fd184f4de
|
refs/heads/master
| 2021-01-12T18:16:16.105391 | 2017-10-22T19:23:02 | 2017-10-22T19:23:02 | 71,358,417 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sqlite3
from datetime import datetime
from client import Client
from decorators.login import strong_password, hash_password
from exceptions import PasswordNotStrongEnoughException, UserAndPasswordDontMatchException
conn = sqlite3.connect("bank.db")
cursor = conn.cursor()
def create_clients_table():
create_query = '''create table if not exists
clients(id INTEGER PRIMARY KEY AUTOINCREMENT,
username TEXT,
password TEXT,
balance REAL DEFAULT 0,
message TEXT,
lastfailedloginattempt TEXT DEFAULT NULL,
numberoftriesleft INTEGER)'''
cursor.execute(create_query)
def change_message(new_message, logged_user):
update_sql = "UPDATE clients SET message = '%s' WHERE id = '%s'" % (new_message, logged_user.get_id())
cursor.execute(update_sql)
conn.commit()
logged_user.set_message(new_message)
def change_pass(new_pass, logged_user):
update_sql = "UPDATE clients SET password = '%s' WHERE id = '%s'" % (new_pass, logged_user.get_id())
cursor.execute(update_sql)
conn.commit()
@strong_password()
@hash_password()
def register(username, password):
insert_sql = "insert into clients (username, password) values ('%s', '%s')" % (username, password)
cursor.execute(insert_sql)
conn.commit()
@hash_password()
def login(username, password):
interval = 5 * 60
select_query = "SELECT id, username, balance, message FROM clients WHERE username = '%s' AND password = '%s' LIMIT 1" % (username, password)
time_query = "SELECT lastfailedloginattempt, numberoftriesleft FROM clients WHERE username = '%s" % (username)
cursor.execute(select_query)
user = cursor.fetchone()
if user:
time_counter_reset_query = "UPDATE clients SET numberoftriesleft = 0 WHERE id = '%s'" % (user[0])
cursor.execute(time_counter_reset_query)
conn.commit()
return Client(user[0], user[1], user[2], user[3])
cursor.execute(time_query)
user_time = cursor.fetchone()
if not user_time:
raise UserNotFoundException
last_failed_attempt, tries_left = user_time[0], user_time[1]
if last_failed_attempt is None:
pass
else:
now = datetime.now()
# lastfailedloginattempt is stored as TEXT; assumes it was written with str(datetime.now())
last_failed_attempt = datetime.strptime(last_failed_attempt, "%Y-%m-%d %H:%M:%S.%f")
if (now - last_failed_attempt).total_seconds() < interval:
tries_left += 1
if tries_left == 5:
raise BruteForceException
else:
increase_numbers_query = "UPDATE clients SET numberoftriesleft = %s WHERE username = '%s'" % (tries_left, username)
cursor.execute(increase_numbers_query)
conn.commit()
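# --- Illustrative usage (not part of the original module) ---------------------
# A minimal register/login sketch.  It assumes the decorators in decorators.login
# accept a plain-text password and that the exception classes imported above are
# the ones this module raises; the username/password values are made up.
if __name__ == '__main__':
    create_clients_table()
    try:
        register('alice', 'Str0ng!Passw0rd')
    except PasswordNotStrongEnoughException:
        print('password rejected: not strong enough')
    logged_user = login('alice', 'Str0ng!Passw0rd')
    if logged_user:
        change_message('Welcome back!', logged_user)
        print('logged in as client', logged_user.get_id())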
|
UTF-8
|
Python
| false | false | 2,686 |
py
| 48 |
sql_manager.py
| 48 | 0.655622 | 0.65041 | 0 | 79 | 33 | 150 |
omar-Fouad/3D_scene_rec_CNNs
| 11,201,274,731,602 |
2aa22daae4414b82d62ed4f1ca7eb5590e02e268
|
de967d28501ecbebbbe19e1d1165462303f73d37
|
/utils.py
|
2e3327a673ef0be9ea98dd07f1de1ca339eb8ea3
|
[] |
no_license
|
https://github.com/omar-Fouad/3D_scene_rec_CNNs
|
1cbf051f548aba7a6622475238b26ab75001b80c
|
dfb1db57b6f9eaa98eba14c5f48f1ebf3c6a9d2d
|
refs/heads/master
| 2020-07-07T04:53:42.195257 | 2019-05-13T21:03:29 | 2019-05-13T21:03:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import math
import cv2
import numpy as np
from torchvision import transforms
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from colormap import rgb2hex
HEIGHT = 320
WIDTH = 320
CLASSES = {0: "invalid", 1: "flat", 2: "constructions", 3: "street furnitures",
4: "vegetation", 5: "sky", 6: "humans", 7: "vehicles"}
CLASS_COLORS = {0: [0, 0, 0], 1: [1, 1, 0], 2: [1, 0.5, 0], 3: [0, 0, 1],
4: [0, 1, 0], 5: [0, 1, 1], 6: [1, 0.4, 1], 7: [1, 0, 0]}
data_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
])
def scale_and_crop_img(img):
img = img[..., ::-1]
# resizing
scale = max(WIDTH/img.shape[1], HEIGHT/img.shape[0])
img = cv2.resize(img, (math.ceil(img.shape[1]*scale), math.ceil(img.shape[0]*scale)))
# center crop to input size
y_crop = img.shape[0] - HEIGHT
x_crop = img.shape[1] - WIDTH
img = img[math.floor(y_crop/2):img.shape[0]-math.ceil(y_crop/2),
math.floor(x_crop/2):img.shape[1]-math.ceil(x_crop/2)]
return img
def transform_img(img):
img = data_transform(img)
img = img[None, :, :, :]
return img
def load_img(img_path):
img = cv2.imread(img_path)
img = scale_and_crop_img(img)
img = transform_img(img)
return img
def correct_img(img):
img = np.transpose(img, (1, 2, 0))
mean = np.array([0.485, 0.456, 0.406])
std = np.array([0.229, 0.224, 0.225])
img = (std * img + mean)
img = np.clip(img, 0, 1)
return img
def show_img_preds(img, depth_pred, seg_pred, uc_th=0.0, apply_depth_mask=False):
plt.figure(0, figsize=(8, 6))
# plot input img
plt.subplot(2, 3, 1)
plt.title("RGB")
img = correct_img(img)
plt.imshow(img)
plt.gca().axes.get_yaxis().set_ticks([])
plt.gca().axes.get_xaxis().set_ticks([])
# plot depth image
plt.subplot(2, 3, 2)
plt.title("depth estimation")
depth_pred = depth_pred[0, :, :]
plt.imshow(depth_pred)
plt.gca().axes.get_yaxis().set_ticks([])
plt.gca().axes.get_xaxis().set_ticks([])
# plot segmentation
plt.subplot(2, 3, 3)
plt.title("segmentation")
seg_labels = np.argmax(seg_pred, 0)+1
mask = np.zeros(shape=(seg_labels.shape[0], seg_labels.shape[1], 3))
for key in CLASSES:
class_mask = np.isin(seg_labels, np.asarray(key))
mask[:, :, 0] += class_mask*CLASS_COLORS[key][0]
mask[:, :, 1] += class_mask*CLASS_COLORS[key][1]
mask[:, :, 2] += class_mask*CLASS_COLORS[key][2]
mask = np.clip(mask, 0, 1)
plt.imshow(img)
plt.imshow(mask, alpha=0.3)
plt.gca().axes.get_yaxis().set_ticks([])
plt.gca().axes.get_xaxis().set_ticks([])
# plot masked depth image
plt.subplot(2, 3, 5)
plt.title("masked de")
if apply_depth_mask:
# mask high gradient regions ~ these are usually not as accurate
grad = np.asarray(np.gradient(depth_pred))
grad = np.abs(grad[0, :, :]) + np.abs(grad[1, :, :])
grad_mask = grad < 0.9
depth_mask = depth_pred < 50.0 # mask everything that is farther than 50m
depth_pred = depth_pred * depth_mask * grad_mask
plt.imshow(depth_pred)
plt.gca().axes.get_yaxis().set_ticks([])
plt.gca().axes.get_xaxis().set_ticks([])
# plot masked seg
plt.subplot(2, 3, 6)
plt.title("masked seg")
# mask out pixels where the certainty of the class prediction is lower than the uc_threshold
uc = np.max(seg_pred, 0)
uc_mask = uc > uc_th
seg_labels = np.argmax(seg_pred, 0)+1
seg_labels *= uc_mask
mask = np.zeros(shape=(seg_labels.shape[0], seg_labels.shape[1], 3))
for key in CLASSES:
class_mask = np.isin(seg_labels, np.asarray(key))
mask[:, :, 0] += class_mask*CLASS_COLORS[key][0]
mask[:, :, 1] += class_mask*CLASS_COLORS[key][1]
mask[:, :, 2] += class_mask*CLASS_COLORS[key][2]
mask = np.clip(mask, 0, 1)
plt.imshow(img)
plt.imshow(mask, alpha=0.3)
plt.gca().axes.get_yaxis().set_ticks([])
plt.gca().axes.get_xaxis().set_ticks([])
plt.draw()
def show_point_cloud(img, depth_pred, seg_pred, f_len, uc_th=0.0, apply_depth_mask=False):
img = correct_img(img)
depth_pred = np.transpose(depth_pred, (1, 2, 0))
depth_pred = depth_pred[:, :, 0]
if apply_depth_mask:
# mask high gradient regions ~ these are usually not as accurate
grad = np.asarray(np.gradient(depth_pred))
grad = np.abs(grad[0, :, :]) + np.abs(grad[1, :, :])
grad_mask = grad < 0.95
depth_mask = (depth_pred < 50.0)*(depth_pred > 5.0) # keep only depths between 5m and 50m
depth_pred = depth_pred * depth_mask * grad_mask
# mask out pixels where the certainty of the class prediction is lower than the uc_threshold
uc = np.max(seg_pred, 0)
uc_mask = uc > uc_th
seg_pred = np.argmax(seg_pred, 0)+1
seg_pred *= uc_mask
mask = np.zeros(shape=(seg_pred.shape[0], seg_pred.shape[1], 3))
for key in CLASSES:
class_mask = np.isin(seg_pred, np.asarray(key))
mask[:, :, 0] += class_mask*CLASS_COLORS[key][0]
mask[:, :, 1] += class_mask*CLASS_COLORS[key][1]
mask[:, :, 2] += class_mask*CLASS_COLORS[key][2]
mask = np.clip(mask, 0, 1)
mask = (img*0.7)+(mask*0.3)
# generate 3D points
x = []
y = []
z = []
colors = []
idx = 0
for i in range(depth_pred.shape[0]):
for j in range(depth_pred.shape[1]):
idx += 1
# if the distance is too large or small, skip
if depth_pred[i, j] > 50.0 or depth_pred[i, j] < 5.0:
continue
# if the pixel is classified as sky or if its uncertain, skip
if seg_pred[i, j] == 5 or seg_pred[i, j] == 0:
continue
# only show every 2nd pixel
if idx % 2 == 1:
continue
z.append(depth_pred[i, j])
y.append(i*depth_pred[i, j]/f_len)
x.append((-160)+j*depth_pred[i, j]/f_len)
# color based on mask (0.7*pixel color + 0.3*label color)
r, g, b = int(mask[i, j][0]*255), int(mask[i, j][1]*255), int(mask[i, j][2]*255)
colors.append(rgb2hex(r, g, b))
fig = plt.figure(figsize=(8, 8))
ax = fig.add_subplot(111, projection="3d")
ax.scatter(x, y, z, c=colors, marker=",", s=5)
ax.set_xlabel("X Label")
ax.set_ylabel("Y Label")
ax.set_zlabel("Z Label")
ax.view_init(elev=-37., azim=-117.)
plt.draw()
plt.show()
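# --- Illustrative smoke test (not part of the original module) ----------------
# Exercises the plotting helper with random stand-ins shaped like the network
# outputs: a (1, H, W) depth map in metres and a (C, H, W) segmentation score map.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    demo_img = rng.rand(3, HEIGHT, WIDTH).astype(np.float32)       # normalised RGB, (3, H, W)
    demo_depth = rng.uniform(5.0, 60.0, size=(1, HEIGHT, WIDTH))   # metres, (1, H, W)
    demo_seg = rng.rand(len(CLASSES) - 1, HEIGHT, WIDTH)           # scores for the 7 real classes
    show_img_preds(demo_img, demo_depth, demo_seg, uc_th=0.2, apply_depth_mask=True)
    plt.show()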
|
UTF-8
|
Python
| false | false | 6,742 |
py
| 3 |
utils.py
| 2 | 0.560664 | 0.521507 | 0 | 198 | 33.050505 | 103 |
VirtusLab/git-machete
| 7,662,221,692,703 |
9559c0473be51c12e8b11b2469b8b07e917f7534
|
9071dc219693bde591ad12fb31c43c635f3a3f5e
|
/tests/test_add.py
|
3eb5950be3b4f2bf2102e6df40260e37051c6f15
|
[
"MIT"
] |
permissive
|
https://github.com/VirtusLab/git-machete
|
67f51e49d44601daee4cc40fa27de87ecc029af6
|
dca261b0f8c56edb65557d178321a21177872b05
|
refs/heads/master
| 2023-08-17T07:58:32.883018 | 2023-08-12T14:52:48 | 2023-08-12T14:52:48 | 122,743,101 | 711 | 45 |
MIT
| false | 2023-09-08T07:39:27 | 2018-02-24T13:32:07 | 2023-09-06T12:31:25 | 2023-09-08T07:39:26 | 4,404 | 721 | 39 | 74 |
Python
| false | false |
from pytest_mock import MockerFixture
from .base_test import BaseTest
from .mockers import (assert_failure, assert_success, mock_input_returning,
mock_input_returning_y, rewrite_branch_layout_file)
class TestAdd(BaseTest):
def test_add(self, mocker: MockerFixture) -> None:
(
self.repo_sandbox.new_branch("master")
.commit("master commit.")
.new_branch("develop")
.commit("develop commit.")
.new_branch("feature")
.commit("feature commit.")
.check_out("develop")
.commit("New commit on develop")
)
body: str = \
"""
master
develop
feature
"""
rewrite_branch_layout_file(body)
self.repo_sandbox.new_branch("bugfix/feature_fail")
# Test `git machete add` without providing the branch name
self.patch_symbol(mocker, "builtins.input", mock_input_returning("n"))
assert_success(
['add'],
'Add bugfix/feature_fail onto the inferred upstream (parent) branch develop? (y, N)\n'
)
assert_success(
['add', '-y'],
'Adding bugfix/feature_fail onto the inferred upstream (parent) branch develop\n'
'Added branch bugfix/feature_fail onto develop\n'
)
self.repo_sandbox.check_out('develop')
self.repo_sandbox.new_branch("bugfix/some_feature")
assert_success(
['add', '-y', 'bugfix/some_feature'],
'Adding bugfix/some_feature onto the inferred upstream (parent) branch develop\n'
'Added branch bugfix/some_feature onto develop\n'
)
self.repo_sandbox.check_out('develop')
self.repo_sandbox.new_branch("bugfix/another_feature")
assert_success(
['add', '-y', 'refs/heads/bugfix/another_feature'],
'Adding bugfix/another_feature onto the inferred upstream (parent) branch develop\n'
'Added branch bugfix/another_feature onto develop\n'
)
# test with --onto option
self.repo_sandbox.new_branch("chore/remove_indentation")
assert_success(
['add', '--onto=feature'],
'Added branch chore/remove_indentation onto feature\n'
)
def test_add_check_out_remote_branch(self, mocker: MockerFixture) -> None:
"""
Verify the behaviour of a 'git machete add' command in the special case when a remote branch is checked out locally.
"""
(
self.repo_sandbox.new_branch("master")
.commit("master commit.")
.new_branch("feature/foo")
.push()
.check_out("master")
.delete_branch("feature/foo")
)
self.patch_symbol(mocker, "builtins.input", mock_input_returning("n"))
assert_success(
['add', 'foo'],
'A local branch foo does not exist. Create out of the current HEAD? (y, N)\n'
)
assert_success(
['add', '-y', 'foo'],
'A local branch foo does not exist. Creating out of the current HEAD\n'
'Added branch foo as a new root\n'
)
self.patch_symbol(mocker, "builtins.input", mock_input_returning("n"))
assert_success(
['add', '--as-root', 'feature/foo'],
'A local branch feature/foo does not exist, but a remote branch origin/feature/foo exists.\n'
'Check out feature/foo locally? (y, N)\n'
)
assert_success(
['add', '-y', '--as-root', 'feature/foo'],
'A local branch feature/foo does not exist, but a remote branch origin/feature/foo exists.\n'
'Checking out feature/foo locally...\n'
'Added branch feature/foo as a new root\n'
)
def test_add_new_branch_onto_managed_current_branch(self, mocker: MockerFixture) -> None:
(
self.repo_sandbox.new_branch("master")
.commit()
)
rewrite_branch_layout_file("master")
self.patch_symbol(mocker, "builtins.input", mock_input_returning_y)
assert_success(
['add', 'foo'],
"A local branch foo does not exist. Create out of the current HEAD? (y, N)\n"
"Added branch foo onto master\n"
)
def test_add_new_branch_when_cannot_infer_parent(self, mocker: MockerFixture) -> None:
(
self.repo_sandbox.new_branch("master")
.commit()
.new_branch("develop")
.commit()
.check_out("master")
)
rewrite_branch_layout_file("develop")
self.patch_symbol(mocker, "builtins.input", mock_input_returning_y)
assert_failure(
['add', 'foo'],
"""
Could not automatically infer upstream (parent) branch for foo.
You can either:
1) specify the desired upstream branch with --onto or
2) pass --as-root to attach foo as a new root or
3) edit the branch layout file manually with git machete edit"""
)
def test_add_already_managed_branch(self) -> None:
(
self.repo_sandbox.new_branch("master")
.commit("master commit.")
.new_branch("develop")
.commit("develop commit.")
)
rewrite_branch_layout_file("master\n develop")
assert_failure(['add', 'develop'], 'Branch develop already exists in the tree of branch dependencies')
def test_add_onto_non_existent_branch(self) -> None:
(
self.repo_sandbox.new_branch("master")
.commit("master commit.")
.new_branch("develop")
.commit("develop commit.")
)
rewrite_branch_layout_file("master")
assert_failure(
['add', 'develop', '--onto', 'foo'],
"Branch foo not found in the tree of branch dependencies.\n"
"Use git machete add foo or git machete edit."
)
def test_add_as_root_with_onto(self) -> None:
assert_failure(
['add', '--onto', 'foo', '--as-root'],
"Option -R/--as-root cannot be specified together with -o/--onto."
)
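    # --- Illustrative sketch, not part of the upstream suite --------------------
    # Mirrors the --onto scenario from test_add() with different branch names and
    # assumes the same "Added branch <X> onto <Y>" output format shown there.
    def test_add_onto_explicit_parent_sketch(self) -> None:
        (
            self.repo_sandbox.new_branch("master")
            .commit("master commit.")
            .new_branch("develop")
            .commit("develop commit.")
        )
        rewrite_branch_layout_file("master\n  develop")
        self.repo_sandbox.new_branch("chore/cleanup")
        assert_success(
            ['add', '--onto=develop'],
            'Added branch chore/cleanup onto develop\n'
        )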
|
UTF-8
|
Python
| false | false | 6,323 |
py
| 211 |
test_add.py
| 118 | 0.555907 | 0.555433 | 0 | 176 | 34.926136 | 124 |
xghjbvvg/economic-analysis
| 3,341,484,590,422 |
479979c3ee3ac19eba9d3cd6bbfe068a6ef2cac1
|
ac787253b4b44c91afefb5e630281d37ca5c5e8d
|
/main/dto/userDto.py
|
d19d05a215ea1a709dcbbc70e0781a3a5c71d09f
|
[] |
no_license
|
https://github.com/xghjbvvg/economic-analysis
|
14f3e580f433583ff57dc77def38e9561d8eb02b
|
197ad32148acbd9a7fe1d6a7dc39e02de821dc70
|
refs/heads/main
| 2023-01-13T13:15:23.648461 | 2020-11-28T13:06:27 | 2020-11-28T13:06:27 | 311,667,694 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from collections import OrderedDict
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String
Base = declarative_base()
class UserDto(Base):
# def __init__(self, id, name, password):
# self.id = id
# self.name = name
# self.password = password
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
name = Column(String(12))
password = Column(String(12))
def __repr__(self):
# __repr__ must return a string, so render the ordered fields as one
return str(OrderedDict(id=self.id,
name=self.name,
password=self.password))
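# --- Illustrative usage (not part of the original module) ---------------------
# Minimal sketch: an in-memory SQLite engine, one inserted row, one query.
if __name__ == '__main__':
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker
    engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(engine)                 # creates the `user` table
    session = sessionmaker(bind=engine)()
    session.add(UserDto(name='alice', password='secret'))
    session.commit()
    print(session.query(UserDto).filter_by(name='alice').first())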
|
UTF-8
|
Python
| false | false | 609 |
py
| 16 |
userDto.py
| 15 | 0.607553 | 0.600985 | 0 | 18 | 32.833333 | 55 |
js345/AutomaticQA
| 11,407,433,149,437 |
b13a20dccb30a7682d23641cbc1e598872d3017e
|
ed28b14733e6f4dd5b923da9beb9b6517fd8780b
|
/main.py
|
101711d9298dad692a7664182e0e4ce45999ffe5
|
[
"MIT"
] |
permissive
|
https://github.com/js345/AutomaticQA
|
dfcc3a75e691b75d206e5d63323cb8d86d2bdba5
|
4552a63e01c0d521f93159baacc0cff5f525d595
|
refs/heads/master
| 2020-06-21T13:51:27.766513 | 2016-12-14T04:16:20 | 2016-12-14T04:16:20 | 74,786,121 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
AutomaticQA main
Created on 11/25/16
@author: xiaofo
'''
from src.AutomaticQA import AutomaticQA
from util.Dataloader import load
from util.PrintFunctions import show_relevant_questions
path = 'data/QueryResults.csv'
data = load(path)
automaticQA = AutomaticQA(data, 50)
#automaticQA.train()
#automaticQA.save_info()
#pairs = automaticQA.find_relevant_questions(0.05)
automaticQA.load_info()
pairs = automaticQA.find_relevant_questions(0.05)
print(automaticQA.fetch_answer("Why does this try/except/else function return None?"))
#show_relevant_questions(pairs, automaticQA)
|
UTF-8
|
Python
| false | false | 584 |
py
| 13 |
main.py
| 11 | 0.782534 | 0.758562 | 0 | 24 | 23.333333 | 86 |
doutriaux1/uvcdat
| 9,474,697,867,007 |
1b403f6a97d0e81d1ac6a5c5e2fea0c09483c403
|
638af6b8c580eeae23fc1034882c4b514195137a
|
/Packages/vcs/Lib/textcombined.py
|
4286b951afc246588c5f41f2c621bb9c97c6c9d5
|
[] |
no_license
|
https://github.com/doutriaux1/uvcdat
|
83684a86b514b8cac4d8900a503fc13d557fc4d2
|
37e9635f988696c346b4c3cdb49144d1e21dab5d
|
refs/heads/master
| 2021-01-17T07:57:22.897539 | 2015-02-02T22:52:12 | 2015-02-02T22:52:12 | 14,878,320 | 1 | 0 | null | true | 2015-02-19T20:54:25 | 2013-12-02T23:44:46 | 2015-02-02T22:52:25 | 2015-02-19T20:54:25 | 291,913 | 0 | 0 | 0 |
C
| null | null |
import vcs
# Adapted for numpy/ma/cdms2 by convertcdms.py
"""
# Text Combined (Tc) module
"""
###############################################################################
# #
# Module: textcombined (Tc) module #
# #
# Copyright: 2000, Regents of the University of California #
# This software may not be distributed to others without #
# permission of the author. #
# #
# Author: PCMDI Software Team #
# Lawrence Livermore NationalLaboratory: #
# support@pcmdi.llnl.gov #
# #
# Description: Python command wrapper for VCS's text combined secondary #
# object. Can be thought of as the combination of text table #
# and text orientation secondary objects. #
# #
# Version: 4.0 #
# #
###############################################################################
#
#
#
###############################################################################
# #
# Import: Text table (Tt), and Text orientation (To) #
# #
###############################################################################
import texttable
import textorientation
from types import *
import VCS_validation_functions
#############################################################################
# #
# Text Combined (Tc) Class. #
# #
#############################################################################
class Tc(object):
"""
Class: Tc # Text Combined
Description of Tc Class:
The (Tc) Text Combined class will combine a text table class and a text orientation
class together. From combining the two classes, the user will be able to set
attributes for both classes (i.e., define the font, spacing, expansion, color
index, height, angle, path, vertical alignment, and horizontal alignment).
This class is used to define and list a combined text table and text orientation
entry used in VCS.
Other Useful Functions:
a=vcs.init() # Constructor
a.show('texttable') # Show predefined text table objects
a.show('textorientation') # Show predefined text orientation objects
a.update() # Updates the VCS Canvas at user's request
a.mode=1, or 0 # If 1, then automatic update, else if
0, then use update function to
update the VCS Canvas.
Example of Use:
a=vcs.init()
To Create a new instance of text table use:
tc=a.createtextcombined('new_tt','std','new_to','7left') # Copies content of
# 'std' to 'new_tt' and '7left' to 'new_to'
To Modify an existing texttable use:
tc=a.gettextcombined('std','7left')
tc.list() # Will list all the textcombined attribute values
# (i.e., texttable and textorientation attributes
Specify the text font type:
tc.font=1 # The font value must be in the range 1 to 9
Specify the text spacing:
tc.spacing=2 # The spacing value must be in the range -50 to 50
Specify the text expansion:
tc.expansion=100 # The expansion value ranges from 50 to 150
Specify the text color:
tc.color=241 # The text color value ranges from 1 to 257
Specify the graphics text priority on the VCS Canvas:
tt.priority = 1
Specify the viewport and world coordinate:
tt.viewport=[0, 1.0, 0,1.0] # FloatType [0,1]x[0,1]
tt.worldcoordinate=[0,1.0,0,1.0] # FloatType [#,#]x[#,#]
Specify the location of the text:
tt.x=[[0,.1,.2], [.3,.4,.5]] # List of FloatTypes
tt.y=[[.5,.4,.3], [.2,.1,0]] # List of FloatTypes
Specify the text height:
tc.height=20 # The height value must be an integer
Specify the text angle:
tc.angle=0 # The angle value ranges from 0 to 360
Specify the text path:
tc.path='right' # Same as tc.path=0
tc.path='left' # Same as tc.path=1
tc.path='up' # Same as tc.path=2
tc.path='down' # Same as tc.path=3
Specify the text horizontal alignment:
tc.halign='left' # Same as tc.halign=0
tc.halign='center' # Same as tc.halign=1
tc.halign='right' # Same as tc.halign=2
Specify the text vertical alignment:
tc.valign='top' # Same as tc.valign=0
tc.valign='cap' # Same as tc.valign=1
tc.valign='half' # Same as tc.valign=2
tc.valign='base' # Same as tc.valign=3
tc.valign='bottom' # Same as tc.valign=4
"""
__slots__=[
's_name',
'name',
'Tt_name',
'To_name',
'To',
'Tt',
'color',
'fillincolor',
'priority',
'font',
'string',
'spacing',
'expansion',
'viewport',
'worldcoordinate',
'x',
'y',
'projection',
'height',
'angle',
'path',
'halign',
'valign',
]
def _getTtname(self):
return self.Tt.name
def _setTtname(self,value):
self.Tt.name = value
Tt_name = property(_getTtname,_setTtname)
def _getToname(self):
return self.To.name
def _setToname(self,value):
self.To.name = value
To_name = property(_getToname,_setToname)
def _getcolor(self):
return self.Tt.color
def _setcolor(self,value):
self.Tt.color=value
color = property(_getcolor,_setcolor)
def _getfcolor(self):
return self.Tt.fillincolor
def _setfcolor(self,value):
self.Tt.fillincolor=value
fillincolor = property(_getfcolor,_setfcolor)
def _getpriority(self):
return self.Tt.priority
def _setpriority(self,value):
self.Tt.priority=value
priority = property(_getpriority,_setpriority)
def _getfont(self):
return self.Tt.font
def _setfont(self,value):
self.Tt.font=value
font = property(_getfont,_setfont)
def _getstring(self):
return self.Tt.string
def _setstring(self,value):
self.Tt.string=value
string = property(_getstring,_setstring)
def _getspacing(self):
return self.Tt.spacing
def _setspacing(self,value):
self.Tt.spacing=value
spacing = property(_getspacing,_setspacing)
def _getexpansion(self):
return self.Tt.expansion
def _setexpansion(self,value):
self.Tt.expansion=value
expansion = property(_getexpansion,_setexpansion)
def _getx(self):
return self.Tt.x
def _setx(self,value):
self.Tt.x=value
x = property(_getx,_setx)
def _gety(self):
return self.Tt.y
def _sety(self,value):
self.Tt.y=value
y = property(_gety,_sety)
def _getviewport(self):
return self.Tt.viewport
def _setviewport(self,value):
self.Tt.viewport=value
viewport = property(_getviewport,_setviewport)
def _getworldcoordinate(self):
return self.Tt.worldcoordinate
def _setworldcoordinate(self,value):
self.Tt.worldcoordinate=value
worldcoordinate = property(_getworldcoordinate,_setworldcoordinate)
def _getprojection(self):
return self.Tt.projection
def _setprojection(self,value):
self.Tt.projection=value
projection = property(_getprojection,_setprojection)
def _getheight(self):
return self.To.height
def _setheight(self,value):
self.To.height=value
height = property(_getheight,_setheight)
def _getangle(self):
return self.To.angle
def _setangle(self,value):
self.To.angle=value
angle = property(_getangle,_setangle)
def _getpath(self):
return self.To.path
def _setpath(self,value):
self.To.path=value
path = property(_getpath,_setpath)
def _gethalign(self):
return self.To.halign
def _sethalign(self,value):
self.To.halign=value
halign = property(_gethalign,_sethalign)
def _getvalign(self):
return self.To.valign
def _setvalign(self,value):
self.To.valign=value
valign = property(_getvalign,_setvalign)
#############################################################################
# #
# Initialize the text combine attributes. #
# #
#############################################################################
def __init__(self, Tt_name=None, Tt_name_src='default', To_name=None, To_name_src='default'):
import vcs
if (Tt_name == None):
raise ValueError, 'Must provide a text table name.'
if (To_name == None):
To_name = Tt_name # Uses the same name as Tt
if Tt_name in vcs.elements["texttable"]:
raise Exception,"Error texttable object: '%s' already exists" % Tt_name
if To_name in vcs.elements["textorientation"]:
raise Exception,"Error textorientation object: '%s' already exists" % To_name
# #
###################################################################
# Inherits texttable and textorientation secondary sub-classes. #
###################################################################
# #
self.Tt = texttable.Tt(Tt_name, Tt_name_src)
self.To = textorientation.To(To_name, To_name_src)
self.name = "%s:::%s" % (Tt_name,To_name)
self.s_name = 'Tc'
vcs.elements["textcombined"][self.name] = self
# #
###########################################################
# Save the parent class. #
###########################################################
# #
## #############################################################################
## # #
## # Set text table and text orientation attributes. #
## # #
## #############################################################################
## def __setattr__(self, name, value):
## if ((self.Tt_name == '__removed_from_VCS__') or
## (self.To_name == '__removed_from_VCS__')):
## raise ValueError, 'This instance has been removed from VCS.'
## # Set the name to the appropriate class name (i.e., Tt or To)
## if ((name in ('Tt_name','font','spacing','expansion','color', 'string',
## 'priority', 'viewport','worldcoordinate','x','y','projection')) and
## (self.__dict__['Tt_name'] == 'default')):
## raise ValueError, 'The default attributes must not be changed.'
## elif ((name in ('To_name','height','angle','path','halign','valign')) and
## (self.__dict__['To_name'] == 'default')):
## raise ValueError, 'The default attributes must not be changed.'
## elif (name in ('Tt_name','font','spacing','expansion','color', 'string',
## 'priority', 'viewport','worldcoordinate','x','y','projection')):
## self.__dict__['name'] = self.__dict__['Tt_name']
## elif (name in ('To_name','height','angle','path','halign','valign')):
## self.__dict__['name'] = self.__dict__['To_name']
## else:
## raise ValueError, 'This attribute is not valid.'
## # Change Text Table attributes
## if (name == 'Tt_name'):
## if (type(value) == StringType):
## renameTt(self,self.name, value)
## self.__dict__['Tt_name']=value
## else:
## raise ValueError, 'The name attribute must be a string.'
## elif (name == 'font'):
## value = VCS_validation_functions.checkFont('',name,value)
## self.__dict__['font']=value
## setTtmember(self,'font',self.font) # update the plot
## elif (name == 'spacing'):
## if (value == None):
## self.__dict__['spacing']=None
## setTtmember(self,'spacing',self.spacing) # update the plot
## elif (isinstance(value, IntType)):
## if value not in range(-50,51): # must be an integer
## raise ValueError, 'The spacing value must be in the range -50 to 50.'
## else:
## self.__dict__['spacing']=value
## setTtmember(self,'spacing',self.spacing) # update the plot
## else:
## raise ValueError, 'The spacing attribute values must be an integer.'
## elif (name == 'expansion'):
## if (value == None):
## self.__dict__['expansion']=None
## setTtmember(self,'expansion',self.expansion) # update the plot
## elif (isinstance(value, IntType)):
## if value not in range(50,151): # must be an integer
## raise ValueError, 'The expansion value must be in the range 50 to 150.'
## else:
## self.__dict__['expansion']=value
## setTtmember(self,'expansion',self.expansion) # update the plot
## else:
## raise ValueError, 'The expansion attribute value must be an integer.'
## elif (name == 'color'):
## if (value == None):
## self.__dict__['color']=None
## setTtmember(self,'color',self.color) # update the plot
## elif (isinstance(value, IntType)):
## if value not in range(0,256): # must be an integer
## raise ValueError, 'The text table color value must be in the range 0 to 255.'
## else:
## self.__dict__['color']=value
## setTtmember(self,'color',self.color) # update the plot
## else:
## raise ValueError, 'The color attribute value must be an integer in the range 0 to 256.'
## elif (name == 'string'): # Set the string
## if (type(value) == StringType):
## l = []
## l.append( value )
## self.__dict__[name]=l
## setTtmember(self,name,l) # update the plot
## elif ( (type(value) in (ListType, TupleType)) and (value not in [ [], () ]) ):
## value=list(value)
## for x in value:
## if type(x) != StringType:
## raise ValueError, 'List must contain strings only.'
## break
## self.__dict__[name]=value
## setTtmember(self,name,value) # update the plot
## elif value is None:
## self.__dict__[name]=value
## setTtmember(self,name,value) # update the plot
## else:
## raise ValueError, 'Must be a string or a list of strings.'
## return
## elif (name == 'priority'):
## if (value == None):
## self.__dict__['priority']=None
## setTtmember(self,'priority',self.priority) # update the plot
## elif (isinstance(value, IntType)):
## self.__dict__['priority']=value
## setTtmember(self,'priority',self.priority) # update the plot
## else:
## raise ValueError, 'The priority attribute value must be an integer.'
## elif (name == 'viewport'):
## if (value == None):
## self.__dict__[name]= [0.0, 1.0, 0.0, 1.0]
## setTtmember(self,name,[0.0, 1.0, 0.0, 1.0]) # update the plot
## else:
## if (type(value) in (ListType, TupleType)):
## value = list(value) # make sure that values list is a list
## if len(value) != 4:
## self.__dict__[name]= [0.0, 1.0, 0.0, 1.0]
## raise ValueError, 'Viewport must contain 4 integer or float values.'
## else:
## self.__dict__[name]=value
## setTtmember(self,name,value) # update the plot
## else:
## raise ValueError, 'The viewport attribute must be a tuple or list of values.'
## elif (name == 'worldcoordinate'):
## if (value == None):
## self.__dict__[name]= [0.0, 1.0, 0.0, 1.0]
## setTtmember(self,name,[0.0, 1.0, 0.0, 1.0]) # update the plot
## else:
## if (type(value) in (ListType, TupleType)):
## value = list(value) # make sure that values list is a list
## if len(value) != 4:
## self.__dict__[name]= [0.0, 1.0, 0.0, 1.0]
## raise ValueError, 'World coordinates must contain 4 integer or float values.'
## else:
## self.__dict__[name]=value
## setTtmember(self,name,value) # update the plot
## else:
## raise ValueError, 'The world coordinates attribute must be a tuple or list of values.'
## elif (name == 'x'):
## if (value == None):
## self.__dict__[name] = None
## setTtmember(self,name,value) # update the plot
## else:
## if (type(value) in (ListType, TupleType)):
## value = list(value) # make sure that values list is a list
## self.__dict__[name]=value
## setTtmember(self,name,value) # update the plot
## else:
## raise ValueError, 'The x attribute must be a tuple or list of values.'
## elif (name == 'y'):
## if (value == None):
## self.__dict__[name] = None
## setTtmember(self,name,value) # update the plot
## else:
## if (type(value) in (ListType, TupleType)):
## value = list(value) # make sure that values list is a list
## self.__dict__[name]=value
## setTtmember(self,name,value) # update the plot
## else:
## raise ValueError, 'The y attribute must be a tuple or list of values.'
## elif (name == 'projection'):
## if value is None:
## self.__dict__['projection'] = 'default'
## else:
## value=VCS_validation_functions.checkProjection(self,'projection',value)
## self.__dict__[name]= value
## setTtmember(self,name,value) # update the plot
## # Change Text orientation attributes
## elif (name == 'To_name'):
## if (type(value) == StringType):
## renameTo(self,self.name, value)
## self.__dict__['To_name']=value
## else:
## raise ValueError, 'The name attribute must be a string.'
## elif (name == 'height'):
## if (value == None):
## self.__dict__['height']=None
## setTomember(self,'height',self.height) # update the plot
## elif (type(value) in [IntType, FloatType]):
## # if value not in range(1,1001): # must be an integer
## # raise ValueError, 'The height value must be in the range 1 to 1000.'
## # else:
## value = float( value )
## self.__dict__['height']=value
## setTomember(self,'height',self.height) # update the plot
## else:
## raise ValueError, 'The height attribute value must be an integer or float.'
## elif (name == 'angle'):
## if (value == None):
## self.__dict__['angle']=None
## setTomember(self,'angle',self.angle) # update the plot
## elif (isinstance(value, IntType)):
## if value not in range(-360,361): # must be an integer
## raise ValueError, 'The angle value must be in the range -360 to 360.'
## else:
## self.__dict__['angle']=value
## setTomember(self,'angle',self.angle) # update the plot
## else:
## raise ValueError, 'The angle attribute value must be an integer.'
## elif (name == 'path'):
## if (value in ('right', 'left', 'up', 'down', 0, 1, 2, 3)):
## if value in ('right', 0):
## value='right'
## elif value in ('left', 1):
## value='left'
## elif value in ('up', 2):
## value='up'
## elif value in ('down', 3):
## value='down'
## self.__dict__['path']=value
## setTomember(self,'path',self.path) # update the plot
## else:
## raise ValueError, 'The path attribute must be either ("right","left","up","down") or (0,1,2,3).'
## elif (name == 'halign'):
## if (value in ( 'left', 'center', 'right', 0, 1, 2)):
## if value in ('left', 0):
## value='left'
## elif value in ('center', 1):
## value='center'
## elif value in ('right', 2):
## value='right'
## self.__dict__['halign']=value
## setTomember(self,'halign',self.halign) # update the plot
## else:
## raise ValueError, 'The halign attribute must be either ("left","center","right") or (0,1,2).'
## elif (name == 'valign'):
## if (value in ('top', 'cap', 'half', 'base', 'bottom', 0, 1, 2, 3, 4)):
## if value in ('top', 0):
## value='top'
## elif value in ('cap', 1):
## value='cap'
## elif value in ('half', 2):
## value='half'
## elif value in ('base', 3):
## value='base'
## elif value in ('bottom', 4):
## value='bottom'
## self.__dict__['valign']=value
## setTomember(self,'valign',self.valign) # update the plot
## else:
## raise ValueError, 'The valign attribute must be either ("top","cap","half","base","bottom") or (0,1,2,3,4).'
#############################################################################
# #
# List out text combined members (attributes). #
# #
#############################################################################
def list(self):
if ((self.Tt_name == '__removed_from_VCS__') or
(self.To_name == '__removed_from_VCS__')):
raise ValueError, 'This instance has been removed from VCS.'
print "","----------Text combined (Tc) member (attribute) listings ----------"
print "secondary method =", self.s_name
print "","----------Text Table (Tt) member (attribute) listings ----------"
print "Tt_name =",self.Tt_name
print "font =", self.font
print "spacing =", self.spacing
print "expansion =", self.expansion
print "color =", self.color
print "fillincolor =", self.fillincolor
print "priority =", self.priority
print "string =", self.string
print "viewport =", self.viewport
print "worldcoordinate =", self.worldcoordinate
print "x =", self.x
print "y =", self.y
print "projection =", self.projection
print "","----------Text Orientation (To) member (attribute) listings ----------"
print "To_name =",self.To_name
print "height =", self.height
print "angle =", self.angle
print "path =", self.path
print "halign =", self.halign
print "valign =", self.valign
#############################################################################
# #
# Script out secondary text table and orientation methods in VCS to a file. #
# #
#############################################################################
def script(self, script_filename=None,mode=None):
'''
Function: script # Calls _vcs.scripTo
Description of Function:
Saves out a text table and text orientation graphics method in Python or
VCS script form to a designated file.
Example of Use:
script(scriptfile_name,mode)
where: scriptfile_name is the output name of the script file.
mode is either "w" for replace or "a" for append.
Note: If the the filename has a ".py" at the end, it will produce a
Python script. If the filename has a ".scr" at the end, it will
produce a VCS script. If neither extensions are give, then by
default a Python script will be produced.
a=vcs.init()
tc=a.createtextcombined('new_tt','std','new_to','7left')
tc.script('filename.py') # Append to a Python file "filename.py"
tc.script('filename.scr') # Append to a VCS file "filename.scr"
tc.script('filename','w') # Create or overwrite to a Python file "filename.py"
'''
if (script_filename == None):
raise ValueError, 'Error - Must provide an output script file name.'
if (mode == None):
mode = 'a'
elif (mode not in ('w', 'a')):
raise ValueError, 'Error - Mode can only be "w" for replace or "a" for append.'
# By default, save file in json
scr_type = script_filename.split(".")
if len(scr_type)==1 or len(scr_type[-1])>5:
scr_type= "json"
if script_filename!="initial.attributes":
script_filename+=".json"
else:
scr_type = scr_type[-1]
if scr_type == 'scr':
raise DeprecationWarning("scr script are no longer generated")
elif scr_type == "py":
mode = mode + '+'
py_type = script_filename[len(script_filename)-3:len(script_filename)]
if (py_type != '.py'):
script_filename = script_filename + '.py'
# Write to file
fp = open(script_filename,mode)
if (fp.tell() == 0): # Must be a new file, so include below
fp.write("#####################################\n")
fp.write("# #\n")
fp.write("# Import and Initialize VCS #\n")
fp.write("# #\n")
fp.write("#############################\n")
fp.write("import vcs\n")
fp.write("v=vcs.init()\n\n")
unique_name = '__Tt__' + self.Tt_name
fp.write("#----------Text Table (Tt) member (attribute) listings ----------\n")
fp.write("tt_list=v.listelements('texttable')\n")
fp.write("if ('%s' in tt_list):\n" % self.Tt_name)
fp.write(" %s = v.gettexttable('%s')\n" % (unique_name, self.Tt_name))
fp.write("else:\n")
fp.write(" %s = v.createtexttable('%s')\n" % (unique_name, self.Tt_name))
fp.write("%s.font = %g\n" % (unique_name, self.font))
fp.write("%s.spacing = %g\n" % (unique_name, self.spacing))
fp.write("%s.expansion = %g\n" % (unique_name, self.expansion))
fp.write("%s.color = %g\n\n" % (unique_name, self.color))
fp.write("%s.fillincolor = %g\n\n" % (unique_name, self.fillincolor))
fp.write("%s.priority = %d\n" % (unique_name, self.priority))
fp.write("%s.viewport = %s\n" % (unique_name, self.viewport))
fp.write("%s.worldcoordinate = %s\n" % (unique_name, self.worldcoordinate))
fp.write("%s.x = %s\n" % (unique_name, self.x))
fp.write("%s.y = %s\n\n" % (unique_name, self.y))
fp.write("%s.projection = %s\n\n" % (unique_name, self.projection))
unique_name = '__To__' + self.To_name
fp.write("#----------Text Orientation (To) member (attribute) listings ----------\n")
fp.write("to_list=v.listelements('textorientation')\n")
fp.write("if ('%s' in to_list):\n" % self.To_name)
fp.write(" %s = v.gettextorientation('%s')\n" % (unique_name, self.To_name))
fp.write("else:\n")
fp.write(" %s = v.createtextorientation('%s')\n" % (unique_name, self.To_name))
fp.write("%s.height = %g\n" % (unique_name, self.height))
fp.write("%s.angle = %g\n" % (unique_name, self.angle))
fp.write("%s.path = '%s'\n" % (unique_name, self.path))
fp.write("%s.halign = '%s'\n" % (unique_name, self.halign))
fp.write("%s.valign = '%s'\n\n" % (unique_name, self.valign))
fp.close()
else:
#Json type
mode+="+"
f = open(script_filename,mode)
vcs.utils.dumpToJson(self.To,f)
f.close()
f = open(script_filename,'a+')
vcs.utils.dumpToJson(self.Tt,f)
f.close()
#################################################################################
# END OF FILE #
#################################################################################
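#################################################################################
# Illustrative usage (not part of the original module); it follows the          #
# "Example of Use" section of the Tc docstring above.                           #
#################################################################################
if __name__ == '__main__':
    canvas = vcs.init()
    tc = canvas.createtextcombined('example_tt', 'std', 'example_to', '7left')
    tc.string = ['Hello VCS']
    tc.x = [0.5]
    tc.y = [0.5]
    tc.height = 20
    tc.halign = 'center'
    tc.list()                          # dump the combined Tt/To attributes
    tc.script('example_tc.py', 'w')    # save as a Python script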
|
UTF-8
|
Python
| false | false | 31,346 |
py
| 1,284 |
textcombined.py
| 685 | 0.458272 | 0.450871 | 0 | 673 | 45.576523 | 125 |
nunezpaul/MNIST
| 2,216,203,176,232 |
17dcbcf5ac106ba77a5176a8ed1d65ecd5985489
|
ea9b8da85febe980d9155f8799d5dd4abd15bad8
|
/MoS_model.py
|
7a5e8ca9bcd3bef99d80765b239feb2d1e28e49d
|
[
"MIT"
] |
permissive
|
https://github.com/nunezpaul/MNIST
|
e18406b9a3df822975738b5b96435b467dd2b373
|
6d6accb80a2c1614fee93515c38f88c849937c01
|
refs/heads/master
| 2020-03-29T01:28:11.128460 | 2019-01-27T00:38:56 | 2019-01-27T00:38:56 | 149,391,035 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import tensorflow as tf
from basic_model import BasicModel, TrainLoss, TrainRun
from cnn_model import CNNModel
from config import Config
class MOSModel(CNNModel):
def __init__(self):
super(MOSModel, self).__init__()
self.num_components = 3
self.name = 'MoS'
self.dense_mixture_weight = tf.layers.Dense(self.num_components, name='mixture_weight')
self.dense_projected = tf.layers.Dense(self.flat_size * self.num_components, activation=tf.nn.tanh, name='proj')
def _forward(self, img):
# Project flattened img into different spaces
flat = tf.layers.flatten(img)
proj_flat = self.dense_projected(flat)
n_flat = tf.split(proj_flat, self.num_components, axis=-1)
proj_imgs = [tf.reshape(element, (-1, self.img_size[0], self.img_size[1])) for element in n_flat]
# Determine the logits from using the basic model
n_img_embeds = tf.stack([super(MOSModel, self)._forward(proj_img) for proj_img in proj_imgs], axis=-1)
# Calculate component weighting for each sample (also known as gating function)
comp_logits = self.dense_mixture_weight(flat)
normalized_comp_logits = comp_logits - tf.expand_dims(tf.reduce_max(comp_logits, axis=-1), axis=-1)
comp_weight = tf.nn.softmax(normalized_comp_logits)
# Combine img_embeds since they have gone through non-linear units
img_embed = tf.einsum('bcn,bn->bc', n_img_embeds, comp_weight)
# Check that the shapes are as we would expect
assert img.shape[1:] == self.img_size
assert flat.shape[1:] == self.img_size[0] * self.img_size[1]
assert proj_flat.shape[1:] == self.img_size[0] * self.img_size[1] * self.num_components
assert n_flat[0].shape[1:] == n_flat[1].shape[1:] == (self.img_size[0] * self.img_size[1])
assert comp_weight.shape[1:] == self.num_components
assert n_img_embeds.shape[1:] == (self.num_classes, self.num_components)
assert img_embed.shape[1:] == self.num_classes
self.comp_weight = comp_weight
return img_embed
class TrainLoss(TrainLoss):
def __init__(self, model, train_data, test_data=None):
super(TrainLoss, self).__init__(model, train_data, test_data=test_data)
def eval(self, img, label):
metrics = {}
# Loss will be on the negative log likelihood that the img embed belongs to the correct class
logits = self.model(img)
# Determine the output of the component weights
avg_comp_weight = tf.reduce_mean(self.model.comp_weight, axis=0)
for i in range(self.model.num_components):
metrics['Comp_weight_{num}'.format(num=i)] = avg_comp_weight[i]
# Determine the log loss and probability of positive sample
metrics['Log_loss'] = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=label))
metrics['Neg_prob'] = 1 - tf.exp(-metrics['Log_loss'])
# Get the accuracy of prediction from logits compared to the label
prediction = tf.argmax(logits, -1)
metrics['Inaccuracy'] = tf.reduce_mean(tf.to_float(tf.not_equal(prediction, label)))
# Check that shapes are as expected
assert logits.shape[1:] == self.model.num_classes
assert prediction.shape[1:] == label.shape[1:]
assert metrics['Log_loss'].shape == ()
assert metrics['Neg_prob'].shape == ()
assert metrics['Inaccuracy'].shape == ()
return metrics, prediction, label
class TrainRun(TrainRun):
def __init__(self, model, sess, load_dir, lr=0.001):
super(TrainRun, self).__init__(model, sess, load_dir, lr)
if __name__ == '__main__':
config = Config()
with tf.Session() as sess:
model = MOSModel()
tr = TrainRun(model=model, sess=sess, load_dir=config.params['load_dir'], lr=config.params['lr'])
tr.train(sess, save_dir=config.params['save_dir'])
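# --- Illustrative check (not part of the original module) ---------------------
# The tf.einsum('bcn,bn->bc', ...) call in MOSModel._forward is a per-sample
# weighted sum of the component logits; the same contraction in plain NumPy:
def _check_mixture_einsum():
    import numpy as np
    rng = np.random.RandomState(0)
    n_img_embeds = rng.rand(4, 10, 3)                       # (batch, classes, components)
    comp_weight = rng.rand(4, 3)
    comp_weight /= comp_weight.sum(axis=1, keepdims=True)   # rows sum to 1, like a softmax
    combined = np.einsum('bcn,bn->bc', n_img_embeds, comp_weight)
    manual = sum(n_img_embeds[:, :, k] * comp_weight[:, k:k + 1] for k in range(3))
    assert np.allclose(combined, manual)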
|
UTF-8
|
Python
| false | false | 3,951 |
py
| 8 |
MoS_model.py
| 7 | 0.641863 | 0.633257 | 0 | 90 | 42.9 | 120 |
git-disl/LRBench
| 10,780,367,948,258 |
1d5075563ec8fb1a572f40f71a08c4071a16319d
|
6473994584abcd49c775f30ec0311b2338002e8e
|
/LRBench/database/DBDriver.py
|
53599255eddad0538109a63fd5c07cbdf7460b98
|
[
"Apache-2.0"
] |
permissive
|
https://github.com/git-disl/LRBench
|
e045e50bb2a81caec18be597ca85f129b82a6a2f
|
c1e541b48bf1b48a34f401dbbe7db96c2c6de7a4
|
refs/heads/master
| 2023-05-27T19:24:28.099679 | 2023-05-19T18:18:47 | 2023-05-19T18:18:47 | 201,529,964 | 16 | 2 | null | false | 2023-05-19T18:12:24 | 2019-08-09T19:37:58 | 2023-03-23T04:29:13 | 2023-05-19T18:12:23 | 11,532 | 18 | 1 | 0 |
Python
| false | false |
# DeepLR Database Module
class DBDriver(object):
def __init__(self, _dbName, _dbUser, _dbAddr, _dbPasswd):
self.__dbName__ = _dbName
self.__dbUser__ = _dbUser
self.__dbAddr__ = _dbAddr
self.__dbPasswd__ = _dbPasswd
self.__dbStatus__ = 'init'
self.__dbCursor__ = None
def connect(self):
raise NotImplementedError
def getCursor(self):
raise NotImplementedError
def execute(self, _sqlStatement):
raise NotImplementedError
# LRBench APIs
def insertLR(self, dataset, network, lrPolicy):
raise NotImplementedError
def queryLRs(self, dataset, network, lrPolicy):
raise NotImplementedError
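# --- Illustrative subclass (not part of the original module) ------------------
# Sketches how the abstract driver could be backed by the standard sqlite3
# module; only the connection plumbing is shown, and the LRBench-specific
# insertLR/queryLRs methods are left to a real implementation.
import sqlite3

class SQLiteDBDriver(DBDriver):
    def connect(self):
        self.__connection__ = sqlite3.connect(self.__dbName__)
        self.__dbStatus__ = 'connected'

    def getCursor(self):
        if self.__dbCursor__ is None:
            self.__dbCursor__ = self.__connection__.cursor()
        return self.__dbCursor__

    def execute(self, _sqlStatement):
        cursor = self.getCursor()
        cursor.execute(_sqlStatement)
        self.__connection__.commit()
        return cursor.fetchall()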
|
UTF-8
|
Python
| false | false | 720 |
py
| 48 |
DBDriver.py
| 31 | 0.606944 | 0.606944 | 0 | 26 | 26.692308 | 61 |
MinhNghiaD/Smart_building
| 2,216,203,134,111 |
29149742dc54fe0d89347d50690fe97693ebe845
|
110dc86ca5869f359593e6aade349e63fcd77f52
|
/ThingAdaptor/mqtt_adapter.py
|
4cac0d5292ace584cf06aa66682c143a1f705616
|
[] |
no_license
|
https://github.com/MinhNghiaD/Smart_building
|
7d05f0dee8db0e54f313de4386133cd12fa056d3
|
83604e593215c853d8d7e12721f9460220ee6bc4
|
refs/heads/master
| 2020-04-11T18:52:43.978724 | 2019-06-17T19:36:51 | 2019-06-17T19:36:51 | 162,014,804 | 1 | 0 | null | false | 2019-01-10T07:33:18 | 2018-12-16T15:30:33 | 2018-12-16T15:43:14 | 2019-01-10T07:33:18 | 12 | 0 | 0 | 0 |
C++
| false | null |
import paho.mqtt.client as mqtt
import threading
import time
import logging
def on_connect(client, userdata, flags, returnCode):
if returnCode == 0:
client.connected_flag = True
print("connected")
else:
print("bad connection, return code: {}".format(returnCode))
def on_disconnect(client, userdata, returnCode=0):
logging.debug("Disconnected result code : " + str(returnCode))
client.loop_stop()
class Reactor:
'''contains list of controller (object that used control channels)
when a message arrive, it distributes the message and to topic to all controller
a controller can be activate or desactivate'''
def __init__(self):
self.activeControllers = []
self.inactiveControllers = []
def addController(self, controller):
self.activeControllers.append(controller)
def desactivateController(self, controller):
self.inactiveControllers.append(controller)
self.activeControllers.remove(controller)
def activateController(self, controller):
self.activeControllers.append(controller)
self.inactiveControllers.remove(controller)
def removeController(self, controller):
if controller in self.activeControllers:
self.activeControllers.remove(controller)
if controller in self.inactiveControllers:
self.inactiveControllers.remove(controller)
def getController(self, id):
for controller in self.activeControllers:
if controller.id == id :
return controller
for controller in self.inactiveControllers:
if controller.id == id:
return controller
def reactToCommand(self, client, userdata, message):
for controller in self.activeControllers:
controller.execute(message)
class Controller:
'''define syntax control in mainFunc passed as an argument'''
def __init__(self, id, topic, mainFunc):
self.id = id
self.topic = topic
self.mainFunc = mainFunc
def execute(self, message):
if (message.topic == self.topic):
self.mainFunc(message.payload.decode("utf-8")) #if match the topic -> pass the command message to mainFunc to execute
class Sender:
'''Regroup message and send it to the broker'''
def __init__(self):
self.channels = []
def addChannel(self, name, topic, channel, interval=10):
# minimal completion of this unfinished method: run the channel callback once
# after `interval` seconds and remember the channel (assumes `channel` is callable)
timer = threading.Timer(interval, channel)
timer.start()
self.channels.append(SendMessage(topic, channel))
class SendMessage:
def __init__(self, topic, channel):
self.topic = topic
self.channel = channel
mqtt.Client.connected_flag = False
class MQTTAdaptor:
def __init__(self, clientID, clean_session=True, userdata=None, brokerIP="127.0.0.1"):
self.client = mqtt.Client(clientID, clean_session, userdata)
self.reactor = Reactor()
self.client.on_connect = on_connect #on_connect callback
self.client.on_disconnect = on_disconnect
self.client.on_message = self.reactor.reactToCommand
self.client.loop_start()
print("connecting to MQTT broker " + brokerIP)
self.client.connect(brokerIP)
while not self.client.connected_flag:
time.sleep(1)
def __del__(self):
self.client.loop_stop()
self.client.disconnect()
def addController(self, name, topic, mainFunc):
try:
self.client.subscribe(topic)
self.reactor.addController(Controller(name, topic, mainFunc))
except:
print("error occurs when adding new controller")
def removeController(self, name):
controller = self.reactor.getController(name)
self.reactor.removeController(controller)
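# --- Illustrative usage (not part of the original module) ---------------------
# Wires a single controller that echoes commands from one topic.  The broker
# address and topic name below are assumptions for the sketch.
if __name__ == '__main__':
    def echo_command(payload):
        print("received command:", payload)

    adaptor = MQTTAdaptor("demo-client", brokerIP="127.0.0.1")
    adaptor.addController("echo", "building/room1/light", echo_command)
    try:
        time.sleep(60)                     # keep listening for a minute
    finally:
        adaptor.removeController("echo")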
|
UTF-8
|
Python
| false | false | 3,528 |
py
| 18 |
mqtt_adapter.py
| 5 | 0.663265 | 0.659864 | 0 | 96 | 35.75 | 129 |
sn94/learn-python
| 1,322,849,940,086 |
f2ced38eda7054f44a56ad874342fe3b7c473059
|
fa7ad04e29325c69a49a8cf068f144f6eacb1e91
|
/VC/image-proccesing/image-treatment/canal_rojo.py
|
4fbc59ee9cb566a7c0d467f879216c98c13e64e9
|
[] |
no_license
|
https://github.com/sn94/learn-python
|
d9ac60ffa1d9e95b81285efb5415bc6b53ff3f40
|
aadcdb82208497e615ed3d3cbf87d727c2fd3281
|
refs/heads/master
| 2020-03-23T02:48:07.149647 | 2018-12-09T01:15:51 | 2018-12-09T01:15:51 | 140,992,842 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from scipy import misc
import numpy as np
face= misc.imread( '../../imagenes/Fruit/pera/260px-Pera.jpg' )
#np.set_printoptions( threshold= np.NaN)
print( face[:,:,0] ) # red channel (index 0 in RGB)
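# --- Illustrative alternative (not part of the original script) ----------------
# scipy.misc.imread was removed in SciPy 1.2; imageio offers an equivalent call.
import imageio
img = imageio.imread('../../imagenes/Fruit/pera/260px-Pera.jpg')
print(img[:, :, 0])   # red channel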
|
UTF-8
|
Python
| false | false | 172 |
py
| 70 |
canal_rojo.py
| 50 | 0.686047 | 0.662791 | 0 | 7 | 23.714286 | 65 |
leblowl/terraform-example
| 13,743,895,364,125 |
da984957be383e1c927ab6f129c87b52d105d1f5
|
74738a6d6f4c371e6c6b068ba8328d8c3d455d82
|
/update-circleci-env.py
|
5de8f58ca7546cc7b7419454f347fc031ef47b09
|
[
"LicenseRef-scancode-warranty-disclaimer"
] |
no_license
|
https://github.com/leblowl/terraform-example
|
4f754d2fcdd06db82e80deeae37c13bf30600ee5
|
fa166659078284f9b8bbd4bc0a2c9bd43431b5ba
|
refs/heads/master
| 2021-05-01T03:23:41.170471 | 2016-10-27T12:33:54 | 2016-10-27T12:33:54 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import requests
import argparse
import json
import yaml
address = "https://circleci.com/api/v1.1/project/github/xxx"
def set_env_var_for_project(token, project, name, value):
api_request = "{}/{}/envvar?circle-token={}".format(address, project, token)
var = {'name': name, 'value': value}
print("-> {}\n{}".format(api_request, json.dumps(var)))
r = requests.post(api_request, data=json.dumps(var), headers={'content-type': 'application/json'})
r.raise_for_status()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('circleci_token', help='CircleCI API token.')
args = parser.parse_args()
with open('circleci.env.yml', "r") as f:
vars = yaml.load(f)
with open('circleci.projects.yml', "r") as f:
projects = yaml.load(f).get('projects')
for project in projects:
for key in vars:
set_env_var_for_project(args.circleci_token, project, key, vars.get(key))
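# --- Illustrative input files (not part of the original script) ----------------
# circleci.env.yml is read as a flat name -> value mapping and
# circleci.projects.yml must contain a top-level `projects` list.  The helper
# below writes minimal examples with made-up variable and project names.
def write_example_configs():
    with open('circleci.env.yml', 'w') as f:
        yaml.dump({'MY_VAR': 'my-value'}, f, default_flow_style=False)
    with open('circleci.projects.yml', 'w') as f:
        yaml.dump({'projects': ['my-service', 'my-other-service']}, f, default_flow_style=False)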
|
UTF-8
|
Python
| false | false | 995 |
py
| 104 |
update-circleci-env.py
| 30 | 0.641206 | 0.639196 | 0 | 33 | 29.151515 | 102 |
pomodorox/forch
| 7,095,285,990,130 |
24ccf30c3670335117c089b655083c503bb41cf4
|
a1b97d8b702d466fd0e1fbf8b66243de24f73b4a
|
/forch/device_report_server.py
|
8ded668364865cb3e824218b1de82525066d8238
|
[
"Apache-2.0"
] |
permissive
|
https://github.com/pomodorox/forch
|
a1ae7bcd4e007bee696ac20062b88fb0261a9663
|
13816a4b29b34cb6e6994c22f2f3b35b7ba7e482
|
refs/heads/master
| 2023-07-20T02:18:07.187923 | 2021-05-11T05:09:53 | 2021-05-11T05:09:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""gRPC server to receive devices state"""
from concurrent import futures
from queue import Queue
import threading
import grpc
from forch.base_classes import DeviceStateReporter
from forch.utils import get_logger
import forch.proto.grpc.device_report_pb2_grpc as device_report_pb2_grpc
from forch.proto.shared_constants_pb2 import Empty, PortBehavior
from forch.proto.devices_state_pb2 import DevicePortEvent
DEFAULT_ADDRESS = '0.0.0.0'
DEFAULT_PORT = 50051
DEFAULT_MAX_WORKERS = 10
class DeviceEntry:
"""Utility class for device entries"""
mac = None
vlan = None
assigned = None
port_up = None
class DeviceReportServicer(device_report_pb2_grpc.DeviceReportServicer):
"""gRPC servicer to receive devices state"""
def __init__(self, on_receiving_result):
super().__init__()
self._on_receiving_result = on_receiving_result
self._logger = get_logger('drserver')
self._port_device_mapping = {}
self._port_events_listeners = {}
self._mac_assignments = {}
self._lock = threading.Lock()
def _get_port_event(self, device):
port_state = PortBehavior.PortState.up if device.port_up else PortBehavior.PortState.down
return DevicePortEvent(state=port_state, device_vlan=device.vlan,
assigned_vlan=device.assigned)
def _get_device(self, mac_addr):
for device in self._port_device_mapping.values():
if device.mac == mac_addr:
return device
return None
def _send_device_port_event(self, device):
if not device or device.mac not in self._port_events_listeners:
return
port_event = self._get_port_event(device)
self._logger.info('Sending %d DevicePortEvent %s %s %s %s',
len(self._port_events_listeners[device.mac]), device.mac,
# pylint: disable=no-member
port_event.state, device.vlan, device.assigned)
for queue in self._port_events_listeners[device.mac]:
queue.put(port_event)
def process_port_state(self, dp_name, port, state):
"""Process faucet port state events"""
with self._lock:
device = self._port_device_mapping.setdefault((dp_name, port), DeviceEntry())
device.port_up = state
if not state:
device.assigned = None
device.vlan = None
self._send_device_port_event(device)
def process_port_learn(self, dp_name, port, mac, vlan):
"""Process faucet port learn events"""
with self._lock:
device = self._port_device_mapping.setdefault((dp_name, port), DeviceEntry())
device.mac = mac
device.vlan = vlan
device.port_up = True
device.assigned = self._mac_assignments.get(mac)
self._send_device_port_event(device)
def process_port_assign(self, mac, assigned):
"""Process assigning a device to a vlan"""
self._mac_assignments[mac] = assigned
with self._lock:
for mapping in self._port_device_mapping:
device = self._port_device_mapping.get(mapping)
if device.mac == mac:
device.assigned = assigned
if not assigned:
device.vlan = None
device.port_up = False
self._send_device_port_event(device)
return
# pylint: disable=invalid-name
def ReportDevicesState(self, request, context):
"""RPC call for client to send devices state"""
if not request:
self._logger.warning('Received empty request in gRPC ReportDevicesState')
return Empty()
self._logger.info(
'Received DevicesState of %d devices', len(request.device_mac_behaviors))
# Closes DevicePortEvent streams in GetPortState
for mac in request.device_mac_behaviors.keys():
for queue in self._port_events_listeners.get(mac, []):
queue.put(False)
self._on_receiving_result(request)
return Empty()
# pylint: disable=invalid-name
def GetPortState(self, request, context):
listener_q = Queue()
self._logger.info('Attaching response channel for device %s', request.mac)
self._port_events_listeners.setdefault(request.mac, []).append(listener_q)
device = self._get_device(request.mac)
if device:
yield self._get_port_event(device)
while True:
item = listener_q.get()
if item is False:
break
yield item
self._port_events_listeners[request.mac].remove(listener_q)
class DeviceReportServer(DeviceStateReporter):
"""Devices state server"""
def __init__(self, on_receiving_result, address=None, port=None, max_workers=None):
self._server = grpc.server(
futures.ThreadPoolExecutor(max_workers=max_workers or DEFAULT_MAX_WORKERS))
self._servicer = DeviceReportServicer(on_receiving_result)
device_report_pb2_grpc.add_DeviceReportServicer_to_server(self._servicer, self._server)
server_address_port = f'{address or DEFAULT_ADDRESS}:{port or DEFAULT_PORT}'
self._server.add_insecure_port(server_address_port)
def disconnect(self, mac):
"""Process a port disconnect"""
self._servicer.process_port_assign(mac, None)
def process_port_state(self, dp_name, port, state):
"""Process faucet port state events"""
self._servicer.process_port_state(dp_name, port, state)
def process_port_learn(self, dp_name, port, mac, vlan):
"""Process faucet port learn events"""
self._servicer.process_port_learn(dp_name, port, mac, vlan)
def process_port_assign(self, mac, vlan):
"""Process faucet port vlan assignment"""
self._servicer.process_port_assign(mac, vlan)
def start(self):
"""Start the server"""
self._server.start()
def stop(self):
"""Stop the server"""
self._server.stop(grace=None)
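A minimal driver sketch for the DeviceReportServer defined above; the callback body, the address/port values, and the Faucet-style event arguments are illustrative assumptions rather than part of the original file.

# Hypothetical usage of the classes above; all concrete values are made up for illustration.
import time

def on_result(devices_state):
    # devices_state is the DevicesState message delivered via ReportDevicesState
    print('received behaviors for %d devices' % len(devices_state.device_mac_behaviors))

server = DeviceReportServer(on_result, address='0.0.0.0', port=50051, max_workers=4)
server.start()

# Feed the server the same kinds of events Faucet would generate.
server.process_port_state('dp1', 1, True)
server.process_port_learn('dp1', 1, '00:11:22:33:44:55', 100)
server.process_port_assign('00:11:22:33:44:55', 'assigned-sequester')

time.sleep(60)  # give clients time to stream DevicePortEvent updates
server.stop()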
|
UTF-8
|
Python
| false | false | 6,134 |
py
| 12 |
device_report_server.py
| 8 | 0.621128 | 0.618357 | 0 | 165 | 36.175758 | 97 |
wangkai111/supermarket
| 13,091,060,353,944 |
18520aef84fd623661f96d6f9e00ccf2ed307aa9
|
dbbbc7adc2973acdd0825b7958dc3c3403435807
|
/market/apps/order/migrations/0001_initial.py
|
9f6fcb03404cf5d82484f615aa14256cfc7784c7
|
[] |
no_license
|
https://github.com/wangkai111/supermarket
|
56f8ab68d785a791b6854dc031aae054e201151d
|
e795a2f5012be1fb72fa98aef8577e1f321140af
|
refs/heads/master
| 2020-04-07T04:34:45.333666 | 2018-12-17T07:54:02 | 2018-12-17T07:54:02 | 158,058,367 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-11-30 19:03
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Transport',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='注册时间')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('isDelete', models.BooleanField(default=False, verbose_name='是否删除')),
('tran_name', models.CharField(max_length=50, verbose_name='运输名字')),
('tran_price', models.DecimalField(decimal_places=2, max_digits=9, verbose_name='运费')),
],
options={
'verbose_name': '运输方式',
'verbose_name_plural': '运输方式',
'db_table': 'transport',
},
),
]
|
UTF-8
|
Python
| false | false | 1,171 |
py
| 39 |
0001_initial.py
| 28 | 0.559428 | 0.541555 | 0 | 32 | 33.96875 | 114 |
eronekogin/leetcode
| 7,146,825,604,182 |
0b13f52ce0f50c4612220180ffd66324276696a9
|
1b8a99a4ff80da51dc81dd8354bf9bf1cbd25a8b
|
/2021/insufficient_nodes_in_root_to_leaf_paths.py
|
86fa1ce095421fb2f3e2133ab5e106814fdc6ca2
|
[] |
no_license
|
https://github.com/eronekogin/leetcode
|
ea639eebe0cd70af9eb4cba59bc68f636d7b3e0c
|
edb870f83f0c4568cce0cacec04ee70cf6b545bf
|
refs/heads/master
| 2023-08-16T10:35:57.164176 | 2023-08-14T11:25:33 | 2023-08-14T11:25:33 | 163,679,450 | 0 | 0 | null | false | 2021-09-09T12:04:44 | 2018-12-31T15:33:06 | 2021-09-09T12:04:01 | 2021-09-09T12:03:58 | 3,671 | 0 | 0 | 0 |
Python
| false | false |
"""
https://leetcode.com/problems/insufficient-nodes-in-root-to-leaf-paths/
"""
from test_helper import TreeNode
class Solution:
def sufficientSubset(self, root: TreeNode, limit: int) -> TreeNode:
if not root.left and not root.right: # A leaf node.
if root.val < limit:
return None
return root
if root.left: # Calculate new left.
root.left = self.sufficientSubset(root.left, limit - root.val)
if root.right: # Calculate new right.
root.right = self.sufficientSubset(root.right, limit - root.val)
if root.left or root.right: # If having any new left or new right.
return root
return None # If the current node is removed.
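A small worked check for the solution above, assuming test_helper.TreeNode accepts (val, left=None, right=None); the tree shape and limit value are made up for illustration.

# Tree:      1          Root-to-leaf sums: 1+2+4 = 7 and 1+3 = 4.
#           / \         With limit = 8 every path is insufficient,
#          2   3        so the whole tree is pruned and None is returned.
#         /
#        4
root = TreeNode(1, TreeNode(2, TreeNode(4)), TreeNode(3))
print(Solution().sufficientSubset(root, 8))  # expected: None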
|
UTF-8
|
Python
| false | false | 755 |
py
| 1,456 |
insufficient_nodes_in_root_to_leaf_paths.py
| 1,454 | 0.618543 | 0.618543 | 0 | 26 | 28.038462 | 76 |
TravisN959/Rally
| 16,441,134,823,297 |
a7f13053247e2c13117a90b3d7caddfd1043b74d
|
2ebea0cff7136f8d811b621c4df9dafaead097ee
|
/accounts.py
|
a4d9063a4bde36a82cd058b8a2f7179291c195d7
|
[] |
no_license
|
https://github.com/TravisN959/Rally
|
290255efd26d31f4cd846ffc6fd2d09f8206a227
|
68d6ba9c5a048995c709982932af3818b0e5c7db
|
refs/heads/main
| 2023-03-04T06:06:56.258504 | 2021-02-20T00:40:14 | 2021-02-20T00:40:14 | 339,605,626 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pymongo
import mongoKEYS
client = pymongo.MongoClient(mongoKEYS.getKEY())
# from boto.s3.connection import S3Connection
# client = S3Connection(os.environ['MONGO_KEY'])
database = client["Rally"]
collection = database["AccountLogin"]
def checkDuplicateUsername(username):
query = {
"username" : username
}
found = collection.count_documents(query, limit = 1)
if found != 0:
return True
else:
return False
def addAccount(username, password):
info = {
'username' : username,
'password' : password
}
collection.insert_one(info)
return True
def validateLogin(username, password):
query = {
"username": username,
"password": password
}
found = collection.count_documents(query, limit = 1)
if found != 0:
return True
else:
return False
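A brief usage sketch for the helpers above; the credentials are invented, and it assumes the MongoDB instance behind mongoKEYS.getKEY() is reachable. Note that the module stores the password exactly as given (no hashing).

# Illustrative only: 'alice' / 's3cret' are placeholder credentials.
if not checkDuplicateUsername('alice'):
    addAccount('alice', 's3cret')
print(validateLogin('alice', 's3cret'))  # True once the document exists
print(validateLogin('alice', 'wrong'))   # False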
|
UTF-8
|
Python
| false | false | 870 |
py
| 20 |
accounts.py
| 7 | 0.63908 | 0.631034 | 0 | 38 | 21.894737 | 56 |
alvinwan/Puhjiii
| 14,980,845,943,721 |
24b7ee30907f2ca55ea786087eac044a52cfa365
|
028ef598a1da5e865691db12a92a7e894d6a20ed
|
/server/__init__.py
|
5c1c8bea10d9478b789f64fb259724a2ddf75620
|
[] |
no_license
|
https://github.com/alvinwan/Puhjiii
|
e051b39f236525cb8e3cfbdc24fe4bb243708996
|
bf633015fe13a7a60cd1137b16b8d2cec6b09d39
|
refs/heads/master
| 2021-01-22T09:47:55.058092 | 2015-07-10T17:49:43 | 2015-07-10T17:49:43 | 37,454,548 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from flask import Flask
from flask_mongoengine import MongoEngine
from flask_login import LoginManager
from flask_bcrypt import Bcrypt
from flask_kvsession import KVSessionExtension
from simplekv.db.mongo import MongoStore
from simplekv import KeyValueStore
import config
# Create and name app
app = Flask(__name__,
static_folder=config.STATIC_PATH)
# database connection
app.config['MONGODB_SETTINGS'] = {'DB': config.DB}
app.config['SECRET_KEY'] = config.SECRET_KEY
app.debug = config.DEBUG
# initialize MongoEngine with app
db = MongoEngine()
db.init_app(app)
store = MongoStore(
getattr(db.connection, config.DB),
config.SESSION_STORE)
session = KeyValueStore()
# Substitute client-side with server-side sessions
kv_session = KVSessionExtension()
kv_session.init_app(app, store)
# initialize Flask-Login with app
login_manager = LoginManager()
login_manager.init_app(app)
# initialize encryption mechanism
bcrypt = Bcrypt()
bcrypt.init_app(app)
from server.auth.views import mod_auth
from server.nest.views import mod_nest
from server.public.views import mod_public
from server.nest.libs import Plugin
Plugin.load_views()
# Register blueprints
app.register_blueprint(mod_auth)
app.register_blueprint(mod_nest)
app.register_blueprint(mod_public)
# add filters
from server import filters
|
UTF-8
|
Python
| false | false | 1,316 |
py
| 107 |
__init__.py
| 68 | 0.783435 | 0.783435 | 0 | 53 | 23.849057 | 50 |
rchicoli/ispycode-python
| 14,259,291,461,186 |
a89481f068717aa4032d2100258999db06c99fc0
|
f7c07caa1210d2a08e8433cdd854b1232efa88e3
|
/Date-And-Time-Modules/Calendar-Module/Iterate-Days-In-A-Month.py
|
446e24504e6bc818c8d301ed2d0561617037c16e
|
[] |
no_license
|
https://github.com/rchicoli/ispycode-python
|
c2fbecc28bf32933150986d24f77b7297f50b78e
|
fa27f2377943ac2e4d983065406578151091e3f5
|
refs/heads/master
| 2020-03-20T11:34:59.698618 | 2018-06-14T21:14:02 | 2018-06-14T21:14:02 | 137,407,150 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import calendar
c = calendar.Calendar()
for date in c.itermonthdates(2016, 7):
print(date)
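One refinement worth knowing: Calendar.itermonthdates pads the iteration with days from the neighbouring months so that complete weeks are returned. A small variant of the snippet above that keeps only the July 2016 dates:

import calendar

c = calendar.Calendar()
for date in c.itermonthdates(2016, 7):
    if date.month == 7:  # skip the late-June / early-August padding days
        print(date)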
|
UTF-8
|
Python
| false | false | 100 |
py
| 663 |
Iterate-Days-In-A-Month.py
| 663 | 0.69 | 0.64 | 0 | 5 | 17.8 | 38 |
eunice730711/Data-Analysis-with-Python
| 11,579,231,863,547 |
3409e0c3806781893d29d0a63d40ad13ba221ebe
|
5113f5a8b1d8777b637c20dbf2270fba7fa12ec2
|
/semi-conductor data analysis/q5.py
|
7fb079d0e74edf1062fc3dcefed06574c13e642c
|
[] |
no_license
|
https://github.com/eunice730711/Data-Analysis-with-Python
|
a13bdd46f22a370c6d53da79f2533ece6e882872
|
8a5e5f8fbe18057df22113cc0b72cc2ee213172b
|
refs/heads/master
| 2020-05-25T14:55:38.444201 | 2017-02-14T14:51:49 | 2017-02-14T14:51:49 | 69,885,482 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
import csv
def check(filename):
    # Track the largest number of distinct 6-character prefixes of column 4
    # seen between consecutive TOOL_PM_START markers in the sorted tool log.
    f = open(filename,"r")
csvreader = csv.reader(f)
tool = list(sorted(csvreader, key=lambda row: row[7]))
#tool = list(csvreader)
number = 0
maxn = 0
s = set()
for items in tool:
if items[8] == "TOOL_PM_START":
number = len(s)
s.clear()
if number > maxn:
maxn = number
elif items[8] != "TOOL_PM_FINISH":
s.add(items[4][:6])
print(maxn)
return maxn
ans = 0
for i in range(4):
name = "ToolLog_Tool_J_" + str(i+1) + ".csv"
n = check(name)
if n > ans:
ans = n
print(ans)
|
UTF-8
|
Python
| false | false | 555 |
py
| 24 |
q5.py
| 23 | 0.605405 | 0.587387 | 0 | 30 | 17.533333 | 55 |
qfxlcyc/nn-depency-parser
| 15,702,400,451,850 |
0ef9c998d9b263514e84a351f8f1fd4ba84a5838
|
fad029294ccc1c7e58e4ac643eb7d6fc1de4a3e1
|
/preprocess.py
|
7f87afdcb6b94ef0bab328bab6ce744e27071402
|
[
"Apache-2.0"
] |
permissive
|
https://github.com/qfxlcyc/nn-depency-parser
|
60e5e587da965c0517e163b9d9dddf4ec29b90d0
|
add939442d75f54609324b4871a94d677b8e41ce
|
refs/heads/master
| 2020-05-27T23:01:04.999863 | 2018-10-08T23:20:25 | 2018-10-08T23:20:25 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import tensorflow as tf
import numpy as np
import argparse
import trainer.data_helper as data_helper
from trainer.parser import Parser
import os
OOV = "--OOV--"
NULL = "--NULL--"
def load_data(path):
""" example of a record:
1 The _ DET DT _ 4 det _ _
2 complex _ ADJ JJ _ 4 amod _ _
3 financing _ NOUN NN _ 4 compound _ _
4 plan _ NOUN NN _ 10 nsubj _ _
5 in _ ADP IN _ 9 case _ _
6 the _ DET DT _ 9 det _ _
7 S&L _ NOUN NN _ 9 compound _ _
8 bailout _ NOUN NN _ 9 compound _ _
9 law _ NOUN NN _ 4 nmod _ _
10 includes _ VERB VBZ _ 0 root _ _
11 raising _ VERB VBG _ 10 xcomp _ _
12 $ _ SYM $ _ 11 dobj _ _
13 30 _ NUM CD _ 14 compound _ _
14 billion _ NUM CD _ 12 nummod _ _
15 from _ ADP IN _ 16 case _ _
16 debt _ NOUN NN _ 11 nmod _ _
17 issued _ VERB VBN _ 16 acl _ _
18 by _ ADP IN _ 22 case _ _
19 the _ DET DT _ 22 det _ _
20 newly _ ADV RB _ 21 advmod _ _
21 created _ VERB VBN _ 22 amod _ _
22 RTC _ PROPN NNP _ 17 nmod _ _
23 . _ PUNCT . _ 10 punct _ _
"""
with open(path, 'r') as f:
data = f.read().split('\n\n')
data = [record.split('\n') for record in data]
data = [[l.split('\t') for l in record] for record in data]
sentence, pos, arc_labels, trans = [], [], [], []
for record in data:
min_len = min(len(l) for l in record)
if min_len < 7:
print "record with invalid line"
print record
continue
sentence.append([l[1].lower() for l in record])
pos.append([l[4] for l in record])
arc_labels.append([l[7] for l in record])
trans.append([(int(l[0]), int(l[6])) for l in record])
return sentence, pos, arc_labels, trans
def load_embed(path):
with open(path, 'r') as f:
embed = f.readlines()
return embed
def write_embed(path, embed):
with open(path, 'w') as f:
for e in embed:
f.write(e)
class Preprocess:
def __init__(self, config):
self.config = config
def load(self, file_path):
self.sentences, self.pos, self.arc_labels, self.arcs = load_data(file_path)
def fit(self):
config = self.config
arc2id_dict = create_item2id_dict(self.arc_labels)
pos2id_dict = create_item2id_dict(self.pos)
data_helper.write_json_to_gs(arc2id_dict, config.arc2id_file)
data_helper.write_json_to_gs(pos2id_dict, config.pos2id_file)
embed = load_embed(config.embedding_file)
print "convert words to indices"
word2id_dict = create_word2id_dict(embed)
# # print "add --OOV-- and --NULL-- to embeddings"
# print "add --NULL-- to embeddings"
# embed_size = len(embed[0].split(' '))
# print embed_size
# # added = ['\t'.join(["%s" % (word)] + [' '.join([str(0.)]*embed_size)]) + '\n' for word in [OOV, NULL]]
# added = ['\t'.join(["%s" % (NULL)] + [' '.join([str(0.)]*embed_size)]) + '\n']
# write_embed(config.embedding_file, added + embed)
print "create id2word dict for prediction use"
id2word_dict = {i:w for w, i in word2id_dict.items()}
data_helper.write_json_to_gs(id2word_dict, config.id2word_file)
data_helper.write_json_to_gs(word2id_dict, config.word2id_file)
print "create label2id dict"
label2id_dict = create_ulabel2id_dict(arc2id_dict)
data_helper.write_json_to_gs(label2id_dict, config.label2id_file)
print "create meta file"
meta = {
"label_dim": len(label2id_dict),
"word_feature_dim": 18,
"pos_feature_dim": 18,
"arc_feature_dim": 12,
"num_pos_class": len(pos2id_dict),
"num_arc_class": len(arc2id_dict)
}
data_helper.write_json_to_gs(meta, config.meta_file)
def transform(self, output_path):
config = self.config
arc2id_dict = data_helper.load_json_from_gs(config.arc2id_file)
pos2id_dict = data_helper.load_json_from_gs(config.pos2id_file)
label2id_dict = data_helper.load_json_from_gs(config.label2id_file)
word2id_dict = data_helper.load_json_from_gs(config.word2id_file)
print "convert arc labels, pos and words to indexes"
arc_labels = list2id(self.arc_labels, arc2id_dict)
pos = list2id(self.pos, pos2id_dict)
sentences = list2id(self.sentences, word2id_dict)
print "generate labels"
parser = Parser(skip2shifts=False)
labels = [parser.create_labels(arc, arc_la, label2id_dict) for arc, arc_la in zip(self.arcs, self.arc_labels)]
print "create dataset"
create_data_for_model(output_path, parser, sentences, pos, arc_labels, labels, label2id_dict)
def _create_x2id_dict_with_default(item_set):
x2id_dict = {e:i for i, e in enumerate(item_set, 1)}
# x2id_dict[OOV] = 0
x2id_dict[NULL] = 0
return x2id_dict
def create_word2id_dict(embed):
def extract_word(e):
return e.split('\t')[0]
vocab = [extract_word(e) for e in embed]
return _create_x2id_dict_with_default(set(vocab))
def create_label2id_dict(arc2id_dict):
label2id_dict = {'SHIFT': 0}
i = 0
for k in arc2id_dict:
# if k in [OOV, NULL]: continue
if k == NULL: continue
for j, direction in enumerate(['LEFT', 'RIGHT']):
label2id_dict["%s_%s" % (direction, k)] = 2 * i + j + 1
i += 1
return label2id_dict
def create_ulabel2id_dict(arc2id_dict):
label2id_dict = {'SHIFT': 0, 'LEFT': 1, 'RIGHT': 2}
return label2id_dict
def create_item2id_dict(items):
item_set = set()
for l in items:
item_set = item_set.union(set(l))
return _create_x2id_dict_with_default(item_set)
def list2id(lists, item2id_dict, default=0):
list_ids = []
for l in lists:
list_ids.append([item2id_dict.get(i, default) for i in l])
return list_ids
def create_data_for_model(data_path, parser, sentence_ids, pos_ids, arc_ids, labels, label2id_dict):
with open(data_path, 'w') as f:
count = 0
num_class = len(label2id_dict)
for i, (sent, pos, arc, la) in enumerate(zip(sentence_ids, pos_ids, arc_ids, labels)):
parser.fit(len(sent))
# for l in la[2:-1]: # skip first two 'shift' and last 'right-arc' actions
for l in la:
parser.step(l)
one_hot_label = [0]*num_class
one_hot_label[l] = 1
row = [i] + parser.create_features(sent, pos, arc) + one_hot_label
# print row
f.write(','.join(str(e) for e in row)+'\n')
count += 1
if count % 100 == 0:
print "created data from %s sentences" % (count)
if __name__ == '__main__':
project_dir = "./"
data_dir = os.path.join(project_dir, "data")
arc2id_file = os.path.join(data_dir, "arc2id.json")
pos2id_file = os.path.join(data_dir, "pos2id.json")
word2id_file = os.path.join(data_dir, "word2id.json")
id2word_file = os.path.join(data_dir, "id2word.json")
label2id_file = os.path.join(data_dir, "label2id.json")
embedding_file = os.path.join(data_dir, "en-cw.txt")
meta_file = os.path.join(data_dir, "meta.json")
train_file = os.path.join(data_dir, "train.gold.conll")
dev_file = os.path.join(data_dir, "dev.gold.conll")
test_file = os.path.join(data_dir, "test.gold.conll")
train_output_file = os.path.join(data_dir, "train")
dev_output_file = os.path.join(data_dir, "dev")
test_output_file = os.path.join(data_dir, "test")
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--arc2id_file', type=str, default=arc2id_file,
help='file storing arc class')
parser.add_argument('--pos2id_file', type=str, default=pos2id_file,
help='file storing pos class')
parser.add_argument('--word2id_file', type=str, default=word2id_file,
help='file storing arc class')
parser.add_argument('--id2word_file', type=str, default=id2word_file,
help='file storing pos class')
parser.add_argument('--label2id_file', type=str, default=label2id_file,
help='file storing pos class')
parser.add_argument('--embedding_file', type=str, default=embedding_file,
help='file storing embeddings')
parser.add_argument('--meta_file', type=str, default=meta_file,
help='file storing embeddings')
parser.add_argument('--data_file', type=str, default=train_file,
help='input data for preprocessing')
parser.add_argument('--output_file', type=str, default=train_output_file,
help='input data for preprocessing')
parser.add_argument('--fit', action="store_true",
help='input data for preprocessing')
args = parser.parse_args()
proc = Preprocess(args)
print "load data from %s" % (args.data_file)
proc.load(args.data_file)
if args.fit:
print "write x2id files"
proc.fit()
print "write transformed data to %s" % (args.output_file)
proc.transform(args.output_file)
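A hedged programmatic sketch of the two-pass flow implemented above, mirroring the argparse defaults in __main__; all paths assume the ./data layout shown there and that en-cw.txt and the *.gold.conll files already exist.

# Hypothetical driver; the file names simply echo the defaults defined in __main__.
import argparse

cfg = argparse.Namespace(
    arc2id_file='./data/arc2id.json', pos2id_file='./data/pos2id.json',
    word2id_file='./data/word2id.json', id2word_file='./data/id2word.json',
    label2id_file='./data/label2id.json', embedding_file='./data/en-cw.txt',
    meta_file='./data/meta.json')

train = Preprocess(cfg)
train.load('./data/train.gold.conll')
train.fit()                      # writes the *2id dictionaries and meta.json
train.transform('./data/train')  # writes one feature/label row per transition

dev = Preprocess(cfg)
dev.load('./data/dev.gold.conll')
dev.transform('./data/dev')      # reuses the dictionaries produced by fit()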
|
UTF-8
|
Python
| false | false | 9,755 |
py
| 11 |
preprocess.py
| 9 | 0.560943 | 0.540236 | 0 | 254 | 37.377953 | 118 |
michalj11121/Wd-155280
| 12,025,908,466,597 |
6778765c274f271b752acf06039946cc64551bf6
|
e1ec811a380d624b3c3c95c88f6369248862ca52
|
/matplotlib/Zadanie 10.py
|
3e302c4e27853840e5206662458cd1a724384e08
|
[] |
no_license
|
https://github.com/michalj11121/Wd-155280
|
ac53e138de089d9a53fc287582052ccd9ed224a2
|
7eee6bf2334c39ddf0eb93a555df40f1c241ea1a
|
refs/heads/master
| 2022-08-21T16:27:27.506633 | 2020-05-31T17:43:01 | 2020-05-31T17:43:01 | 245,125,036 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
x=np.arange(1,21)
fig, ax = plt.subplots()
ax.plot(x,1/x,label='f(x) = 1/x')
ax.annotate('f(5)=0.2',
xy=(5, 0.2), xycoords='data',
xytext=(5,0.6), textcoords='data',
arrowprops=dict(facecolor='red', shrink=0.05),
horizontalalignment='center', verticalalignment='top')
plt.xlabel('x')
plt.ylabel('f(x)')
plt.legend()
plt.axis([1, len(x), 0, 1])
plt.show()
x=np.arange(0,30.1,0.1)
s=np.sin(x)
c=np.cos(x)
fig, ax = plt.subplots()
ax.plot(x,s,'-r',label='sin(x)')
ax.plot(x,c,'--b',label='cos(x)')
ax.annotate('sin(0)',
xy=(0,0), xycoords='data',
xytext=(0,-0.5), textcoords='data',
arrowprops=dict(facecolor='red', shrink=0.05),
horizontalalignment='center', verticalalignment='top')
ax.annotate('cos(0)',
xy=(0,1), xycoords='data',
xytext=(0,0.5), textcoords='data',
arrowprops=dict(facecolor='red', shrink=0.05),
horizontalalignment='center', verticalalignment='top')
plt.title("sin(x) i cos(x) dla x[0,30] z krokiem 0.1")
plt.xlabel('x')
plt.ylabel('sin(x) i cos(x)')
plt.xticks(np.arange(0,31))
plt.legend()
plt.show()
|
UTF-8
|
Python
| false | false | 1,294 |
py
| 52 |
Zadanie 10.py
| 52 | 0.567233 | 0.527048 | 0 | 49 | 24.44898 | 66 |
phamtienthanhcong/Python_beginer
| 1,108,101,604,101 |
84e1a6e6f44baa5370d6ef5c35b6c3e6eea19c44
|
6dbcecb35c51285aa40248e8135611c83b12e37c
|
/app_suport_beginer/app auto/auto_post_fb/LoginFaceBook.py
|
4a4502f54e391d5ace5f1a55aa2cf608d3a52fbf
|
[] |
no_license
|
https://github.com/phamtienthanhcong/Python_beginer
|
203972928310a08d28a858fbe542b1671c5702df
|
11640bbbf8e2f2b7a825bfd51fb873f9c17e552c
|
refs/heads/master
| 2023-08-08T05:54:26.384383 | 2022-11-08T10:28:45 | 2022-11-08T10:28:45 | 399,650,614 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from selenium import webdriver
from time import sleep
from selenium.webdriver.common import keys
from selenium.webdriver.common.keys import Keys
from extend.data import*
import pyautogui as df
def setup():
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument("--start-maximized")
prefs = {"profile.default_content_setting_values.notifications": 2}
chrome_options.add_experimental_option("prefs", prefs)
broser = webdriver.Chrome(chrome_options=chrome_options, executable_path="extend\chromedriver.exe")
return broser
broser = setup()
class facebook():
def __init__(self,user,password,post,mapfile,namephoto):
self.user = user
self.password = password
self.post = post
self.mapfile = mapfile
self.namephoto = namephoto
def login(self):
broser.get("https://www.facebook.com/")
user = broser.find_element_by_id("email")
passworld = broser.find_element_by_id("pass")
user.send_keys(self.user)
passworld.send_keys(self.password)
passworld.send_keys(Keys.RETURN)
sleep(5)
def KeyChosePhoto(self):
# change map file
df.keyDown('alt')
df.press('d')
df.keyUp('alt')
df.typewrite(self.mapfile)
df.press('enter')
# chose file
df.keyDown('alt')
df.press('n')
df.keyUp('alt')
df.typewrite(self.namephoto)
df.press('enter')
def postconten(self):
clickpost = broser.find_element_by_xpath('/html/body/div[1]/div/div[1]/div/div[3]/div/div/div[1]/div[1]/div/div[2]/div/div/div[3]/div/div[2]/div/div/div/div[1]/div')
clickpost.click()
sleep(2)
writepost = broser.find_element_by_xpath('/html/body/div[1]/div/div[1]/div/div[4]/div/div/div[1]/div/div[2]/div/div/div/form/div/div[1]/div/div/div/div[2]/div[1]/div[1]/div[1]/div/div/div/div/div[2]/div/div/div/div')
writepost.send_keys(self.post)
sleep(1)
postimg = broser.find_element_by_xpath('/html/body/div[1]/div/div[1]/div/div[4]/div/div/div[1]/div/div[2]/div/div/div/form/div/div[1]/div/div/div/div[3]/div[1]/div[2]/div/div[1]/div/span/div')
postimg.click()
sleep(1)
facebook.KeyChosePhoto(self)
sleep(3)
puts = broser.find_element_by_xpath('/html/body/div[1]/div/div[1]/div/div[4]/div/div/div[1]/div/div[2]/div/div/div/form/div/div[1]/div/div/div/div[3]/div[2]/div/div')
puts.click()
    def PostMarket(self):
        pass
    def closefacebook(self):
        broser.close()
def choseclick(link,NumberOrClick):
autoclick = broser.find_element_by_xpath(link)
autoclick.click()
sleep(1)
    if (type(NumberOrClick)==str and NumberOrClick!="none"):  # compare the argument itself, not the imported DataText list
        autoclick.send_keys(NumberOrClick)
        sleep(1)
    elif (type(NumberOrClick)==str and NumberOrClick=="none"):
sleep(1)
fb.KeyChosePhoto()
sleep(2)
elif (type(NumberOrClick) == int and NumberOrClick > 0):
for i in range(1,NumberOrClick):
            df.press('down')  # fixed typo: 'dowm' is not a valid key name
df.press('enter')
sleep(1)
elif (type(NumberOrClick) == int and NumberOrClick < 0):
        for i in range(1,-NumberOrClick):  # NumberOrClick is negative in this branch; negate it so the loop actually runs
df.press('tab')
df.press('enter')
sleep(1)
fb=facebook(username,password,conten_post,mapfile,namephoto)
fb.login()
sleep(1)
broser.get("https://www.facebook.com/marketplace/create/item")
sleep(3)
for i in range(1,12):
choseclick(XpathText[i],DataText[i])
|
UTF-8
|
Python
| false | false | 3,487 |
py
| 44 |
LoginFaceBook.py
| 30 | 0.630915 | 0.613995 | 0 | 99 | 34.232323 | 224 |