Dataset schema (column: dtype, observed range or class count; ⌀ marks a nullable column):

repo_name: string, lengths 7-111
__id__: int64, 16.6k-19,705B
blob_id: string, length 40
directory_id: string, length 40
path: string, lengths 5-151
content_id: string, length 40
detected_licenses: list
license_type: string, 2 classes
repo_url: string, lengths 26-130
snapshot_id: string, length 40
revision_id: string, length 40
branch_name: string, lengths 4-42
visit_date: timestamp[ns]
revision_date: timestamp[ns]
committer_date: timestamp[ns]
github_id: int64, 14.6k-687M ⌀
star_events_count: int64, 0-209k
fork_events_count: int64, 0-110k
gha_license_id: string, 12 classes
gha_fork: bool, 2 classes
gha_event_created_at: timestamp[ns]
gha_created_at: timestamp[ns]
gha_updated_at: timestamp[ns]
gha_pushed_at: timestamp[ns]
gha_size: int64, 0-10.2M ⌀
gha_stargazers_count: int32, 0-178k ⌀
gha_forks_count: int32, 0-88.9k ⌀
gha_open_issues_count: int32, 0-2.72k ⌀
gha_language: string, lengths 1-16 ⌀
gha_archived: bool, 1 class
gha_disabled: bool, 1 class
content: string, lengths 10-2.95M
src_encoding: string, 5 classes
language: string, 1 class
is_vendor: bool, 2 classes
is_generated: bool, 2 classes
length_bytes: int64, 10-2.95M
extension: string, 19 classes
num_repo_files: int64, 1-202k
filename: string, lengths 4-112
num_lang_files: int64, 1-202k
alphanum_fraction: float64, 0.26-0.89
alpha_fraction: float64, 0.2-0.89
hex_fraction: float64, 0-0.09
num_lines: int32, 1-93.6k
avg_line_length: float64, 4.57-103
max_line_length: int64, 7-931

The rows below are pipe-delimited in the column order listed above.
Lillianaloha/reinforcement-learning | 6,176,163,012,705 | 5c904a626e25862fadded6283978f3fc040c85f8 | 00472201d7237bcf4e06c7617b54b5c25e5f9422 | /result.py | 5864026dfcd9506e7d406a3c1941660b562fdf1e | []
| no_license | https://github.com/Lillianaloha/reinforcement-learning | 23753bd9d4fa1e2c89d3c0ed791d3f1b5eab0835 | 62040423d064f2ce8e530a13a989daa868220c14 | refs/heads/master | 2021-04-30T10:03:19.945220 | 2018-02-13T01:41:34 | 2018-02-13T01:41:34 | 121,325,279 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | def result(action1p, action2p, winnerp):
global resultB
global winner
money1 = -10
money2 = -10
deltamoney = 10
moneypond = 20
# i = 0
# action1 = []
# for item in action1p:
# if item == 0:
# action1[i] = 'open'
# if item == 1:
# action1[i] = 'follow'
# if item == 2:
# action1[i] = 'doublefollow'
# if item == 3:
# action1[i] = 'discard'
# i = i + 1
# j = 0
# action2 = []
# for item in action2p:
# if item == 0:
# action2[j] = 'open'
# if item == 1:
# action2[j] = 'follow'
# if item == 2:
# action2[j] = 'doublefollow'
# if item == 3:
# action2[j] = 'discard'
# j = j + 1
# 0 discard 1 follow 2 doublefollow 3 open
for round in [0, 1, 2]:
#for round in [0, 1, 2, 3, 4]:
action1 = action1p[round]
# print ('player1 took: %s' %(action1))
if action1 == 0:
winner = 2
break
if action1 == 3:
money1 = money1 - 2*deltamoney
moneypond = moneypond + 2*deltamoney
winner = winnerp
break
action2 = action2p[round]
# print ('player2 took: %s' %(action2))
if action1 == 1 and action2 == 0:
money1 = money1 - deltamoney
moneypond = moneypond + deltamoney
winner = 1
break
if action1 == 1 and action2 == 3:
money1 = money1 - deltamoney
money2 = money2 - 2*deltamoney
moneypond = moneypond + deltamoney + 2*deltamoney
winner = winnerp
break
if action1 == 1 and action2 == 1:
money1 = money1 - deltamoney
money2 = money2 - deltamoney
moneypond = moneypond + deltamoney + deltamoney
if action1 == 1 and action2 == 2:
money1 = money1 - deltamoney
money2 = money2 - 2*deltamoney
moneypond = moneypond + deltamoney + 2*deltamoney
deltamoney = 2 * deltamoney
if action1 == 2 and action2 == 3:
money1 = money1 - 2*deltamoney
money2 = money2 - 4*deltamoney
moneypond = moneypond + 2*deltamoney + 4*deltamoney
winner = winnerp
break
if action1 == 2 and action2 == 0:
money1 = money1 -2*deltamoney
moneypond = moneypond + 2*deltamoney
winner = 1
break
if action1 == 2 and action2 == 2:
money1 = money1 - 2*deltamoney
money2 = money2 - 4*deltamoney
moneypond = moneypond + 2*deltamoney + 4*deltamoney
deltamoney = 4*deltamoney
if action1 == 2 and action2 == 1:
money1 = money1 - 2*deltamoney
money2 = money2 - 2*deltamoney
moneypond = moneypond + 2*deltamoney + 2*deltamoney
deltamoney = 2*deltamoney
# if action1 == 'follow' and action2 == 'discard':
# money1 = money1 - deltamoney
# moneypond = moneypond + deltamoney
# winner = 1
# if action1 == 'follow' and action2 == 'open':
# money1 = money1 -deltamoney
# money2 = money2 -2*deltamoney
# moneypond = moneypond + deltamoney + 2*deltamoney
# winner = winnerp
# break
# if action1 == 'follow' and action2 == 'follow':
# money1 = money1 - deltamoney
# money2 = money2 - deltamoney
# moneypond = moneypond + deltamoney + deltamoney
# if action1 == 'follow' and action2 == 'doublefollow':
# money1 = money1 - deltamoney
# money2 = money2 - 2*deltamoney
# moneypond = moneypond + deltamoney + 2*deltamoney
# deltamoney = 2*deltamoney
i = round
if i == 2:
winner = winnerp
# print (moneypond)
# print (money1)
# print (money2)
# print (deltamoney)
# print
# print('results:')
if winner == 1:
# print ('winner is player1, he or she winned %d dollars' %(money1 + moneypond))
# print ('loser is player2, he or she lost %d dollars' %(-money2))
resultA = money1 + moneypond
resultB = money2
if winner == 2:
# print ('winner is player2, he or she winned %d dollars' %(money2 + moneypond))
# print ('loser is player1, he or she lost %d dollars' %(-money1))
resultA = money1
resultB = money2 + moneypond
return resultB
| UTF-8 | Python | false | false | 3,748 | py | 7 | result.py | 6 | 0.627268 | 0.575774 | 0 | 145 | 24.841379 | 82 |
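A minimal usage sketch for result() above, assuming the action encoding documented in the file (0 discard, 1 follow, 2 doublefollow, 3 open) and that winnerp names the player, 1 or 2, who wins a showdown; the import path is a guess.

from result import result  # hypothetical module path

# both players follow twice, then player 1 opens; winnerp settles the showdown
payoff_b = result([1, 1, 3], [1, 1, 1], 2)
print(payoff_b)  # player 2's net winnings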
asheik12/skadmin | 13,683,765,815,678 | e5b9217fc9e6ef2549480c6a41cb1580722fb23f | 61c14c0c2c475d6d8ed783865a40a7990e7d150d | /employees/models.py | 3a32a239714fbbef66b8db75bed92025dbd2bb5b | []
| no_license | https://github.com/asheik12/skadmin | f0b4ee50f526a448e06afc741d2b96863f6ea37e | 82412511a58acd30003a118b98c591e444f7f358 | refs/heads/master | 2020-08-20T08:37:00.957776 | 2019-10-20T12:58:29 | 2019-10-20T12:58:29 | 216,002,440 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Employee(models.Model):
first_name = models.CharField(max_length=80)
last_name = models.CharField(max_length=55, blank=True, null=True)
dob = models.DateField(verbose_name='Date of Birth')
doj = models.DateField(verbose_name='Date of Joining')
user = models.OneToOneField(User, on_delete=models.DO_NOTHING, blank=True, null=True)
def __str__(self):
return self.first_name
class Salary(models.Model):
employee = models.ForeignKey(Employee, on_delete=models.CASCADE)
salary = models.DecimalField(max_digits=7, decimal_places=2)
is_active = models.BooleanField(default=False)
created_on = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.employee.first_name | UTF-8 | Python | false | false | 846 | py | 15 | models.py | 12 | 0.719858 | 0.712766 | 0 | 22 | 37.5 | 89 |
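A short shell-style sketch of how the two models above chain together; it assumes a configured Django project with this app's migrations applied, and all data values are illustrative.

from django.contrib.auth.models import User
from employees.models import Employee, Salary

u = User.objects.create_user('jdoe', password='secret')  # illustrative account
emp = Employee.objects.create(first_name='John', last_name='Doe',
                              dob='1990-01-01', doj='2019-06-01', user=u)
Salary.objects.create(employee=emp, salary=1500.00, is_active=True)
print(emp)  # __str__ returns the first name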
lianzhang132/yhexcer | 14,250,701,526,234 | 5cd0b1215ffe6f8a9a224617225cc5f03f63ddcf | 53f1770d9e03398ab00bf805e807e777fe17901c | /python/day1/变量.py | 7f3ccb07d85eee48f08fac16d5ebf44befc49f31 | []
| no_license | https://github.com/lianzhang132/yhexcer | cbed71074336d2f660158709b06882a1392508ac | e4a3035bb86895edcc3c065bf20c447ffb91ebe6 | refs/heads/master | 2020-05-27T08:19:57.880394 | 2019-05-25T01:58:09 | 2019-05-25T01:58:09 | 188,542,687 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | a = 2
b = 5
print(a)
print(b)
print(a*b)
a=b=3
print(a)
print(b)
d, e, f = a, b, 9
print(d,e,f)
import keyword
print(keyword.kwlist) | UTF-8 | Python | false | false | 130 | py | 212 | 变量.py | 163 | 0.638462 | 0.607692 | 0 | 15 | 7.733333 | 21 |
mga44/Revising-material | 2,396,591,771,362 | 2a1c79d2adc85f173a0cf844ca8b0f5fd2324f06 | 1bc2020ba79bc6a8b57fd92365320a070d259ae9 | /dbasegen.py | a5bd239e7886694982e50e6095272dd196baa980 | []
| no_license | https://github.com/mga44/Revising-material | 84e53c5e62700f98b9cfa0c58f46366ebbc59845 | 8776e1bd395e29b8d4a515fd62bd6b8c611c9b3f | refs/heads/master | 2022-01-18T08:22:27.398900 | 2019-05-12T22:46:40 | 2019-05-12T22:46:40 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import sqlite3
subjectname = "durability_of_the_materials"
db = sqlite3.connect('testing.db')
curs = db.cursor()
curs.execute('DROP TABLE IF EXISTS '+subjectname)
curs.execute('''CREATE TABLE IF NOT EXISTS '''+subjectname+''' ( id INT PRIMARY KEY, name VARCHAR, rate INT)''')
curs.execute('''INSERT INTO '''+subjectname+''' VALUES (1, "Durability of rod systems", 4)''')
curs.execute('''INSERT INTO '''+subjectname+''' VALUES (2, "Basics of calculations of statically indeterminate systems", 2)''')
curs.execute('''INSERT INTO '''+subjectname+''' VALUES (3, "Basics of linear theory of elasticity", 0)''')
curs.execute('''INSERT INTO '''+subjectname+''' VALUES (4, "Strength of circular plates and axially symmetrical coatings", 4)''')
curs.execute('''INSERT INTO '''+subjectname+''' VALUES (5, "Durability hypotheses", 1)''')
db.commit()
curs.execute("SELECT * FROM "+subjectname+" ORDER BY rate")
rows = curs.fetchall()
print(rows) | UTF-8 | Python | false | false | 971 | py | 3 | dbasegen.py | 2 | 0.676622 | 0.664264 | 0 | 26 | 35.423077 | 129 |
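For reference, the same kind of rows can be loaded with executemany and value placeholders, continuing the script above; the two subject rows here are illustrative, and note that the table name itself cannot be bound as a sqlite3 parameter.

more_rows = [
    (6, "Stability of columns", 3),       # illustrative data
    (7, "Energy methods", 2),
]
curs.executemany('INSERT INTO ' + subjectname + ' VALUES (?, ?, ?)', more_rows)
db.commit()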
yelghali/developpeursducoeur | 4,277,787,432,845 | 91ac4f71a316e048a408c054b006dd32b55ec1e6 | f381e2baac35561168bb59ebc403022984851f18 | /devheart/models/user.py | ddbf8733230806664094ffb3fdfc2eddd2074918 | []
| no_license | https://github.com/yelghali/developpeursducoeur | 739ff1e70d1677e0e8d71036762d52987956995c | 33842618df7f84b14f07db85a6d2481aa302b630 | refs/heads/master | 2021-01-10T21:58:43.793871 | 2015-07-30T19:39:45 | 2015-07-30T19:39:45 | 34,750,665 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #-*- coding: utf-8 -*-
from openerp import tools, SUPERUSER_ID
from openerp import models, api, _, fields
from openerp.osv import osv
from openerp.http import request, Response
from openerp.addons.website.models.website import slug
class devheart_partner(models.Model):
_inherit = ['res.partner','website.seo.metadata']
_name = "res.partner"
related_user = fields.Many2one("res.users", string=_('Related User'))
@api.model
def get_current_user(self):
related_user = self.env['res.users'].browse(self.env.uid) or False
if related_user:
self.related_user = related_user[0]
def get_current_user_partner_form_view(self):
context = request.context
context = {}
#return the business form view
view_id = 0
view_proxy = self.pool.get('ir.ui.view')
action = {
'type' : 'ir.actions.act_window',
'name' : 'Mon profile',
'view_type': 'form',
'view_mode': 'form',
'res_model' : 'res.users',
'res_id' : request.uid,
'context' : context,
'nodestroy':True,
}
return action
class devheart_user(models.Model):
_inherit = ['res.users','website.seo.metadata']
_name = "res.users"
_mail_post_access = 'read'
biography = fields.Html('Description')
skills = fields.Many2many('project.category', string='Skills', help="Your skills")
social_skype = fields.Char(string="Skype")
social_facebook = fields.Char(string="Facebook", default=False)
social_linkedin = fields.Char(string="=Linkedin", default=False)
social_twitter = fields.Char(string="twitter", default=False)
parent_id = fields.Many2one("res.partner", string="Organisation", domain=[ ('is_company', '=', True) ])
| UTF-8 | Python | false | false | 1,908 | py | 29 | user.py | 7 | 0.587526 | 0.584382 | 0 | 56 | 33.053571 | 117 |
jiamingli9674/Intelligent-Checkout-System | 14,156,212,217,166 | b68c1bfa4dfd0dd822d6ee75d45cf35411cf794a | 5c6cf54a357187f8a50ac6a87913b0b327c4d152 | /scripts/utility/user_info_form.py | 3e063cfb4b9b6ca3526d066113f6c17e055fb0a3 | [
"MIT"
]
| permissive | https://github.com/jiamingli9674/Intelligent-Checkout-System | 34cc5ba5a3653a4586b609d163cae9d86cbfff12 | 10c6ac761d707ce80686ae7e7f37461a8984616c | refs/heads/master | 2023-01-22T20:47:04.791240 | 2020-11-30T00:05:15 | 2020-11-30T00:05:15 | 292,162,069 | 4 | 1 | MIT | false | 2020-09-10T17:57:06 | 2020-09-02T02:48:10 | 2020-09-02T02:48:14 | 2020-09-10T17:57:06 | 1 | 0 | 0 | 0 | null | false | false | from wtforms.fields import SubmitField, StringField
from wtforms.form import Form
from wtforms.validators import DataRequired, Email
class UserInfoForm(Form):
first_name = StringField(label="Fist Name", validators=[DataRequired()])
last_name = StringField(label="Last Name", validators=[DataRequired()])
email = StringField(label="Email", validators=[Email(), DataRequired()])
submit = SubmitField(label ="Submit") | UTF-8 | Python | false | false | 431 | py | 26 | user_info_form.py | 19 | 0.75406 | 0.75406 | 0 | 9 | 47 | 76 |
jinfwd1021/ML100Days | 13,305,808,726,845 | df17d827c00b6e7dfec3cf1dac08ad9d7422f01a | 8991d519846f0c92ffe28e381b8aa6ba1a5dc900 | /Desktop/Python/HomeWork/D02.py | d1839b9fe44d76eeffb43bf6c8e3c7525f80770e | []
| no_license | https://github.com/jinfwd1021/ML100Days | 89f210003062c2a72000e845be4ca11d0c28fcc0 | cf4492c59d69166c6162a13617238026e8d71a67 | refs/heads/master | 2023-03-21T12:50:47.394055 | 2021-03-06T08:45:37 | 2021-03-06T08:45:37 | 330,093,555 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
array1 = np.array(range(30))
x=array1.reshape(5,6,order='F')
print(np.where(x%6==1))
| UTF-8 | Python | false | false | 104 | py | 8 | D02.py | 8 | 0.692308 | 0.615385 | 0 | 4 | 25 | 31 |
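For comparison, the same condition works as a boolean mask on the array built above, returning the matching values instead of their indices.

print(x[x % 6 == 1])  # values congruent to 1 mod 6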
pseyfert-cern-gitlab-backup/Urania | 19,121,194,405,689 | 4b8fddd91991ca69337ac9dbb891cbd01ff1cdc6 | 71c7683331a9037fda7254b3a7b1ffddd6a4c4c8 | /Phys/Bs2MuMu/scripts/getListOfBranches.py | 577a6d971adba28c92c182ecd86c2f32938d88c5 | []
| no_license | https://github.com/pseyfert-cern-gitlab-backup/Urania | edc58ba4271089e55900f8bb4a5909e9e9c12d35 | 1b1c353ed5f1b45b3605990f60f49881b9785efd | refs/heads/master | 2021-05-18T13:33:22.732970 | 2017-12-15T14:42:04 | 2017-12-15T14:42:04 | 251,259,622 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | #------------------------#
#This code prints all the
#branch names of an nTuple
#------------------------#
from readData import *
def getListOfBranches(nTupleName,channel, lookFor=''):
t = readData(nTupleName, {'channel':channel}).get('tree')
totBrList = t.GetListOfBranches()
found = 0
print ' The lines containing: ', lookFor
for br in totBrList:
name = br.GetName()
if name.find(lookFor)>-1 or lookFor=='fullList':
print name
found+=1
print ' Found ', found, ' lines.'
return '-- THE END --'
| UTF-8 | Python | false | false | 529 | py | 2,569 | getListOfBranches.py | 1,974 | 0.599244 | 0.593573 | 0 | 26 | 19.269231 | 58 |
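A hypothetical invocation; readData() is this repository's own helper, and the tuple name and channel below are placeholders.

getListOfBranches('myTuple.root', 'Bs2MuMu', lookFor='PT')
# prints every branch whose name contains 'PT'; pass lookFor='fullList'
# to dump all branches instead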
hierynomus/dockercloud-haproxy | 7,808,250,569,191 | 73de8c0de555c5b41a7766586641c48562c9922c | 4a5e3aad1b2b5d6bf92f70c2a8d28eba2a5ccf2e | /haproxy/helper/update_helper.py | 7785ebb38dd9df11d06b8ef017ddca95aed4e2b5 | []
| no_license | https://github.com/hierynomus/dockercloud-haproxy | bbcecef2e746234dd5354f0a477a882b01ada2c1 | 9791b962bbb692ecab3b548768f5a196798dc68e | refs/heads/master | 2023-08-21T12:13:02.134289 | 2016-09-28T12:52:37 | 2016-09-28T12:52:37 | 69,853,727 | 1 | 0 | null | true | 2023-08-14T08:42:27 | 2016-10-03T08:37:31 | 2023-07-25T06:58:33 | 2023-08-14T08:42:26 | 196 | 1 | 0 | 3 | Python | false | false | import logging
import subprocess
import thread
from haproxy.config import HAPROXY_RUN_COMMAND
logger = logging.getLogger("haproxy")
def run_once():
logger.info("Launching HAProxy")
p = subprocess.Popen(HAPROXY_RUN_COMMAND)
logger.info("HAProxy has been launched(PID: %s)", str(p.pid))
logger.info("===========END===========")
p.wait()
def run_reload(old_process):
if old_process:
# Reload haproxy
logger.info("Reloading HAProxy")
new_process = subprocess.Popen(HAPROXY_RUN_COMMAND + ["-sf", str(old_process.pid)])
thread.start_new_thread(wait_pid, (old_process,))
logger.info("HAProxy has been reloaded(PID: %s)", str(new_process.pid))
else:
# Launch haproxy
logger.info("Launching HAProxy")
new_process = subprocess.Popen(HAPROXY_RUN_COMMAND)
logger.info("HAProxy has been launched(PID: %s)", str(new_process.pid))
return new_process
def wait_pid(process):
process.wait()
logger.info("HAProxy(PID:%s) has been terminated" % str(process.pid))
| UTF-8 | Python | false | false | 1,064 | py | 4 | update_helper.py | 4 | 0.652256 | 0.652256 | 0 | 36 | 28.555556 | 91 |
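The two entry points above compose into a simple lifecycle; a sketch of the intended call pattern (HAPROXY_RUN_COMMAND comes from haproxy.config):

process = None
process = run_reload(process)  # no old process yet: plain launch
# ... regenerate the HAProxy config file here ...
process = run_reload(process)  # hot reload via -sf; old PID drains in a thread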
wxyang91/FQN | 9,655,086,530,545 | 1cf7697eaa14e468a3f5733e3642115e81c63b76 | 3af7dbc4b19e023274be4efa303f6aa2b17d2a14 | /test_fft.py | 8df9c4921051076128d502dc712f466bf5eefb34 | []
| no_license | https://github.com/wxyang91/FQN | 125e41766594d67fa4bee66558b676791ea15e0e | 9817bce3c0d2448ba928a99745b67d450266bf05 | refs/heads/master | 2020-03-27T04:07:53.148220 | 2018-08-31T01:01:39 | 2018-08-31T01:01:39 | 145,914,195 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
from numpy.fft import fft
import matplotlib.pyplot as plt
seq = 2*np.sin(2*np.arange(0,3.14,0.1))
plt.figure(1)
plt.plot(seq)
f = fft(seq)
plt.figure(2)
plt.plot(np.abs(f))  # f is complex; plot its magnitude
plt.show() | UTF-8 | Python | false | false | 194 | py | 5 | test_fft.py | 5 | 0.716495 | 0.664948 | 0 | 10 | 18.5 | 39 |
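To put physical frequencies on the x axis of the spectrum above, numpy's fftfreq can be used with the same 0.1 sample spacing; a sketch continuing the snippet:

from numpy.fft import fftfreq
freqs = fftfreq(len(seq), d=0.1)  # bin frequencies for the 0.1-step samples
plt.plot(freqs, np.abs(f))
plt.show()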
DrMaggus/KawaFluss | 7,825,430,413,913 | 58fefce49a8e4ddc969cf0f9989f3e866d0ded99 | e4a972a511ebabd2ab8a23e83c82c0e48b171807 | /img.py | 27874e9417b1c5b422e30fdec43c04f1bea7cc1a | []
| no_license | https://github.com/DrMaggus/KawaFluss | aff4fae75f9e09e27fd1d25208f382cb634713e8 | 1f3595b0df189f0297266321f16af7dfc86587bd | refs/heads/master | 2016-09-06T15:22:04.564360 | 2014-01-14T15:22:23 | 2014-01-14T15:22:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
"""
* Copyright (C) 2014 Matthias Eiserloh & Markus Wolf
*
* This file is part of KawaFluss.
*
* KawaFluss is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 3 of the License.
*
* KawaFluss is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with KawaFluss. If not, see <http://www.gnu.org/licenses/>.
"""
import pygame
class Img:
def __init__(self, posX, posY, visible, is_rotating, rot_angle, rot_object, font_object, original_object):
self.posX = posX
self.posY = posY
self.visible = visible
self.is_rotating = is_rotating
self.rot_angle = rot_angle
self.rot_object = rot_object
self.font_object = font_object
self.original_object = original_object
def getX(self):
return self.posX
def setX(self, _posX):
self.posX = _posX
def getY(self):
return self.posY
def setY(self, _posY):
self.posY = _posY
def getVisible(self):
return self.visible
def setVisible(self, _visible):
self.visible = _visible
def getIsRotating(self):
return self.is_rotating
def setIsRotating(self, _is_rotating):
self.is_rotating = _is_rotating
def getRotAngle(self):
return self.rot_angle
def setRotAngle(self, _rot_angle):
self.rot_angle = _rot_angle
def getRotObject(self):
return self.rot_object
def setRotObject(self, _rot_object):
self.rot_object = _rot_object
def getFontObject(self):
return self.font_object
def setFontObject(self, _font_object):
self.font_object = _font_object
def getOriginalObject(self):
return self.original_object
def setOriginalObject(self, _original_object):
self.original_object = _original_object
def rotate(self):
self.setRotAngle(self.getRotAngle()+2)
self.setRotObject(pygame.transform.rotate(self.font_object, self.getRotAngle()))
def place(self, mouseX, mouseY):
self.setX(mouseX - self.getRotObject().get_width()/2)
self.setY(mouseY - self.getRotObject().get_height()/2)
self.setVisible(True)
self.setIsRotating(False)
| UTF-8 | Python | false | false | 2,678 | py | 9 | img.py | 8 | 0.631068 | 0.627707 | 0 | 91 | 28.384615 | 110 |
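A minimal construction sketch: any pygame Surface works for the three image slots, and all values here are illustrative.

import pygame
pygame.init()
surf = pygame.Surface((32, 32))
img = Img(0, 0, True, True, 0, surf, surf, surf)
img.rotate()  # bumps rot_angle by 2 degrees and rebuilds rot_object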
its-Kumar/Flask | 8,899,172,279,971 | 7ebe4f87f08964012650a12cb9d7f5845da91e28 | 7e601a378a897139dcb424ba85004dc19a5748c7 | /FlaskMarket/market/routes.py | c7dd873050380828bfcc87d99feed0130bdc9771 | []
| no_license | https://github.com/its-Kumar/Flask | 58169d5d4a452090453153899d1c8f355d6a7c5f | fa5764f2997ae1fccaa4d2a1a646380a623abb8c | refs/heads/master | 2021-06-17T09:52:45.393113 | 2021-05-05T07:46:36 | 2021-05-05T07:46:36 | 195,162,632 | 2 | 2 | null | false | 2021-05-05T08:50:34 | 2019-07-04T03:27:43 | 2021-05-05T07:46:53 | 2021-05-05T08:50:34 | 59 | 1 | 1 | 1 | Python | false | false | from flask import flash, redirect, render_template, request, url_for
from flask_login import current_user, login_required, login_user
from flask_login.utils import logout_user
from market import app, db
from market.forms import (LoginForm, PurchaseItemForm, RegisterForm,
SaleItemForm)
from market.models import Item, User
# routes / views
@app.route('/')
@app.route('/home')
def home_page():
return render_template('home.html')
@app.route('/about')
def about_page():
return "<h1> About Page</h1>"
@app.route('/market', methods=["GET", "POST"])
@login_required
def market_page():
purchase_form = PurchaseItemForm()
selling_form = SaleItemForm()
if request.method == "POST":
# Purchase Item
purchased_item = request.form.get('purchased_item')
p_item_obj = Item.query.filter_by(name=purchased_item).first()
if p_item_obj:
if current_user.can_purchase(p_item_obj):
p_item_obj.buy(current_user)
flash(f"Congratulations! You have purchased {p_item_obj.name} \
for {p_item_obj.price}$",
category='success')
else:
flash(f"Unfortunately, you don't have enough money to purchase \
{p_item_obj.name}!",
category='danger')
# Sale Item
sold_item = request.form.get('sold_item')
s_item_obj = Item.query.filter_by(name=sold_item).first()
if s_item_obj:
if current_user.can_sale(s_item_obj):
s_item_obj.sell(current_user)
flash(f"Congratulations! You have sold {s_item_obj.name} back \
to the Market", category='success')
else:
flash(f"Something went wrong with selling {s_item_obj.name}",
category='danger')
return redirect(url_for('market_page'))
if request.method == "GET":
items = Item.query.filter_by(owner=None)
owned_items = Item.query.filter_by(owner=current_user.id)
return render_template(
'market.html',
items=items,
purchase_form=purchase_form,
owned_items=owned_items,
selling_form=selling_form,
)
@app.route('/register', methods=['GET', 'POST'])
def register_page():
form = RegisterForm()
if form.validate_on_submit():
user_to_create = User(username=form.username.data,
email_address=form.email_address.data,
password=form.password1.data)
db.session.add(user_to_create)
db.session.commit()
login_user(user_to_create)
flash(f"Account created successfully! You are now loged in\
as {user_to_create.username}", category='success')
return redirect(url_for('market_page'))
if form.errors != {}:
for err_msg in form.errors.values():
flash(f"There was an error with creating a user: {err_msg}",
category='danger')
return render_template('register.html', form=form)
@app.route('/login', methods=["GET", "POST"])
def login_page():
form = LoginForm()
if form.validate_on_submit():
attempted_user = User.query.filter_by(
username=form.username.data).first()
if attempted_user and attempted_user.check_password_correction(
attempted_password=form.password.data):
login_user(attempted_user)
flash(
f"Success! You have logged in as: {attempted_user.username}",
category='success')
return redirect(url_for('market_page'))
else:
flash("Username and Password not matched! Please try again",
category='danger')
return render_template('login.html', form=form)
@app.route('/logout')
def logout_page():
logout_user()
flash("You have been logout", category='info')
return redirect(url_for('home_page'))
| UTF-8 | Python | false | false | 4,036 | py | 21 | routes.py | 15 | 0.58672 | 0.585976 | 0 | 116 | 33.793103 | 80 |
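The purchase and sale branches above call helpers that live on the models; market/models.py is not part of this dump, so the sketch below is purely illustrative, and the field names (budget, price, owner) are assumptions.

class UserSketch:
    # plausible shape of User.can_purchase used by the route above
    def can_purchase(self, item):
        return self.budget >= item.price

class ItemSketch:
    # plausible shape of Item.buy used by the route above
    def buy(self, user):
        self.owner = user.id
        user.budget -= self.price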
GuSilva20/Primeiros-Codigos | 7,645,041,822,125 | d7912f50a4b511178b85acdcdbe1a8ee6d4e0464 | 4c75314a1de4eb0f7cbd0dba8532928722d418bf | /Ex40LendoCadastrosEClassificando.py | 6c635565b809d6c21f41fd6d3fd423c14f4cdfd4 | [
"MIT"
]
| permissive | https://github.com/GuSilva20/Primeiros-Codigos | b0902158dfdb1aa2a840cbb9cfd33afe6a86608d | 67deefb683b40c2c4fee1bb383730588635e9d85 | refs/heads/main | 2023-06-30T18:13:02.846956 | 2021-08-01T17:29:16 | 2021-08-01T17:29:16 | 391,691,670 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | for c in range(1, 5):
    nome = str(input("Digite o nome: "))
    sexo = str(input("Sexo[M][F]: "))
    idade = int(input("Idade: "))
    if c == 1:
        # accumulators must persist across the loop, so start them on the first pass only
        media = 0
        idadehomem = 0
        nomehomem = ""
        idademulher = 0
    media += idade
    if sexo.upper() == "M":
        if idade > idadehomem:
            nomehomem = nome
            idadehomem = idade
    elif sexo.upper() == "F":
        if idade >= 20:
            idademulher += 1
    else:
        print("Erro")
print("Á Média de idade do grupo é: {}".format(media / 4))
print("O nome do homem mais velho é {} com a idade {} ".format(nomehomem,idadehomem))
print("A quantidade de mulher com mais de 20 anos é {}".format(idademulher))
| UTF-8 | Python | false | false | 820 | py | 51 | Ex40LendoCadastrosEClassificando.py | 50 | 0.533742 | 0.517791 | 0 | 25 | 31.6 | 85 |
phyiction/eth-gitter-graph-analysis | 6,390,911,360,507 | 16382dbb3d397a1bee01177dee7f2e32a578feb1 | 66fc9a9461c6942eba38037b937a5d8973e6d530 | /extract.py | 7d53d043585829052cb71fba7560127bbb33f732 | []
| no_license | https://github.com/phyiction/eth-gitter-graph-analysis | f74540b7a30ff9d5ae82dc98bea48c08e206da1a | 8b64daae1cb5ce426f823af0c1646fc0c97c2e3e | refs/heads/master | 2020-03-23T17:20:01.470364 | 2018-07-26T13:30:59 | 2018-07-26T13:30:59 | 141,854,768 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import json
import os
import requests
from datetime import datetime
def get_data(url,params={}):
headers = {
'Accept': 'application/json',
'Authorization': 'Bearer ' + os.environ['TOKEN']
}
return requests.get(url,params=params,headers=headers)
#end
def get_groups():
print('Groups')
r = get_data("https://api.gitter.im/v1/groups")
groups = r.json()
for g in groups:
print("{0} {1}".format(g['id'], g['name']))
#end
#end
def get_rooms():
print('Rooms')
r = get_data("https://api.gitter.im/v1/rooms")
rooms = r.json();
for r in rooms:
print("{0} {1}".format(r['id'],r['name']))
#end
#end
def get_messages(room_id):
before_id = None
min_sent = None
f = open('{0}-messages.txt'.format(room_id),'w+')
try:
while True:
params = {
'beforeId': before_id,
'limit': 100
}
r = get_data('https://api.gitter.im/v1/rooms/{0}/chatMessages'.format(room_id),params=params)
if r.status_code == 200:
messages = r.json();
for m in messages:
sent = datetime.strptime(m['sent'],'%Y-%m-%dT%H:%M:%S.%fZ')
if min_sent == None or sent < min_sent:
min_sent = sent
before_id = m['id']
#end
f.write(json.dumps(m,sort_keys=True) + '\n')
#end
print(before_id)
else:
print(r.reason)
print("Run again in " + r.headers['X-RateLimit-Reset'] + "seconds")
break
#end
#end
finally:
f.close()
#end
#end
# get_rooms()
# ethereum research
messages = get_messages('55fe873b0fc9f982beb13b83') | UTF-8 | Python | false | false | 1,833 | py | 3 | extract.py | 1 | 0.494272 | 0.478451 | 0 | 68 | 25.970588 | 105 |
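A hypothetical way to run the module above; every request reads a personal access token from the TOKEN environment variable.

import os
os.environ.setdefault('TOKEN', '<your-gitter-token>')  # placeholder token
get_rooms()  # lists room ids, e.g. the one hard-coded above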
deco93/MomsVengeance | 15,822,659,531,448 | b85409c2270dd45341338805f1369f274e9d5dac | f4b7fce1c5277ab81e66a72746553554b9ae9a7b | /MothersVengeance/classes/healthBar.py | ed1196857e3a3f777c5b2980138d27fa16711174 | []
| no_license | https://github.com/deco93/MomsVengeance | 9bf468e57ad48e57357f5c37b574a63153abd0e5 | c1ce281bf5e124667bd6d5e343f91c729f5fb00c | refs/heads/master | 2022-12-12T08:06:00.830909 | 2020-09-13T13:48:24 | 2020-09-13T13:48:24 | 290,578,113 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import pygame
import math
class HealthBar(object):
def __init__(self, player, radius, line_width, x, y):
self.location = [x, y]
self.player = player
self.radius = radius
self.line_width = line_width
self.fill_color = pygame.Color(207,0,112)
self.edge_color = pygame.Color(255,255,255)
def draw(self, win):
# fill
pygame.draw.circle(win, self.fill_color, self.location, math.ceil(self.radius * self.player.current_health / self.player.max_health), 0)
# line
pygame.draw.circle(win, self.edge_color, self.location, self.radius, self.line_width)
| UTF-8 | Python | false | false | 652 | py | 9 | healthBar.py | 7 | 0.616564 | 0.590491 | 0 | 21 | 29.857143 | 144 |
anmolsrivastava18/ERP_Repository | 14,113,262,574,217 | 8a058585413639c96d6612f71cfc56ef56f0ddf4 | 349ea76817d6647f662aba06a06bdf03873d2b49 | /aims/inventory/migrations/0008_auto_20210105_0115.py | 1c1555c75aff47bb9aee20ad6a329578add60ee8 | []
| no_license | https://github.com/anmolsrivastava18/ERP_Repository | 72f0ccc53fa7f38277f962972a20cf8ee461e7da | 0c9c041624b1133d04c0389a9270140b68c10b21 | refs/heads/master | 2023-05-11T01:36:02.991403 | 2021-03-08T19:22:27 | 2021-03-08T19:22:27 | 333,938,771 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 2.2.5 on 2021-01-04 19:45
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('inventory', '0007_auto_20210105_0102'),
]
operations = [
migrations.AlterField(
model_name='outward',
name='from_warehouse_name',
field=models.ForeignKey(help_text='Warehouse from which part is being sent/outward', on_delete=django.db.models.deletion.PROTECT, related_name='outward_from_warehouse_name', to='inventory.Warehouse'),
),
]
| UTF-8 | Python | false | false | 597 | py | 85 | 0008_auto_20210105_0115.py | 41 | 0.666667 | 0.61474 | 0 | 19 | 30.421053 | 212 |
lethanhday/hieuhien.vn | 5,076,651,371,944 | 7237fead1cca365deae904c2930c20f30e8099a1 | 4ca97229d63598f59adaeb16da70cfeacf030652 | /plugin.video.hieuhien.vn.phim3s/source.py | aa8f5e02df7fd5c253b27d4504c9122ec75476ca | []
| no_license | https://github.com/lethanhday/hieuhien.vn | 463c2bd7799e5f732f3269df8aa22e41d9e98009 | 4bf3b5514e432921e7a8dd854416df403e9caf36 | refs/heads/master | 2021-01-23T01:45:48.621312 | 2017-03-21T09:01:37 | 2017-03-21T09:01:37 | 85,931,443 | 2 | 0 | null | true | 2017-03-23T09:31:59 | 2017-03-23T09:31:59 | 2016-11-07T11:15:17 | 2017-03-21T09:01:54 | 260,817 | 0 | 0 | 0 | null | null | null | # -*- coding: utf-8 -*-
import re
import urlparse
import urllib
import json
import api
class Source:
def __init__(self, cache_path):
self.cache_path = cache_path
self.base_url = 'http://phim3s.net/'
def __get_page__(self, url, cacheTime=3600000):
return api.SOUPKit(cache_path=self.cache_path).SOUPFromURL(url, cacheTime=cacheTime)
def base_url(self):
return self.base_url
def menu(self):
page = self.__get_page__(self.base_url)
menu = {}
content = page.find('ul', {'class': 'container menu'})
for li in content.children:
a = li.find('a')
if type(a) is int: continue
submenu = []
if 'href' in a.attrs and 'title' in a.attrs and a['href'] <> self.base_url and not a['href'].startswith('http'):
submenu.append({'label': a.text.strip(), 'href': self.base_url + a['href'] + "?order_by=last_update"})
sub_ul = li.find('ul', {'class': 'sub-menu'})
if sub_ul <> None:
for s in sub_ul.find_all('a'):
submenu.append({'label': s.text.strip(), 'href': self.base_url + s['href'] + "?order_by=last_update"})
if unicode(a.text) in menu:
menu[unicode(a.text)].append(submenu)
else:
menu[unicode(a.text)] = submenu
return menu
def contents(self, url):
page = self.__get_page__(url)
contents = []
temp = []
for c in page.find_all('div', {'class': 'inner'}):
a = c.find('a')
href = self.base_url + a['href']
if href in temp or href.startswith('clip'):
continue
temp.append(href)
poster = a.find('img')['src']
status = c.find('div', {'class': 'status'})
if status <> None:
status = status.text
if status == 'Trailer':
continue
title1 = c.find('div', {'class': 'name'}).find('a').text
title2 = c.find('div', {'class': 'name2'}).text
contents.append({'title1': unicode(title1), 'title2': unicode(title2), 'href': href, 'duration': unicode(status), 'info': status, 'poster': poster})
next_page = self.__next_page__(page)
return {'items': contents, 'next_page': next_page}
def media_items(self, url):
page = self.__get_page__(url)
poster = page.find('img', {'class': 'photo'})['src']
title1 = page.find('span', {'class': 'fn'}).text
title2 = page.find('div', {'class': 'name2 fr'}).find('h3').text
watch = self.base_url + page.find('a', {'class': 'btn-watch'})['href']
page = self.__get_page__(watch)
media_items = {}
serverList = page.find('div', {'class': 'serverlist'})
if serverList <> None:
for server in serverList.find_all('div', {'class': 'server'}):
serverName = server.find('div', {'class': 'label'}).text
streams = []
for e in server.find('ul', {'class': 'episodelist'}).find_all('a'):
href = '%sajax/episode/embed/?episode_id=%s' %(self.base_url, e['data-episode-id'])
s = []
ep_title = u''.join([u'Tập ', e.text])
if ep_title in media_items:
s = media_items[unicode(ep_title)]
s.append({'title1': unicode(title1), 'title2': unicode(title2), 'ep_title': ep_title, 'poster': poster, 'banner': '', 'server_name': unicode(serverName), 'href': href})
media_items[unicode(ep_title)] = s
if media_items == {}:
media_items['DEFAULT'] = [{'title1': unicode(title1), 'title2': unicode(title2), 'ep_title': '', 'poster': poster, 'banner': '', 'server_name': unicode('Server 1'), 'href': url}]
return media_items
def search(self, query):
search_url = self.base_url + 'ajax/film/search?' + urllib.urlencode({'keyword': urllib.quote_plus(query)})
result = api.JSONKit(cache_path=self.cache_path).ObjectFromURL(search_url, cacheTime=0)
contents = []
if 'json' in result:
r = result['json']
for i in r:
m = r[i]
contents.append({'title1': unicode(m['title']), 'title2': unicode(m['title_o']), 'href': self.base_url + m['link'], 'duration': unicode(m['status']), 'info': unicode(m['status']), 'poster': m['image_url']})
return {'items': contents, 'next_page': None}
def __next_page__(self, page):
pages = page.find('span', {'class': 'page_nav'})
if pages is None:
return None
n = pages.find('span', {'class': 'current'})
if n is None:
return None
next_sib = n.nextSibling
if next_sib <> None:
n = next_sib.find('a')['href']
return self.base_url + n
return None
def resolve_stream(self, url):
result = api.JSONKit(cache_path=self.cache_path).ObjectFromURL(url, cacheTime=0, headers={'X-Requested-With': 'XMLHttpRequest'})
url = '%s?%s' %(result['grabber'], urllib.urlencode({'link': result['video_url_hash'], 'json': 1}))
result = api.JSONKit(cache_path=self.cache_path).ObjectFromURL(url, cacheTime=0)
loc = None
for c in result:
if c['label'] == '720p':
return c['file']
elif c['label'] == '360p':
                loc = c['file']
return loc
| UTF-8 | Python | false | false | 4,719 | py | 122 | source.py | 53 | 0.613314 | 0.604622 | 0 | 140 | 32.671429 | 210 |
dahbiaberrani/s2python | 17,076,790,002,972 | 7a45fd51978586b24ea9335c3f2b712c22f96af9 | d1eadaf86b563bdf056f691cf9e7a5803e1a4935 | /examen/file.py | 72423864a799813961e404ba17bf5808a67ab6dc | []
| no_license | https://github.com/dahbiaberrani/s2python | 2dcca7fabd4cad852eb8c0b7c4a5767a68e2385b | 5bd843fc2b83ed663df523addbab8b856486ddcc | refs/heads/main | 2023-04-22T00:04:48.304756 | 2021-05-06T12:34:46 | 2021-05-06T12:34:46 | 349,734,907 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | class File:
def __init__(self,maxMafile):
self.maxfile = maxMafile
self.contenu = []
def __str__(self):
return str(self.contenu)
def afficher(self):
print(self)
def fileVide(self):
if len(self.contenu) == 0:
return True
else:
return False
def filepleine(self):
return len(self.contenu) == self.maxfile
def ajouter(self,elt):
if self.filepleine():
print("impossible d'ajouter un elt la liste est pleine ")
else:
self.contenu.append(elt)
def retirer(self):
if self.fileVide():
print("impossible de retirer un elt la liste est vide ")
else:
tmp = self.contenu[0]
self.contenu.remove(tmp)
return tmp
def maxSize(self):
return self.maxfile
def reverseFile(file):
newFile = File(file.maxSize())
tab = []
while not file.fileVide():
elem = file.retirer()
tab.append(elem)
for i in range (len(tab)):
newFile.ajouter(tab[len(tab)-1-i])
return newFile
# test
f1 = File(10)
f1 .ajouter (1)
f1 .ajouter (2)
f1 .ajouter (3)
f1 .ajouter (4)
f1 .ajouter (5)
print(f1)
f2 = reverseFile(f1)
print(f2)
| UTF-8 | Python | false | false | 1,300 | py | 18 | file.py | 18 | 0.543846 | 0.528462 | 0 | 60 | 20.583333 | 69 |
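For reference, the standard library's deque offers the same bounded FIFO behaviour that the File class implements; a sketch, noting that a full deque silently drops from the far end rather than printing a warning.

from collections import deque

q = deque(maxlen=10)
for v in (1, 2, 3, 4, 5):
    q.append(v)        # like File.ajouter
first = q.popleft()    # like File.retirer
q.reverse()            # in place, mirroring reverseFile()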
UsefNait/espr | 9,208,409,907,132 | b50a4e4694bed869b7f24a4ad59dc5c90902f649 | 1ab88d88a51e4f52a7b56dc14013cdad590ac6df | /ESPR-master/qa/migrations/0006_auto_20180509_2246.py | 7e0a9208126e4e0d4bfde2e8007a787c8ae2076a | []
| no_license | https://github.com/UsefNait/espr | 22be5c40fd89c6f73fb669c129f638b8edb8ca26 | e026a6595b5f69a60e40f1e78e611465bae76c1a | refs/heads/master | 2020-03-19T05:35:28.196434 | 2018-06-03T22:45:44 | 2018-06-03T22:45:44 | 135,946,030 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 2.0.2 on 2018-05-09 21:46
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('qa', '0005_auto_20180509_2106'),
]
operations = [
migrations.AlterField(
model_name='question',
name='categorie',
            # on_delete=True is not a valid deletion handler; SET_NULL is assumed here since the field is nullable
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='qa.Categorie'),
),
]
| UTF-8 | Python | false | false | 430 | py | 19 | 0006_auto_20180509_2246.py | 19 | 0.6 | 0.527907 | 0 | 18 | 22.888889 | 94 |
Traklon/IRC | 10,316,511,481,901 | 68ea03b0129a972cc571fba3abdb901fa103aea5 | af3c3ab0fb9fe3b0f5084ecbda4953ba9dba2937 | /Perudo2.py | 66cd02cb21cce15b393609d20ac642ec3947cae7 | []
| no_license | https://github.com/Traklon/IRC | db22750949126890d406a0bb14f1e4ad3525499b | 5422b0adaf6e75838a298a1b2fccfaa8b1927181 | refs/heads/master | 2016-09-05T23:14:13.018002 | 2015-04-08T13:42:16 | 2015-04-08T13:42:16 | 24,571,034 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python2
# -*- coding: utf8 -*-
import irclib
import ircbot
import random
import re
import sys
class PeruBot(ircbot.SingleServerIRCBot):
server = sys.argv[1]
chan = '#'+sys.argv[2]
state = 'PRE-JEU'
players = {}
order = []
curr = 0
nb = 0
val = 0
palifico = False
join = []
nojoin = False
if (len(sys.argv) == 3):
port = 6667
else:
        port = int(sys.argv[3])
def __init__(self):
ircbot.SingleServerIRCBot.__init__(self, [(self.server, self.port)],
"PeruBot", "https://github.com/Traklon/IRC")
def on_welcome(self, serv, ev):
serv.join(self.chan)
def on_join(self, serv, ev):
author = irclib.nm_to_n(ev.source())
if self.state == 'PRE-JEU':
serv.privmsg(self.chan, "Bienvenue " + author + " ! Tape !join pour rejoindre ! Rappel : tapez !play pour lancer la partie !")
def melange(self,serv):
for player in self.players.iterkeys():
d = self.players[player]
for (i,e) in enumerate(d):
d[i] = random.randint(1,6)
self.players[player] = d
serv.notice(player, "Tes dés sont : "+ ", ".join(map(str,d)))
def verif(self, serv):
somme = 0
for player in self.players.iterkeys():
tmp_nb = 0
for e in self.players[player]:
if ((e == self.val) or ((e == 1) and (not self.palifico))):
tmp_nb = tmp_nb+1
somme += tmp_nb
serv.privmsg(self.chan, player + " révèle " + str(tmp_nb) + " " + str(self.val) + " !")
return somme
def verif_elimination(self, serv):
if len(self.players[self.order[self.curr]]) == 0:
serv.privmsg(self.chan, self.order[self.curr] + " est éliminé !")
name = self.order[self.curr]
self.order.remove(name)
self.players.pop(name, None)
self.curr = (self.curr)%(len(self.players))
self.nojoin = True
def verif_gg(self, serv):
if len(self.players) == 1:
serv.privmsg(self.chan, self.order[0] + " a gagné ! Félicitations !")
self.reset()
return True
return False
def verif_1de(self, serv):
if len(self.players[self.order[self.curr]]) == 1:
serv.privmsg(self.chan, "PALIFICO !")
self.palifico = True
else:
self.palifico = False
def verif_join(self, serv):
if (not self.nojoin):
for e in self.join:
serv.privmsg(self.chan, e + " a rejoint la partie !")
mini = 5
                for player in self.players.iterkeys():
                    if len(self.players[player]) < mini:
                        mini = len(self.players[player])
self.players[e] = [1,1,1,1,1][:mini]
self.order.append(e)
self.join = []
def reset(self):
self.state = 'PRE-JEU'
self.players = {}
self.order = []
self.curr = 0
self.nb = 0
self.val = 0
self.palifico = False
self.join = []
self.nojoin = False
def nouv_tirage(self, serv):
self.verif_1de(serv)
self.verif_join(serv)
self.verif_elimination(serv)
if (not self.verif_gg(serv)):
serv.privmsg(self.chan, "C'est au tour de " + self.order[self.curr] + " !")
self.state = 'ENCHERES'
self.nb = 0
self.val = 0
self.melange(serv)
def on_pubmsg(self, serv, ev):
author = irclib.nm_to_n(ev.source())
message = ev.arguments()[0].lower()
if message == "!regles":
serv.privmsg(self.chan, "Rappel des règles : on doit obligatoirement augmenter le nombre de dés, mais pas forcément la valeur "+\
"du dé (sauf bien sûr en passant par les 1). 'exact' ne peut être tenté que par le joueur à qui"+\
" c'est le tour. On ne peut pas avoir plus de 5 dés. En 1 vs 1, 'exact' fait perdre un dé. "+\
"Si vous n'aimez pas ces règles, pingez 'traklon' pour l'insulter.")
elif message == "!comm":
serv.privmsg(self.chan, "Annoncer une valeur (ex : 4 fois le dé 6) : 4 6. On ne peut pas écrire paco pour les 1.")
serv.privmsg(self.chan, "Annoncer 'exact' : exact OU calza.")
serv.privmsg(self.chan, "Confondre un menteur : menteur OU dudo OU faux")
serv.privmsg(self.chan, "Meta-jeu : !nbde pour le nombre total de dés. !ordre pour l'ordre du jeu"+\
". !recap pour un récapitulatif du nombre de dés de chacun. !suiv pour avoir le nom du joueur suivant."+\
" !quit pour annuler la partie.")
elif message == "!nbde":
somme = 0
for player in self.players.iterkeys():
somme += len(self.players[player])
serv.privmsg(self.chan, "Il y a " + str(somme) + " dés en jeu.")
elif message == "!quit":
serv.privmsg(self.chan, "Partie annulée !")
self.reset()
elif message == "!recap":
for player in self.players.iterkeys():
serv.privmsg(self.chan, player + " a " + str(len(self.players[player])) + " dés.")
elif message == "!ordre":
serv.privmsg(self.chan, "L'ordre de jeu est : " + ", ".join(self.order))
elif ((message == "!suiv") and (self.state != 'PRE-JEU')):
serv.privmsg(self.chan, "C'est au tour de " + self.order[self.curr] + " !")
elif self.state == 'PRE-JEU':
if ((message == "!join") and (not (author in self.players))):
serv.privmsg(self.chan, author + " a rejoint la partie !")
self.players[author] = [1,1,1,1,1]
self.order.append(author)
if ((message == "!play") and (author in self.players)):
if (len(self.players) == 1):
serv.privmsg(self.chan, "Jouer seul, c'est pas super intéressant...")
else:
serv.privmsg(self.chan, "La partie débute !")
serv.privmsg(self.chan, "Tapez !regles pour connaître les règles en vigueur.")
serv.privmsg(self.chan, "Tapez !comm pour connaîtres les commandes (normalement assez intuitives).")
serv.privmsg(self.chan, "L'ordre de jeu est : "+ ", ".join(self.order))
self.state = 'ENCHERES'
self.melange(serv)
elif message == "!join":
if (not (author in self.players)):
serv.privmsg(self.chan, author + " rejoindra la partie à la fin du tour !")
self.join.append(author)
elif self.state == 'ENCHERES':
if author == self.order[self.curr] :
if re.match (r'^[1-9][0-9]* [1-6]$', message):
tmp_nb = int(message[:-2])
tmp_val = int(message[-1])
if self.palifico :
verif = (tmp_val == self.val) and (tmp_nb > self.nb)
else:
verif = (((self.val == 1) and (((tmp_val > 1) and (tmp_nb > 2*self.nb)) or ((tmp_val == 1) and (tmp_nb > self.nb)))) or ((self.val > 1) and (((tmp_val == 1) and (tmp_nb*2 >= self.nb)) or ((tmp_val > 1) and (tmp_nb > self.nb)))))
if self.val == 0:
verif = self.palifico or tmp_val != 1
if verif:
self.nb = tmp_nb
self.val = tmp_val
self.curr = (self.curr+1)%(len(self.players))
serv.privmsg(self.chan, "C'est au tour de " + self.order[self.curr] + " !")
else:
serv.privmsg(self.chan, "Enchère erronée !")
elif (((message == 'faux') or (message == 'menteur') or (message == 'dudo')) and self.val > 0):
self.state = 'FAUX'
elif (((message == 'exact') or (message == 'calza')) and self.val > 0):
self.state = 'EXACT'
elif re.match (r'^[0-9]+ [0-9]$', message):
serv.privmsg(self.chan, "Crétin.")
else:
if (re.match (r'^[1-9][0-9]* [1-6]$', message) or (message == 'exact') or (message == 'menteur') or (message == 'dudo') or (message == 'faux') or (message == 'calza')):
serv.privmsg(self.chan, author + " : ce n'est pas ton tour, mais celui de " + str(self.order[self.curr]) + " !")
if self.state == 'FAUX':
somme = self.verif(serv)
if (self.nb <= somme):
serv.privmsg(self.chan, "Avec " + str(somme) + " " + str(self.val) + ", l'enchère était correcte ! " + author + " perd un dé !")
else:
self.curr = (self.curr+len(self.players)-1)%(len(self.players))
serv.privmsg(self.chan, "Avec " + str(somme) + " " + str(self.val) + ", l'enchère était fausse ! " + self.order[self.curr] + " perd un dé !")
self.players[self.order[self.curr]] = self.players[self.order[self.curr]][1:]
self.nouv_tirage(serv)
if self.state == 'EXACT':
somme = self.verif(serv)
if (self.nb == somme):
if (len(self.players) > 2):
serv.privmsg(self.chan, "Avec " + str(somme) + " " + str(self.val) + ", l'enchère était exacte ! " + author + " gagne un dé !")
if len(self.players[author]) < 5:
(self.players[author]).append(1)
else:
self.curr = (self.curr+len(self.players)-1)%(len(self.players))
serv.privmsg(self.chan, "Avec " + str(somme) + " " + str(self.val) + ", l'enchère était exacte ! " + self.order[self.curr] + " perd un dé !")
self.players[self.order[self.curr]] = self.players[self.order[self.curr]][1:]
else:
serv.privmsg(self.chan, "Avec " + str(somme) + " " + str(self.val) + ", l'enchère était inexacte ! " + author + " perd un dé !")
self.players[self.order[self.curr]] = self.players[self.order[self.curr]][1:]
self.nouv_tirage(serv)
if __name__ == "__main__":
PeruBot().start()
| UTF-8 | Python | false | false | 10,607 | py | 2 | Perudo2.py | 1 | 0.498248 | 0.489533 | 0 | 236 | 43.733051 | 252 |
WagnerAndrade-DEV/Python-Basics | 3,659,312,156,418 | fd19c04065ef2c9b844fe174ee30c6cd2302412b | 2113e1f991c99d101deeb0e8911288fc70297d73 | /exercicios_resolvidos/ex001.py | a8007c5edbb5d1291bab3cfe30098346603351c0 | [
"MIT"
]
| permissive | https://github.com/WagnerAndrade-DEV/Python-Basics | ea51ff44feab8b3ebb6cb2cf464228284ef5376c | 77b6f4b48721809c6a13ddbb7b7bc4c3bc9f712f | refs/heads/main | 2023-08-15T21:33:05.544598 | 2021-09-22T02:19:57 | 2021-09-22T02:19:57 | 404,154,691 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #crie um programa que monstre uma mensagem
print('Hello World') | UTF-8 | Python | false | false | 64 | py | 16 | ex001.py | 15 | 0.78125 | 0.78125 | 0 | 3 | 20.666667 | 42 |
DependableSystemsLab/GPU-Trident | 12,635,793,835,783 | 36151b29f2e9db0748666c051990bf226f2a18e3 | 6dac5ee64421d6d2c3a0470ccfae28babada8b2e | /benchmarks/SRAD K1/config_gen.py | 501c9e80ff0e4f6db1a1b897363e8294f5b8e2ac | [
"MIT"
]
| permissive | https://github.com/DependableSystemsLab/GPU-Trident | 661cc6e2c68e98371cd9f574cc8e4635b1b9daa2 | c734cd8a18146869fc915af73a6ca13ceca35c0b | refs/heads/master | 2022-12-17T03:38:21.185947 | 2020-08-26T21:51:42 | 2020-08-26T21:51:42 | 156,037,899 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | X_threads = 16*8
Y_threads = 16*8
Invoc_count = 2
start_index = 0
end_index = 0
src_list = ["srad.h","srad_kernel.cu"]
SHARED_MEM_USE = True
total_shared_mem_size = 6.1440*1000
domi_list = []
domi_val = []
| UTF-8 | Python | false | false | 209 | py | 921 | config_gen.py | 47 | 0.645933 | 0.559809 | 0 | 13 | 15.076923 | 38 |
TANUKIpro/color_tracker | 15,281,493,659,007 | f498fd367a55753aba19ac2b08beb7df7ff16a4b | e036ccf41984647b9d184a86c01cfc380b9fd254 | /consideration/all_rounder.py | 277f345cd9140c5dba2aec73f13ae0dec4deefbe | []
| no_license | https://github.com/TANUKIpro/color_tracker | 7e22e66fd5d16d198dc51906e6170a68d5dc664f | e55522784e053fbacede4545f35fb62015cbc450 | refs/heads/master | 2020-09-21T12:53:27.692038 | 2020-01-24T04:51:19 | 2020-01-24T04:51:19 | 224,795,123 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
import sys
try:
py_path = sys.path
ros_CVpath = '/opt/ros/kinetic/lib/python2.7/dist-packages'
if py_path[3] == ros_CVpath:
print("[INFO] : ROS and OpenCV are competing")
sys.path.remove(py_path[3])
except: pass
import cv2
import numpy as np
import time
import matplotlib.pyplot as plt
try:
import scipy.ndimage as ndimage
from scipy.optimize import curve_fit
except: pass
#setup global
image_frag = True
global S2G
S2G = int(460 + 477) # distance from START to GOAL (cm)
frame_rate = 30 # frame rate of the video
class Mouse:
def __init__(self, window_name, cp_image, org_image, HumanHeight):
self.cp_image = cp_image
self.org_image = org_image
self.event_call = 0
self.ClickedPoint = [None, None, None, None]
self.Prediction = [None, None, None, None]
self.mouseEvent = {"x":None, "y":None, "event":None, "flags":None}
#cv2.setMouseCallback(window_name, self.__NORMALCallBack, None)
cv2.setMouseCallback(window_name, self.__CallBack, None)
def __NORMALCallBack(self, eventType, x, y, flags, userdata):
self.mouseEvent["x"] = x
self.mouseEvent["y"] = y
self.mouseEvent["event"] = eventType
self.mouseEvent["flags"] = flags
def __CallBack(self, eventType, x, y, flags, userdata):
self.mouseEvent["x"] = x
self.mouseEvent["y"] = y
self.mouseEvent["event"] = eventType
self.mouseEvent["flags"] = flags
if self.mouseEvent["event"] == cv2.EVENT_LBUTTONDOWN:
self.event_call += 1
M_x = self.mouseEvent["x"]
M_y = self.mouseEvent["y"]
global image_frag
            # first click
if self.event_call == 1:
image_frag = True
self.ClickedPoint[0] = M_x
self.ClickedPoint[1] = M_y
print("1st : ", self.ClickedPoint[0], self.ClickedPoint[1])
cv2.circle(self.cp_image, (self.ClickedPoint[0], self.ClickedPoint[1]), 5, (0, 0, 255), -1)
cv2.putText(self.cp_image, "START", (self.ClickedPoint[0] - 40,
self.ClickedPoint[1] - 10),
cv2.FONT_HERSHEY_SIMPLEX, 1., (0, 0, 255), 2, cv2.LINE_AA)
            # second click
elif self.event_call == 2:
self.ClickedPoint[2] = M_x
self.ClickedPoint[3] = M_y
print("2nd : ", self.ClickedPoint[2], self.ClickedPoint[3])
cv2.circle(self.cp_image, (self.ClickedPoint[2], self.ClickedPoint[3]), 5, (0, 255, 0), -1)
cv2.putText(self.cp_image, "GOAL", (self.ClickedPoint[2] - 40,
self.ClickedPoint[3] - 10),
cv2.FONT_HERSHEY_SIMPLEX, 1., (0, 255, 0), 2, cv2.LINE_AA)
                # draw the line from START to GOAL
                cv2.line(self.cp_image, (self.ClickedPoint[0], self.ClickedPoint[1]),
                         (self.ClickedPoint[2], self.ClickedPoint[3]), (255, 0, 0), 2)
                # estimate the tracking band, derived from the subject's height
                S_x, S_y, G_x, G_y = self.ClickedPoint
                # compute the real distance per pixel and subtract it along the Y axis
                px_d = G_x - S_x
                K = S2G / px_d
                S2H = HumanHeight / K
                # draw it
HS_y = int(S_y - S2H)
HG_y = int(G_y - S2H)
cv2.line(self.cp_image, (S_x, HS_y), (G_x, HG_y),(70, 220, 140), 2)
self.Prediction = [S_x, HS_y, G_x, HG_y]
print("<Prediction array> : ", self.Prediction)
            # any click after the second
            elif self.event_call > 2:
                print("Reload Image")
                image_frag = False
                # reset the click count, the lines drawn on the image, and the stored points
self.cp_image = self.org_image
self.ClickedPoint = [None, None, None, None]
self.Prediction = [None, None, None, None]
self.event_call = 0
def getData(self):
return self.mouseEvent
def getEvent(self):
return self.mouseEvent["event"]
def getFlags(self):
return self.mouseEvent["flags"]
def posXY(self):
return (self.mouseEvent["x"], self.mouseEvent["y"])
def clicked_point(self):
return self.ClickedPoint
def prediction(self):
return self.Prediction
class HSV_supporter:
def __init__(self):
self.t_init = False
self.MB = True
self.fill_holes = False #Not working
self.opening = True
self.closing = True
self.ColorErase = False #Not working
self.kernel = np.ones((8,8),np.uint8)
self.center_x = 0
self.center_y = 0
self.window_name0 = "Serection"
self.window_name1 = "Frame"
self.min_HP = []
        self.K = 0                          # real distance per pixel (cm)
        self.frame_num = 0                  # total frame count
        self.frame_time = 1 / frame_rate    # time per frame
        self.N_frame_time = 0               # elapsed time at frame N
        self.velocity = 0                   # instantaneous velocity
    # initial trackbar setup
def TBcallback(self, x):
pass
def Trackbars_init(self):
cv2.namedWindow('value')
cv2.createTrackbar('H_Hue','value',179,179,self.TBcallback)
cv2.createTrackbar('L_Hue','value',0,179,self.TBcallback)
cv2.createTrackbar('H_Saturation','value',255,255,self.TBcallback)
cv2.createTrackbar('L_Saturation','value',0,255,self.TBcallback)
cv2.createTrackbar('H_Value','value',255,255,self.TBcallback)
cv2.createTrackbar('L_Value','value',0,255,self.TBcallback)
def color_detect(self, hsv, img):
hsv_min = np.array([15,127,0])
hsv_max = np.array([240,255,255])
mask = cv2.inRange(hsv, hsv_min, hsv_max)
return mask
    # velocity computation
    def _velocity(self, x, y):
        # convert pixels to real distance (m)
        __real_x = (self.K * x) * (1 / 100)
        __real_y = (self.K * y) * (1 / 100)
        # small displacement dx travelled between frame N and frame N+1;
        # x[1:] - x[:-1] subtracts each element from its neighbour
        __diff_real_x = __real_x[1:] - __real_x[:-1]
        __diff_real_y = __real_y[1:] - __real_y[:-1]
        # dx between adjacent frames
__dx = np.sqrt(__diff_real_x**2 + __diff_real_y**2)
__Velocity = __dx / self.frame_time
return __Velocity
    # blob analysis
def analysis_blob(self, binary_img):
label = cv2.connectedComponentsWithStats(binary_img)
n = label[0] - 1
data = np.delete(label[2], 0, 0)
center = np.delete(label[3], 0, 0)
maxblob = {}
        if len(data[:, 4]) == 0:
max_index = None
maxblob["center"] = [0, 0]
else:
max_index = np.argmax(data[:, 4])
maxblob["upper_left"] = (data[:, 0][max_index], data[:, 1][max_index])
maxblob["width"] = data[:, 2][max_index]
maxblob["height"] = data[:, 3][max_index]
maxblob["area"] = data[:, 4][max_index]
maxblob["center"] = center[max_index]
return data, center, maxblob
    # graph output
def data_plot(self, data, VideoName):
data_np = np.array(data)
if len(data_np) <= 0:
print("too many indices for array")
f = 0
x = 0
y = 0
t = 0
else:
f = data_np[:,0]
x = data_np[:,1]
y = data_np[:,2]
t = data_np[:,3]
        # compute the instantaneous velocity
Velocity = self._velocity(x, y)
fig, (axL, axR) = plt.subplots(ncols = 2, sharex = "none", figsize = (10,4))
fig.suptitle("VIDEO PATH : " + VideoName)
        # draw the first panel
axL.plot(f, x, "r-", linewidth=1.5, label = "x")
axL.plot(f, y, "g-", linewidth=1.5, label = "y")
axL.legend(fontsize=7,
frameon=True,
facecolor="lightgreen")
axL.set_title('< Frame - Pixel >')
axL.set_xlabel('frame[mai]')
axL.set_ylabel('Position[px]')
axL.grid(True)
        # draw the second panel
axR.plot(t[1:], Velocity, "b-", linewidth=1.5, label = "Velocity")
axR.legend(fontsize=7,
frameon=True,
facecolor="lightgreen")
axR.set_title('< Time - Velocity >')
axR.set_xlabel('Time[sec]')
axR.set_ylabel('Velocity[m/sec]')
axR.set_ylim(-0.5, 4)
axR.grid(True)
fig.tight_layout()
fig.subplots_adjust(top=0.85)
plt.show()
        # save the figure
sNAME = VideoName.strip('.mp4')
fig.savefig(sNAME + '.png')
    # read the trackbar callbacks and return the current values
def trackbars(self):
lowH = cv2.getTrackbarPos('L_Hue', 'value')
highH = cv2.getTrackbarPos('H_Hue', 'value')
lowS = cv2.getTrackbarPos('L_Saturation', 'value')
highS = cv2.getTrackbarPos('H_Saturation', 'value')
lowV = cv2.getTrackbarPos('L_Value', 'value')
highV = cv2.getTrackbarPos('H_Value', 'value')
return (lowH, lowS, lowV, highH, highS, highV)
#======================================================================
def median_blar(self, image, size):
_image = cv2.medianBlur(image, size)
return _image
def Fill_Holes(self, image):
_image = ndimage.binary_fill_holes(image).astype(int) * 255
return _image
def Opening(self, image):
opening = cv2.morphologyEx(image, cv2.MORPH_OPEN, self.kernel)
return opening
def Closing(self, image):
closing = cv2.morphologyEx(image, cv2.MORPH_CLOSE, self.kernel)
return closing
def color_eraser(self, image, RGBarray):
        #TODO: logic that erases a specific color
pass
#======================================================================
def resize_image(self, img, dsize, X, Y):
re_image = cv2.resize(img, dsize, fx=X, fy=Y)
return re_image
def px2cm(self, S_x, S_y, G_x, G_y):
        px_d = np.sqrt((S_x - G_x)**2 + (S_y - G_y)**2)
        # real distance per pixel
        K = S2G / px_d
return K
    # returns the value in a list that is closest to a given number
    def getNearestValue(self, array, num):
        """
        @param array: data array
        @param num: target value
        @return the value closest to the target
        """
#print(array)
if len(array) > 0:
print("array:{0}, num:{1}".format(array, num))
idx = np.abs(np.asarray(array) - num).argmin()
print(idx)
return(array[idx])
else:
pass
def main(self, videofile_path, HumanHeight):
data = []
cap = cv2.VideoCapture(videofile_path)
if self.t_init is True:
self.Trackbars_init()
if cap.isOpened():
print("[INFO] : Video loaded successfully.")
else:
print("[INFO] : LOAD ERROR *** Chack video path or name ***")
print("CAP : ", cap)
exit()
# Show only the first frame and let the user pick the START and GOAL positions
print("\n[INFO] : This is the 1st frame.\n[INFO] : Choose start and goal positions and Click.")
print("[INFO] : Quit order 'q' Key")
ret, frame0 = cap.read()
cp_frame0 = frame0.copy()
cv2.namedWindow(self.window_name0)
mouse = Mouse(self.window_name0, cp_frame0, frame0, HumanHeight)
# Selecting START and GOAL
while(cap.isOpened()):
if image_frag is not True:
cp_frame0 = frame0
cv2.imshow(self.window_name0, cp_frame0)
if cv2.waitKey(1) & 0xFF == ord('q'):
print("[INFO] : Order 'q' key. Proceed to the next step...../\n")
time.sleep(.2)
break
cv2.destroyAllWindows()
# Unpack the mouse coordinates and derive the pixel-to-distance scale
S_x, S_y, G_x, G_y = mouse.clicked_point()
self.K = self.px2cm(S_x, S_y, G_x, G_y)
print("[INFO] : Distance from START to GOAL is {0} pixel".format(G_x - S_x))
print("[INFO] : So {0}(cm) is calculated as {1} pixcel".format(S2G, G_x - S_x))
print("[INFO] : K is ", self.K)
# Main loop
while(cap.isOpened()):
ret, frame = cap.read()
if frame is None:
print("\nframe is None")
break
# Crop the frame to the START and GOAL chosen in the first step
h, w = frame.shape[:2]
frame = frame[:, S_x:G_x]
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
# Edit here to hard-code the HSV range
if self.t_init is True:
Lh, Ls, Lv = self.trackbars()[:3]
Hh, Hs, Hv = self.trackbars()[3:]
else:
# Blue
Lh, Ls, Lv = (40, 40, 109)
Hh, Hs, Hv = (121, 255, 255)
hsv_min = np.array([Lh,Ls,Lv])
hsv_max = np.array([Hh,Hs,Hv])
"""
print( "H:{0} - {1}\nS:{2} - {3}\nV:{4} - {5}\n-------------"
.format(Lh, Hh, Ls, Hs, Lv, Hv))
"""
mask = cv2.inRange(hsv, hsv_min, hsv_max)
# Choose the noise-removal algorithms to apply (True/False)
if self.MB is True: mask = self.median_blar(mask, 3)
if self.fill_holes is True: mask = self.Fill_Holes(mask)
if self.opening is True: mask = self.Opening(mask)
if self.closing is True: mask = self.Closing(mask)
if self.ColorErase is True: mask = self.color_eraser(mask, None)
_, center, maxblob = self.analysis_blob(mask)
print("\rtarget num:{0}, FRAME:{1}".format(len(center), self.frame_num), end="")
# Draw circles on every detected region for now
# Somewhat heavy; consider commenting this out depending on machine specs
for i in center:
cv2.circle(frame, (int(i[0]), int(i[1])), 10, (255, 0, 0),
thickness=-3, lineType=cv2.LINE_AA)
# Extract the largest-area region found by the blob analysis
self.center_x = int(maxblob["center"][0])
self.center_y = int(maxblob["center"][1])
# Draw a circle on the labeled blob
cv2.circle(frame, (self.center_x, self.center_y), 30, (0, 200, 0),
thickness=3, lineType=cv2.LINE_AA)
# Keep only data inside the selected START-GOAL range (discard the rest)
if S_x <= self.center_x <= G_x:
data.append([self.frame_num, self.center_x, self.center_y, self.N_frame_time])
self.N_frame_time = self.frame_time * self.frame_num
self.frame_num += 1
else: continue
cv2.imshow("first frame", cp_frame0)
frame=self.resize_image(frame, None, .8, .8)
cv2.imshow(self.window_name1, frame)
mask = self.resize_image(mask, None, .8, .8)
cv2.imshow("mask image", mask)
if cv2.waitKey(10) & 0xFF == ord('q'):
break
cap.release()
cv2.destroyAllWindows()
self.data_plot(data, videofile_path)
if __name__ == '__main__':
if len(sys.argv) == 1:
videofile_path = "20191122/nihongi_f_l1.mp4"
HumanHeight = 160.0
elif len(sys.argv) == 2:
videofile_path = sys.argv[1]
HumanHeight = 160.0
elif len(sys.argv) == 3:
videofile_path = sys.argv[1]
HumanHeight = sys.argv[2]
hsv_sup = HSV_supporter()
try:
hsv_sup.main(videofile_path, HumanHeight)
except ZeroDivisionError as err:
print(err, type(err))
else:
print("[INFO] : Completed successfully")
| UTF-8 | Python | false | false | 16,905 | py | 4 | all_rounder.py | 3 | 0.50699 | 0.482094 | 0 | 468 | 32.452991 | 111 |
capeexplain/cape | 3,393,024,164,083 | 3bba551cae907714a3bc2bd0cfee4fcb017a5168 | 2ad9f691e6f98bf32793fa492b3b72da24d77e32 | /capexplain/explain/explanation_crime.py | 01c7f4fb3be6297e2f0baa5ac0756fac4a2feb7b | [
"Apache-2.0"
]
| permissive | https://github.com/capeexplain/cape | a35e46e353499b149a9b329143e3e9ba60e9c254 | 99bac46b4f339a4019ed0017da56fb87bde66d21 | refs/heads/master | 2022-12-23T15:16:36.363312 | 2019-06-19T04:01:23 | 2019-06-19T04:01:23 | 191,392,833 | 0 | 1 | Apache-2.0 | false | 2022-06-21T22:09:27 | 2019-06-11T14:49:45 | 2019-06-19T04:01:45 | 2022-06-21T22:09:26 | 525 | 0 | 1 | 2 | Python | false | false | #!/usr/bin/python
# -*- coding:utf-8 -*-
import psycopg2
import sys, getopt
import pandas
import csv
#import statsmodels.formula.api as smf
from sklearn import preprocessing
import math
import time
from heapq import *
import re
import itertools
from capexplain.similarity.category_similarity_matrix import *
from capexplain.similarity.category_network_embedding import *
from capexplain.similarity.category_similarity_naive import *
from capexplain.similarity.category_similarity_matrix import *
from capexplain.similarity.category_network_embedding import *
from capexplain.utils import *
from capexplain.pattern_model.LocalRegressionPattern import *
DEFAULT_QUERY_RESULT_TABLE = 'crime_2017_2'
# DEFAULT_PATTERN_TABLE = 'pub_select'
DEFAULT_PATTERN_TABLE = 'crime_2017_2'
DEFAULT_USER_QUESTION_NUMBER = 5
EXAMPLE_NETWORK_EMBEDDING_PATH = './input/NETWORK_EMBEDDING'
EXAMPLE_SIMILARITY_MATRIX_PATH = './input/SIMILARITY_DEFINITION'
DEFAULT_AGGREGATE_COLUMN = '*'
DEFAULT_EPSILON = 0.25
TOP_K = 5
def build_local_regression_constraint(data, column_index, t, con, epsilon, agg_col, regression_package):
"""Build local regression constraint from Q(R), t, and global regression constraint
Args:
data: result of Q(R)
column_index: index for values in each column
t: target tuple in Q(R)
con: con[0] is the list of fixed attributes in Q(R), con[1] is the list of variable attributes in Q(R)
epsilon: threshold for local regression constraint
regression_package: which package is used to compute regression
Returns:
A LocalRegressionConstraint object whose model is trained on \pi_{con[1]}(Q_{t[con[0]]}(R))
"""
tF = get_F_value(con[0], t)
local_con = LocalRegressionConstraint(con[0], tF, con[1], agg_col, epsilon)
train_data = {agg_col: []}
for v in con[1]:
train_data[v] = []
# for index, row in data['df'].iterrows():
# if get_F_value(con[0], row) == tF:
# for v in con[1]:
# train_data[v].append(row[v])
# train_data[agg_col].append(row[agg_col])
for idx in column_index[con[0][0]][tF[0]]:
row = data['df'].loc[data['df']['index'] == idx]
row = row.to_dict('records')[0]
#print row
if get_F_value(con[0], row) == tF:
for v in con[1]:
train_data[v].append(row[v])
train_data[agg_col].append(row[agg_col])
if regression_package == 'scikit-learn':
train_x = {}
for v in con[1]:
if v in data['le']:
train_data[v] = data['le'][v].transform(train_data[v])
train_data[v] = data['ohe'][v].transform(train_data[v].reshape(-1, 1))
#print data['ohe'][v].transform(train_data[v].reshape(-1, 1))
train_x[v] = train_data[v]
else:
if v != agg_col:
train_x[v] = np.array(train_data[v]).reshape(-1, 1)
train_y = np.array(train_data[agg_col]).reshape(-1, 1)
train_x = np.concatenate(list(train_x.values()), axis=-1)
local_con.train_sklearn(train_x, train_y)
else:
#train_data = pandas.DataFrame(train_data)
formula = agg_col + ' ~ ' + ' + '.join(con[1])
local_con.train(train_data, formula)
return local_con
def predict(local_pattern, t):
# print('In predict ', local_pattern)
if local_pattern[5] == 'const':
# predictY = float(local_pattern[-1][1:-1])
predictY = float(local_pattern[-2][1:-1].split(',')[0])
elif local_pattern[5] == 'linear':
# print(local_pattern, t)
v = get_V_value(local_pattern[2], t)
# params = list(map(float, local_pattern[-1][1:-1].split(',')))
params_str = local_pattern[-1].split('\n')
# params = list(map(float, ))
# print(params_str, v)
params_dict = {}
for i in range(0, len(params_str)-1):
# p_cate = re.compile(r'(.*)\[T\.\s+(.*)\]\s+(-?\d+\.\d+)')
p_cate = re.compile(r'(.*)\[T\.\s*(.*)\]\s+(-?\d+\.\d+)')
cate_res = p_cate.findall(params_str[i])
#print(params_str[i], cate_res)
if len(cate_res) != 0:
cate_res = cate_res[0]
v_attr = cate_res[0]
v_val = cate_res[1]
param = float(cate_res[2])
if v_attr not in params_dict:
params_dict[v_attr] = {}
params_dict[v_attr][v_val] = param
else:
p_nume = re.compile(r'([^\s]+)\s+(-?\d+\.\d+)')
nume_res = p_nume.findall(params_str[i])
if len(nume_res) == 0:
continue
# print(nume_res)
v_attr = nume_res[0][0]
param = float(nume_res[0][1])
params_dict[v_attr] = param
predictY = 0.0
# print(params_dict)
for v_attr, v_dict in params_dict.items():
# print(v_attr, v_dict, t)
if v_attr == 'Intercept':
predictY += v_dict
else:
if isinstance(v_dict, dict):
# print(t, v_attr, v_dict)
v_key = t[v_attr].replace('\'', '').replace(' ', '')
# print(v_attr, v_key)
# print(v_dict.keys())
if v_key in v_dict:
predictY += v_dict[v_key]
else:
if v_attr in t:
predictY += v_dict * t[v_attr]
# predictY = sum(map(lambda x: x[0]*x[1], zip(params[:-1], v))) + params[-1]
return predictY
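# A sketch of the statsmodels-style parameter text the regexes above expect
# (assumed layout; coefficients are hypothetical):
#   Intercept          12.3456
#   district[T. 5]     -0.1234
#   year                0.5678
# For a 'linear' pattern and a tuple t with t['district'] == '5', this yields
# predictY = 12.3456 - 0.1234 + 0.5678 * t['year'].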
def validate_local_regression_pattern(local_pattern, epsilon, t, dir, agg_col, cur, table_name):
"""Check the validicity of the user question under a local regression constraint
Args:
local_pattern:
t: target tuple in Q(R)
dir: whether user thinks t[agg(B)] is high or low
agg_col: the column of aggregated value
Returns:
the actual direction that t[agg(B)] compares to its expected value, and the expected value from local_con
"""
print(local_pattern, t)
test_tuple = {}
# print('PAT', local_pattern)
if isinstance(local_pattern, dict):
return -2, 0
for v in local_pattern[2]:
test_tuple[v] = t[v.replace(' ', '')]
# if regression_package == 'scikit-learn':
# for v in local_con.var_attr:
# if v in data['le']:
# test_tuple[v] = data['le'][v].transform(test_tuple[v])
# test_tuple[v] = data['ohe'][v].transform(test_tuple[v].reshape(-1, 1))
# else:
# test_tuple[v] = np.array(test_tuple[v]).reshape(-1, 1)
# test_tuple = np.concatenate(list(test_tuple.values()), axis=-1)
# predictY = local_con.predict_sklearn(test_tuple)
# else:
# predictY = local_con.predict(pandas.DataFrame(test_tuple))
predictY = predict(local_pattern, test_tuple)
if t[agg_col] < (1-epsilon) * predictY:
# print(test_tuple, predictY)
return -dir, predictY
elif t[agg_col] > (1+epsilon) * predictY:
# print(test_tuple, predictY)
return dir, predictY
else:
return 0, predictY
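# e.g. with epsilon = 0.25, an aggregate more than 25% above the prediction
# returns (dir, predictY) -- the question holds in the user's direction --
# while a value inside the [0.75, 1.25] * predictY band returns (0, predictY).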
def tuple_similarity(t1, t2, var_attr, cat_sim, num_dis_norm, agg_col):
"""Compute the similarity between two tuples t1 and t2 on their attributes var_attr
Args:
t1, t2: two tuples
var_attr: variable attributes
cat_sim: the similarity measure for categorical attributes
num_dis_norm: normalization terms for numerical attributes
agg_col: the column of aggregated value
Returns:
the Gower similarity between t1 and t2
"""
sim = 0.0
cnt = 0
for v_col in var_attr:
col = v_col.replace(' ', '')
if t1[col] is None or t2[col] is None:
continue
if cat_sim.is_categorical(col):
t1_key = t1[col].replace("'", '').replace(' ', '')
t2_key = t2[col].replace("'", '').replace(' ', '')
s = cat_sim.compute_similarity(col, t1_key, t2_key, agg_col)
# print(s)
sim += s
else:
print(col, t1, t2, num_dis_norm[col])
# print( num_dis_norm[col]['range'])
if num_dis_norm[col]['range'] is None:
if t1[col] == t2[col]:
sim += 1
else:
if col != agg_col and col != 'index':
temp = abs(t1[col] - t2[col]) / num_dis_norm[col]['range']
sim += 1-temp
cnt += 1
# print(t1, t2, sim)
return sim / cnt
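# Worked example (hypothetical): with var_attr = ['year'], a range of 10 in
# num_dis_norm, t1['year'] = 2015 and t2['year'] = 2017, sim = 1 - 2/10 and the
# function returns 0.8; categorical columns contribute cat_sim scores instead.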
def get_local_pattern(g_pat, t, cur, table_name):
tF = get_F_value(g_pat[0], t)
# print(tF)
local_pattern_query = '''SELECT * FROM {} WHERE REPLACE(fixed_value, ' ', '')=REPLACE('{}', ' ', '') AND variable='{}' AND in_a='{}' AND agg='{}' AND model='{}';'''.format(
table_name + '_local', str(tF).replace("\'", ''),
str(g_pat[1]).replace("\'", ''), g_pat[2], g_pat[3], g_pat[4]
)
# print(local_pattern_query)
cur.execute(local_pattern_query)
res = cur.fetchall()
l_pat = []
# print('In get local patters: ', res)
if len(res) != 0:
for i in range(3):
l_pat.append(res[0][i][1:-1].split(','))
l_pat[0] = list(map(lambda x: x.replace(' ', ''), l_pat[0]))
l_pat[2] = list(map(lambda x: x.replace(' ', ''), l_pat[2]))
for i in range(3, len(res[0])):
l_pat.append(res[0][i])
# print(l_pat)
return l_pat
else:
l_pat.append(g_pat[0])
l_pat.append(tF)
for i in range(1, len(g_pat)):
l_pat.append(g_pat[i])
return {'hold': False, 'pattern': l_pat}
def get_local_patterns(global_patterns, t, cur, table_name):
local_patterns = []
local_patterns_dict = {}
for pat in global_patterns:
F_set = set(pat[0])
V_set = set(pat[1])
t_set = set(t.keys())
FV_set = F_set.union(V_set)
if not (FV_set.issubset(t_set) and len(FV_set) == len(t_set) - 1
and '{}({})'.format(pat[3], pat[2]) in t):
# if F_set.union(V_set) != t_set:
continue
# print(pat[0], t)
local_patterns.append(get_local_pattern(pat, t, cur, table_name))
# for num_of_fix in range(1, min(len(t.keys()),4)):
# for possible_f_attr in itertools.combinations(list(t.keys())[:-1], num_of_fix):
# tF = get_F_value(possible_f_attr, t)
# local_pattern_query = '''SELECT * FROM {} WHERE REPLACE(fixed_value, ' ', '')=REPLACE('{}', ' ', '')'''.format(
# table_name + '_local', str(tF).replace("\'", '"'))
# # print(tF)
# print(local_pattern_query)
# cur.execute(local_pattern_query)
# res = cur.fetchall()
# # print('In get local patterns: ', res)
# for k in range(len(res)):
# l_pat = []
# if str(res[k]) in local_patterns_dict:
# continue
# else:
# local_patterns_dict[str(res[k])] = True
# for i in range(3):
# l_pat.append(res[k][i][1:-1].split(','))
# l_pat[0] = list(map(lambda x: x.replace(' ', ''), l_pat[0]))
# l_pat[2] = list(map(lambda x: x.replace(' ', ''), l_pat[2]))
# for i in range(3, len(res[0])):
# l_pat.append(res[0][i])
# # print(l_pat)
# if l_pat not in local_patterns:
# local_patterns.append(l_pat)
# print('GET ', local_patterns)
return local_patterns
def get_tuples_by_F(local_pattern, f_value, cur, table_name, cat_sim):
def tuple_column_to_str_in_where_clause_2(col_value):
# print(col_value, cat_sim.is_categorical(col_value[0]))
if cat_sim.is_categorical(col_value[0]):
return "like '%" + str(col_value[1]) + "%'"
else:
if is_float(col_value[1]):
return '=' + str(col_value[1])
else:
return "like '%" + str(col_value[1]) + "%'"
F = str(local_pattern[0]).replace("\'", '')[1:-1]
V = str(local_pattern[2]).replace("\'", '')[1:-1]
F_list = F.split(', ')
V_list = V.split(', ')
where_clause = ' AND '.join(list(map(lambda x: "{} {}".format(x[0], x[1]), zip(local_pattern[0], map(tuple_column_to_str_in_where_clause_2, zip(F_list, f_value))))))
tuples_query = '''SELECT {},{},{}({}) FROM {} WHERE {} GROUP BY {}, {};'''.format(
F, V, local_pattern[4], local_pattern[3], table_name, where_clause, F, V
)
# tuples_query = "SELECT * FROM {} WHERE {};".format(table_name, where_clause)
# column_name_query = "SELECT column_name FROM information_schema.columns where table_name='{}';".format(table_name)
# print(column_name_query)
# cur.execute(column_name_query)
# column_name = cur.fetchall()
column_name = F_list + V_list + [local_pattern[4] + '(' + local_pattern[3] + ')']
cur.execute(tuples_query)
# print(tuples_query)
tuples = []
res = cur.fetchall()
min_agg = 1e10
max_agg = -1e10
for row in res:
min_agg = min(min_agg, row[-1])
max_agg = max(max_agg, row[-1])
tuples.append(dict(zip(map(lambda x: x, column_name), row)))
# row_data = {}
# cnt = 0
# for f_attr in F_list:
# if is_float(row[cnt]):
# row_data[f_attr] = float(row[cnt])
# elif is_integer(row[cnt]):
# row_data[f_attr] = float(int(row[cnt]))
# else:
# row_data[f_attr] = t[cnt]
# cnt += 1
# for v_attr in V_list:
# if is_float(row[cnt]):
# row_data[v_attr] = float(row[cnt])
# elif is_integer(row[cnt]):
# row_data[v_attr] = float(int(row[cnt]))
# else:
# row_data[v_attr] = row[cnt]
# cnt += 1
# row_data[pat[4] + '(' + pat[3] + ')'] = row[-1]
# tuples.append(row_data)
return tuples, max_agg - min_agg
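# The query assembled above has the following shape (illustrative values only):
#   SELECT district, year, count(*) FROM crime_2017_2
#   WHERE district like '%5%' GROUP BY district, year;
# Categorical fixed attributes are matched with LIKE, numeric ones with '='.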
def find_explanation_regression_based(user_question_list, global_patterns, cat_sim, num_dis_norm, epsilon, cur, pat_table_name, res_table_name):
"""Find explanations for user questions
Args:
data: data['df'] is the data frame storing Q(R)
data['le'] is the label encoder, data['ohe'] is the one-hot encoder
user_question_list: list of user questions (t, dir), all questions have the same Q(R)
cons: list of fixed attributes and variable attributes of global constraints
cat_sim: the similarity measure for categorical attributes
num_dis_norm: normalization terms for numerical attributes
cons_epsilon: threshold for local regression constraints
agg_col: the column of aggregated value
regression_package: which package is used to compute regression
Returns:
the top-k list of explanations for each user question
"""
answer = [[] for i in range(len(user_question_list))]
local_pattern_loading_time = 0
question_validating_time = 0
score_computing_time = 0
result_merging_time = 0
local_patterns_list = []
for j, uq in enumerate(user_question_list):
dir = uq['dir']
t = uq['target_tuple']
print(uq)
start = time.clock()
local_patterns = get_local_patterns(global_patterns, t, cur, res_table_name)
end = time.clock()
local_pattern_loading_time += end - start
candidate_list = [[] for i in range(len(local_patterns))]
top_k_lists = [[] for i in range(len(local_patterns))]
validate_res_list = []
local_patterns_list.append(local_patterns)
psi = []
explanation_type = 0
for i in range(0, len(local_patterns)):
psi.append(0)
start = time.clock()
# print('PAT', i, local_patterns[i])
if isinstance(local_patterns[i], list):
agg_col = local_patterns[i][4] + '(' + local_patterns[i][3] + ')'
else:
agg_col = 'count(*)'
validate_res, predicted_aggr = validate_local_regression_pattern(local_patterns[i], epsilon,
t, dir, agg_col,
cur, res_table_name
)
validate_res_list.append(validate_res)
end = time.clock()
question_validating_time += end - start
if validate_res < -1:
print ("The local regression constraint derived from the " + str(i+1) +
"th constraint and the target tuple does not hold")
explanation_type = 1
top_k_lists[i] = [1, local_patterns[i]]
elif validate_res == -1:
print ("The user question is invalid according to the local regression constraint derived from the "
+ str(i+1) + "th constraint and the target tuple -- in the opposite direction")
explanation_type = 2
top_k_lists[i] = [-2, local_patterns[i], t]
elif validate_res == 0:
print ("The user question is invalid according to the local regression constraint derived from the "
+ str(i+1) + "th constraint and the target tuple -- "
"the target tuple can be predicted by the constraint")
explanation_type = 2
top_k_lists[i] = [2, local_patterns[i], t]
else:
print ("The user question is valid according to the local regression constraint derived from the "
+ str(i+1) + "th constraint and the target tuple")
explanation_type = 3
#deviation = dir * (t[agg_col] - predicted_aggr) / predicted_aggr
cnt = 0
#for index, row in data['df'].iterrows():
start = time.clock()
f_value = get_F_value(local_patterns[i][0], t)
### replace with db query
tuples_same_F_value, agg_range = get_tuples_by_F(local_patterns[i], f_value, cur, res_table_name, cat_sim)
max_pred_aggr = -1e10
min_pred_aggr = 1e10
for row in tuples_same_F_value:
# print(row)
psi[-1] += row[agg_col]
t_sim = tuple_similarity(t, row, local_patterns[i][2], cat_sim, num_dis_norm, agg_col)
# print local_cons[i].var_attr, row
test_tuple = dict(zip(local_patterns[i][2],
map(lambda x:x, get_V_value(local_patterns[i][2], row))))
predicted_aggr = []
predicted_aggr = predict(local_patterns[i], test_tuple)
if abs(predicted_aggr) < 1e-8:
# print(local_patterns[i], test_tuple)
deviation = 0
else:
deviation = (row[agg_col] - predicted_aggr) / predicted_aggr
candidate_list[i].append([deviation, t_sim, row, predicted_aggr])
print(row, predicted_aggr, deviation, t_sim)
max_pred_aggr = max(max_pred_aggr, predicted_aggr)
min_pred_aggr = min(min_pred_aggr, predicted_aggr)
cnt += 1
# print(len(candidate_list[i]), cnt)
# print('In scoring: ', local_patterns[i])
pred_aggr_range = max_pred_aggr - min_pred_aggr
print(max_pred_aggr, min_pred_aggr)
for k in range(cnt):
deviation = candidate_list[i][k][0]
t_sim = candidate_list[i][k][1]
predicted_aggr = candidate_list[i][k][3]
# denominator = agg_range * agg_range
if pred_aggr_range < 1e-8:
denominator = 1
else:
denominator = pred_aggr_range * pred_aggr_range
score = math.sqrt(t_sim * t_sim + deviation * deviation / denominator)
# score = abs(deviation)
# print(deviation, t_sim, score, candidate_list[i][k][2])
if deviation == 0:
top_k_lists[i].append((-1e10, i, candidate_list[i][k][2]))
else:
top_k_lists[i].append((-dir * deviation / abs(deviation) * score, i, candidate_list[i][k][2]))
end = time.clock()
score_computing_time += end - start
# uses heapq to manipulate merge of explanations from multiple constraints
# start = time.clock()
# merge_top_k_list = []
# marked = {}
# for i in range(len(cons)):
# heapify(top_k_lists[i])
# heappush(merge_top_k_list, heappop(top_k_lists[i]))
# answer[j] = [{} for i in range(TOP_K)]
# cnt = 0
# while cnt < TOP_K:
# poped_tuple = heappop(merge_top_k_list)
# if poped_tuple[2] in marked:
# continue
# marked[poped_tuple[2]] = True
# answer[j][cnt] = (-poped_tuple[0], poped_tuple[1], poped_tuple[2])
# heappush(merge_top_k_list, heappop(top_k_lists[poped_tuple[1]]))
# cnt += 1
# end = time.clock()
# result_merging_time += end - start
for i in range(len(local_patterns)):
# print("TOP K: ", top_k_lists[i])
if len(top_k_lists[i]) > 3:
answer[j].append(sorted(top_k_lists[i], key=lambda x: x[0], reverse=True)[0:TOP_K])
else:
answer[j].append(top_k_lists[i])
# print(len(answer[j][-1]))
#answer[j] = top_k_lists
print('Local pattern loading time: ' + str(local_pattern_loading_time) + 'seconds')
print('Question validating time: ' + str(question_validating_time) + 'seconds')
print('Score computing time: ' + str(score_computing_time) + 'seconds')
#print('Result merging time: ' + str(result_merging_time) + 'seconds')
return answer, local_patterns_list
def find_user_question(cur, global_patterns, table_name, max_uq_num, cat_sim):
'''
load user questions
'''
uq = []
for pat in global_patterns:
# print(pat)
F = str(pat[0]).replace("\'", '')[1:-1]
V = str(pat[1]).replace("\'", '')[1:-1]
agg_query = '''SELECT {},{},{}({}) FROM {} GROUP BY {}, {};'''.format(
F, V, pat[3], pat[2], table_name, F, V
)
cur.execute(agg_query)
res = cur.fetchall()
uq_cnt_cur_pat = 0
for t in res:
row_data = {}
F_list = F.split(', ')
V_list = V.split(', ')
if 'month' not in V_list:
break
cnt = 0
for f_attr in F_list:
if cat_sim.is_categorical(f_attr):
row_data[f_attr] = t[cnt]
else:
if is_float(t[cnt]):
row_data[f_attr] = float(t[cnt])
elif is_integer(t[cnt]):
row_data[f_attr] = float(int(t[cnt]))
else:
row_data[f_attr] = t[cnt]
cnt += 1
for v_attr in V_list:
if cat_sim.is_categorical(v_attr):
row_data[v_attr] = t[cnt]
else:
if is_float(t[cnt]):
row_data[v_attr] = float(t[cnt])
elif is_integer(t[cnt]):
row_data[v_attr] = float(int(t[cnt]))
else:
row_data[v_attr] = t[cnt]
cnt += 1
row_data[pat[3] + '(' + pat[2] + ')'] = t[-1]
l_pat = get_local_pattern(pat, row_data, cur, table_name)
if isinstance(l_pat, dict):
continue
validate_res, predicted_aggr = validate_local_regression_pattern(
l_pat, DEFAULT_EPSILON, row_data, 1, pat[3] + '(' + pat[2] + ')',
cur, table_name
)
if validate_res != 0:
uq.append({'target_tuple': row_data, 'dir': validate_res})
uq_cnt_cur_pat += 1
if uq_cnt_cur_pat > 0:
break
if len(uq) >= max_uq_num:
break
print(uq)
return uq[:max_uq_num]
def load_patterns(cur, pat_table_name):
'''
load pre-defined constraints(currently only fixed attributes and variable attributes)
'''
global_pattern_table = pat_table_name + '_global'
load_query = "SELECT * FROM {};".format(global_pattern_table)
cur.execute(load_query)
res = cur.fetchall()
print(res)
patterns = []
for pat in res:
if pat[0].find('community_area') != -1 and pat[0].find('district') != -1:
continue
if pat[1].find('community_area') != -1 and pat[1].find('district') != -1:
continue
if pat[0].find('community_area') != -1 and pat[1].find('district') != -1:
continue
if pat[1].find('community_area') != -1 and pat[0].find('district') != -1:
continue
# if pat[1].find('community_area') == -1 and pat[1].find('district') == -1:
# continue
# if pat[0].find('district') != -1 and pat[1].find('district') == -1:
# continue
patterns.append(list(pat))
patterns[-1][0] = patterns[-1][0][1:-1].replace(' ', '').split(',')
patterns[-1][1] = patterns[-1][1][1:-1].replace(' ', '').split(',')
return patterns
def output_explanation(outputfile, Q, explanations_list, local_patterns_list, global_patterns):
ofile = sys.stdout
if outputfile != '':
ofile = open(outputfile, 'w')
for i, top_k_list in enumerate(explanations_list):
ofile.write('User question {}: {}\n'.format(str(i+1), str(Q[i])))
print(len(top_k_list))
for k, list_by_pat in enumerate(top_k_list):
if list_by_pat[0] != 1:
if k < len(global_patterns):
ofile.write('Globally held pattern: {}\n'.format(str(local_patterns_list[i][k])))
else:
ofile.write('Locally held pattern: {}\n'.format(str(local_patterns_list[i][k])))
print(len(list_by_pat))
if isinstance(list_by_pat[0], int):
if list_by_pat[0] == 1:
ofile.write("The local regression constraint derived from the " + str(i+1) +
"th constraint and the target tuple does not hold\n")
elif list_by_pat[0] == -2:
ofile.write("The user question is invalid according to the local regression constraint derived from the "
+ str(i+1) + "th constraint and the target tuple -- in the opposite direction\n")
elif list_by_pat[0] == 2:
ofile.write("The user question is invalid according to the local regression constraint derived from the "
+ str(i+1) + "th constraint and the target tuple -- the target tuple can be predicted by the constraint\n")
else:
if len(list_by_pat) > 2:
for j in range(TOP_K):
e = list_by_pat[j]
if e[0] < 0:
break
ofile.write('------------------------\n')
print_str = ''
e_tuple = list(e[2].values())
# e_tuple_str = ','.join(e_tuple.to_string(header=False,index=False,index_names=False).split(' ')[1:])
e_tuple_str = ','.join(map(str, e_tuple))
ofile.write('Top ' + str(j+1) + ' explanation:\n')
# ofile.write('Constraint ' + str(e[1]+1) + ': [' + ','.join(global_patterns[e[1]][0]) + ']' + '[' + ','.join(global_patterns[e[1]][1]) + ']')
ofile.write('Constraint ' + str(e[1]+1) + ': [' + ','.join(local_patterns_list[i][e[1]][0]) + ']' +
'[' + ','.join(local_patterns_list[i][e[1]][1]) + ']' +
'[' + ','.join(local_patterns_list[i][e[1]][2]) + ']')
ofile.write('\n')
ofile.write('Score: ' + str(e[0]))
ofile.write('\n')
ofile.write('(' + e_tuple_str + ')')
ofile.write('\n')
else:
ofile.write('------------------------\n')
ofile.write('Explanation:\n')
ofile.write(str(list_by_pat) + '\n')
ofile.write('------------------------\n\n\n------------------------\n')
ofile.close()
def main(argv=[]):
query_result_table = DEFAULT_QUERY_RESULT_TABLE
pattern_table = DEFAULT_PATTERN_TABLE
user_question_number = DEFAULT_USER_QUESTION_NUMBER
outputfile = ''
epsilon = DEFAULT_EPSILON
aggregate_column = DEFAULT_AGGREGATE_COLUMN
try:
# conn = psycopg2.connect("host=216.47.152.61 port=5432 dbname=postgres user=antiprov password=test")
conn = psycopg2.connect("host=localhost port=5432 dbname=antiprov user=zjmiao password=keertijeff")
cur = conn.cursor()
except psycopg2.OperationalError:
print('Database connection failed!')
try:
opts, args = getopt.getopt(argv,"hq:p:un:o:e:a",["qtable=", "ptable=", "uqnum=","ofile=","epsilon=","aggregate_column="])
except getopt.GetoptError:
print('explanation.py -q <query_result_table> -p <pattern_table> -un <user_question_number> -o <outputfile> -e <epsilon>')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print('explanation.py -q <query_result_table> -p <pattern_table> -un <user_question_number> -o <outputfile> -e <epsilon>')
sys.exit(2)
elif opt in ("-q", "--qtable"):
query_result_table = arg
elif opt in ("-p", "--ptable"):
pattern_table = arg
elif opt in ("-un", "--uqnum"):
user_question_number = int(arg)
elif opt in ("-o", "--ofile"):
outputfile = arg
elif opt in ("-e", "--epsilon"):
epsilon = float(arg)
start = time.clock()
category_similarity = CategorySimilarityNaive(cur, pattern_table,
[('block', 'block_location'),
('community_area', 'community_area_location'),
('district', 'district_location')]
)
global_patterns = load_patterns(cur, pattern_table)
print(global_patterns)
Q = find_user_question(cur, global_patterns, pattern_table, user_question_number, category_similarity)
# category_similarity = CategorySimilarityMatrix(EXAMPLE_SIMILARITY_MATRIX_PATH)
#category_similarity = CategoryNetworkEmbedding(EXAMPLE_NETWORK_EMBEDDING_PATH, data['df'])
#num_dis_norm = normalize_numerical_distance(data['df'])
num_dis_norm = normalize_numerical_distance(cur=cur, table_name=query_result_table)
end = time.clock()
print('Loading time: ' + str(end-start) + 'seconds')
start = time.clock()
#regression_package = 'scikit-learn'
regression_package = 'statsmodels'
explanations_list, local_patterns_list = find_explanation_regression_based(Q, global_patterns, category_similarity,
num_dis_norm, epsilon,
cur, pattern_table, query_result_table)
end = time.clock()
print('Total querying time: ' + str(end-start) + 'seconds')
# for i, explanations in enumerate(explanations_list):
# ofile.write('User question ' + str(i+1) + ':\n')
# for j, e in enumerate(explanations):
# print_str = ''
# e_tuple = data['df'].loc[data['df']['index'] == e[2]]
# e_tuple_str = ','.join(e_tuple.to_string(header=False,index=False,index_names=False).split(' ')[1:])
# ofile.write('Top ' + str(j+1) + ' explanation:\n')
# ofile.write('Constraint ' + str(e[1]+1) + ': [' + ','.join(constraints[e[1]][0]) + ']' + '[' + ','.join(constraints[e[1]][1]) + ']')
# ofile.write('\n')
# ofile.write('Score: ' + str(e[0]))
# ofile.write('\n')
# ofile.write('(' + e_tuple_str + ')')
# ofile.write('\n')
# ofile.write('------------------------\n')
output_explanation(outputfile, Q, explanations_list, local_patterns_list, global_patterns)
print(Q)
if __name__ == "__main__":
main(sys.argv[1:])
| UTF-8 | Python | false | false | 33,191 | py | 45 | explanation_crime.py | 42 | 0.513971 | 0.502517 | 0 | 764 | 42.420157 | 176 |
briantsaunders/flask-vrfs-api | 2,860,448,243,067 | 2f788672720357c1ab56dc141d4f73854017ae44 | c1ef1a986f3f6c60c0605648c0f01f54e6755a85 | /app/schemas/response/vrf.py | df66155f28fcbffeb9ab8e8533d5eeb3a83c5bfa | [
"MIT"
]
| permissive | https://github.com/briantsaunders/flask-vrfs-api | 5f9a8c590512e1aacc6103a06292fe591d0b1c61 | 1378d0c6985af91f2cc89c86f5466381b7b19a86 | refs/heads/master | 2020-07-10T07:09:54.477055 | 2019-08-31T02:22:55 | 2019-08-31T02:22:55 | 204,201,073 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # import third party libs
from marshmallow import Schema, fields
class VrfResponseSchema(Schema):
name = fields.String()
table = fields.Integer()
vrf_id = fields.Integer()
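# Minimal usage sketch (field values are hypothetical):
#   VrfResponseSchema().dump({"name": "cust-a", "table": 100, "vrf_id": 1})
# serializes a VRF record for an API response.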
| UTF-8 | Python | false | false | 186 | py | 20 | vrf.py | 14 | 0.715054 | 0.715054 | 0 | 8 | 22.25 | 38 |
HX001/ECE444-project-backend | 8,572,754,736,464 | 2fee171b45ffe9145d5363154a23485e7ea636d5 | 66cd4a0be0bbe68b48b3c7aad62857268da11cb0 | /Blogs/blog_model.py | 70076ee245500b9777ffa3e0fd974fdc514c1470 | []
| no_license | https://github.com/HX001/ECE444-project-backend | 8d3d51c3309cd5b37a2be79635f6697177db415f | d6e6334f8d98df5f8423e6a80880848b605c5593 | refs/heads/master | 2023-01-13T02:08:38.401533 | 2020-11-19T00:19:22 | 2020-11-19T00:19:22 | 314,039,564 | 0 | 0 | null | false | 2020-11-18T20:44:09 | 2020-11-18T19:40:21 | 2020-11-18T20:27:15 | 2020-11-18T20:44:08 | 18 | 0 | 0 | 0 | Python | false | false | from init import db
from flask import Flask, request, jsonify
class Blogs(db.Document):
username = db.StringField(required=True)
title = db.StringField(required=True)
rating = db.IntField()
content = db.StringField(required=True)
imageURL = db.StringField()
def to_json(self):
return {
"id": str(self.pk),
"username": self.username,
"title": self.title,
"rating": self.rating,
"content": self.content,
"imageURL": self.imageURL,
"status": 'success'
}
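# to_json() renders a document as, e.g. (hypothetical values):
# {"id": "5f...", "username": "bob", "title": "Lunch", "rating": 4,
#  "content": "...", "imageURL": "...", "status": "success"}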
| UTF-8 | Python | false | false | 579 | py | 13 | blog_model.py | 12 | 0.57513 | 0.57513 | 0 | 21 | 26.52381 | 44 |
freeipa/freeipa-demo | 10,256,381,936,468 | 0acefa398c939a27f4e267eff3950c0ffa898dde | f3193ed65ab8f243e3b4d40473b69cd2d808cc6a | /freeipa_org_demo_reset.py | cba1406f906a69c2616acea728084eafffe8e275 | []
| no_license | https://github.com/freeipa/freeipa-demo | 0bc5ad5683580ef58189c1d9fa97094b3bcfc31e | c158c9293fcec6b976a314c434d3d61330bc6cde | refs/heads/master | 2023-04-30T21:25:50.395668 | 2021-03-25T13:17:34 | 2021-05-19T07:50:05 | 115,020,722 | 1 | 2 | null | false | 2021-05-19T07:50:06 | 2017-12-21T15:34:22 | 2019-09-03T11:01:27 | 2021-05-19T07:50:06 | 15 | 0 | 2 | 0 | Python | false | false | #!/usr/bin/env python3
import click
from freeipa_org_demo.reset import reset as demo_reset
from freeipa_org_demo.config import ec2_configuration
@click.command()
@click.option('--debug', is_flag=True,
help='Print debugging information')
@click.option("--unattended", is_flag=True,
help="Run unattended")
@click.option("--maint-mode", is_flag=True,
help="Run in maintenance mode (disables demo reset)")
@click.option("--no-rebuild", 'rebuild', flag_value=False, default=True,
help="Do not rebuild the demo instance")
@click.option("--no-eip", 'eip', flag_value=False, default=True,
help="Do not update EIP of the demo instance")
@click.option("--instance-type", type=str, default=ec2_configuration['instance_type'],
help="Instance type (defaults to {})".format(ec2_configuration['instance_type']))
def cli(debug, unattended, maint_mode, rebuild, eip, instance_type):
demo_reset(debug, unattended, maint_mode, rebuild, eip, instance_type)
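# Example invocations (sketch based on the options declared above; the
# instance type value is illustrative):
#   ./freeipa_org_demo_reset.py --unattended --no-rebuild
#   ./freeipa_org_demo_reset.py --maint-mode --instance-type t2.medium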
if __name__ == '__main__':
cli()
# Triggered by Amazon AWS
def handler(event, context):
print("Run FreeIPA Demo Lambda", event, context)
demo_reset(debug=True,
unattended=True,
maint_mode=False,
rebuild=True,
eip=True,
instance_type=ec2_configuration['instance_type'])
print("FreeIPA Demo Lambda finished")
return {'message': "FreeIPA Demo successfully (re)started"}
| UTF-8 | Python | false | false | 1,490 | py | 15 | freeipa_org_demo_reset.py | 12 | 0.649664 | 0.646309 | 0 | 36 | 40.388889 | 95 |
tgrx/obliviscor | 14,937,896,291,241 | 6ab83aa769a444e8c17a2cbcce745c432e816bb9 | 834d4fa3df2beb6146d137b456b67edf703fbbdb | /src/applications/reminders/migrations/0004_auto_20200426_1624.py | e2b525c413b81c8f21a5e187b3001c8168bcdd8b | [
"Apache-2.0"
]
| permissive | https://github.com/tgrx/obliviscor | 7e8e2cf9f0a60017809ab3a3299a2bd83210883d | 31f9a4476892460c931b9a8fc5403c3afcc47607 | refs/heads/master | 2022-05-24T08:01:06.371545 | 2020-04-27T14:01:40 | 2020-04-27T14:01:40 | 256,064,646 | 0 | 0 | Apache-2.0 | false | 2020-04-27T14:01:41 | 2020-04-15T23:59:59 | 2020-04-26T23:50:31 | 2020-04-27T14:01:41 | 114 | 0 | 0 | 4 | Python | false | false | # Generated by Django 3.0.5 on 2020-04-26 13:24
from django.conf import settings
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("reminders", "0003_auto_20200422_0431"),
]
operations = [
migrations.AlterField(
model_name="reminder",
name="notify_at",
field=models.DateTimeField(blank=True, db_index=True, null=True),
),
migrations.AlterField(
model_name="reminder",
name="participants",
field=models.ManyToManyField(
blank=True,
db_index=True,
related_name="participated_reminders",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="reminder",
name="status",
field=models.CharField(
choices=[
("CREATED", "Created"),
("DONE", "Done"),
("ENQUEUED", "Enqueued"),
("NOTIFIED", "Notified"),
],
db_index=True,
default="CREATED",
max_length=255,
),
),
]
| UTF-8 | Python | false | false | 1,338 | py | 127 | 0004_auto_20200426_1624.py | 100 | 0.499253 | 0.473842 | 0 | 45 | 28.733333 | 77 |
mustafaemreozmen/google-image-downloader | 2,602,750,220,972 | 6e3228199bd0bd64d810b474eda75ca3eceaec95 | eb1caca0e19691688834d31e68667531ef80f32b | /src/argParsing.py | e17613149d366ad46bf99b7cf61c9ca9e4a3acf1 | [
"MIT"
]
| permissive | https://github.com/mustafaemreozmen/google-image-downloader | 41f5eaa77d9f14d99f1c6e224d14f26f72819637 | 7528ba28958e6d2daa49360cf80c09b6b23b5069 | refs/heads/master | 2022-11-30T04:27:10.556829 | 2020-08-10T23:01:48 | 2020-08-10T23:01:48 | 286,595,747 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import argparse
def argParser():
parser = argparse.ArgumentParser(
prog='Photo Downloader (from Google)',
epilog='Developed by Mustafa Emre Ozmen with <3. Feel free to use and develop.'
)
parser.add_argument('--keyword',
help='Keyword for download images.',
required=True,
type=str)
parser.add_argument('--scrollQuantity',
help='Scroll quantity for Google Image page. Default Scroll Quantity: 5',
default=5,
type=int)
parser.add_argument('--quantity',
help='Quantity for downloading. Default Quantity: 10',
default=10,
type=int)
parser.add_argument('--width',
help='Width for downloading images. Default Width: 800',
default=800,
type=int)
parser.add_argument('--height',
help='Height for downloading images. Default Height: 480',
default=480,
type=int)
passedArgs = parser.parse_args()
return passedArgs | UTF-8 | Python | false | false | 1,248 | py | 5 | argParsing.py | 3 | 0.490385 | 0.47516 | 0 | 35 | 34.685714 | 97 |
italogsfernandes/telemedicine | 5,394,478,924,649 | 611ab657b4ba282186443fd14a08532196e45c46 | 11bc884ed5f5322ae9cd043c4b3b64eb63d37295 | /html/telespecialista.py | caf6fae519e99a7c1281e7ee13be4f74b04f7a67 | [
"MIT"
]
| permissive | https://github.com/italogsfernandes/telemedicine | 0650645e97419e0adbe529d57e0c7a1598f85ad1 | 3411c39f9366e2fb9aa92ac413183796d0fb813e | refs/heads/master | 2022-07-17T11:53:14.319347 | 2020-05-09T14:56:11 | 2020-05-09T14:56:11 | 262,388,986 | 0 | 0 | MIT | true | 2020-05-08T17:41:20 | 2020-05-08T17:41:20 | 2020-04-01T21:33:10 | 2018-12-25T09:29:28 | 4,375 | 0 | 0 | 0 | null | false | false | # -*- coding: utf-8 -*-
from flask import Flask, render_template, redirect, url_for, flash
from forms import DoctorRegistrationForm, LoginForm
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
# OVERRIDE THIS KEY TO USE ON YOUR SERVER
app.config['SECRET_KEY'] = '846d33d018c947de7832c0993498b2a1'
# CONFIGURE THE DATABASE LOCATION
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///site.db'
db = SQLAlchemy(app)
@app.route("/")
@app.route("/index")
def index():
return render_template("index.html")
@app.route("/testing")
def testing():
return "Testando ambiente"
@app.route("/home")
def home():
return render_template("home.html", title="Início")
@app.route("/account")
def account():
return render_template("account.html", title="Atualizar infos")
@app.route("/search_specialists")
def search_specialists():
return render_template("search_specialist.html", title="Buscar", specialties=specialties_dict.values(), image_file="static/profilePics/default.jpeg")
@app.route("/check_requests")
def check_requests():
return render_template("check_requests.html", title="Minhas solicitações")
@app.route("/register", methods=["POST", "GET"])
def register():
form = DoctorRegistrationForm();
if form.validate_on_submit():
flash(f'Sucesso no login para o usuário {form.email.data}!', category='success')
return redirect(url_for('register'))
# register.html STILL NEEDS TO BE UPDATED TO RECEIVE THE form. DELETE THIS ONCE DONE
return render_template("register.html", title="Cadastrar colaborador", form=form)
@app.route("/login", methods=["POST", "GET"])
def login():
form = LoginForm()
if form.validate_on_submit():
flash(f'Sucesso no login para o usuário {form.email.data}!', category='success')
return redirect(url_for('home'))
return render_template("login.html", title="Entar", form=form)
@app.route("/logout")
def logout():
return redirect(url_for("index"))
@app.route("/show_schedule")
def show_schedule():
return render_template("show_schedule.html", title="Minha agenda")
if __name__ == '__main__':
app.run(debug=True)
| UTF-8 | Python | false | false | 2,158 | py | 40 | telespecialista.py | 12 | 0.695026 | 0.683403 | 0 | 75 | 27.68 | 153 |
cinequant/pinalyzer | 10,969,346,513,151 | f426d4121cc6f3aa0b62c7e5284a367b34350316 | 1f60cf5b9b3bff326ec2a7cd7bde126558f93e2d | /Pinalyzer/map/user.py | 67a0eadc1da371d027654219bfed146ad366b3f1 | []
| no_license | https://github.com/cinequant/pinalyzer | 08e3175a1f24427c190ce067ac6498a7da7c1886 | 6c320b4ad8880d5bbd87943f6f282a1b3070fb70 | refs/heads/master | 2021-01-23T18:49:03.258705 | 2012-08-31T16:10:32 | 2012-08-31T16:10:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
from django.db import IntegrityError
from django.utils.simplejson import loads, dumps
from models import UserModel, LocationModel, UserStatModel
from userheader import UserHeader
from pinlistpage import PinListPage
import urllib3
import re
import string, random
import datetime
import fun
from threading import Thread, Lock
http = urllib3.PoolManager()
# Calculate from an adress, the corresponding geographic coordinates ( a (lat,lng) couple ), using geocoding service ( google map api).
def addressToLatLng(address):
"""
@param address: an 'address' as understood by Google Maps (which can be a street address, a city or a country)
@return (latitude,longitude)
"""
adr=string.replace(address, ' ', '+')
r=http.request('GET','https://maps.googleapis.com/maps/api/geocode/json?address='+adr+'&sensor=false')
json_output=r.data
output=loads(json_output)
if output['status'] != "OK":
raise Exception('status ='+output['status'])
return (output['results'][0]['geometry']['location']['lat'],output['results'][0]['geometry']['location']['lng'])
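# e.g. addressToLatLng("Paris, France") calls the Google geocoding API and
# returns roughly (48.86, 2.35); a non-"OK" API status raises an Exception.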
class User(object):
"""
Class for holding information about a Pinterest user without storing it in the DB
"""
@staticmethod
def getUserIdList(nb_page=10):
"""
Search for users on the popular page
@return: list of user ids (pinterest names)
"""
res=[]
for p in range(1,nb_page+1):
r=http.request('GET','http://pinterest.com/popular/?lazy=1&page='+str(p))
l=[match.group('id') for match in re.finditer(User.re_name,r.data) if match.group('id')[:3] !='all' ]
res.extend(list(set(l)))
return res
@staticmethod
def fetchPopularUsers(nb_page=10):
"""
Add users from the popular page to the database
"""
user_id_list=User.getUserIdList(nb_page)
total=0
not_fetched=0
for user_id in user_id_list:
total+=1
u=User(user_id)
try:
u.fetchUser()
u.fetchScoring()
u.saveDB()
except Exception:
not_fetched+=1
return not_fetched,total
@staticmethod
def fetchLatestStats():
"""
Update the stats of users already in the DB
"""
total = 0
not_fetched = 0
for user in UserModel.objects.all():
try:
d = datetime.datetime.now()
stat = user.userstatmodel_set.get(date__year=d.year, date__month=d.month, date__day=d.day)
except UserStatModel.DoesNotExist:
total += 1
u = User(user.user_id)
try:
u.fetchUser()
u.fetchScoring()
u.saveDB()
except Exception:
not_fetched += 1
return not_fetched,total
@staticmethod
def modelToUser(u_model):
"""
Create a User object from a user's data in the DB
@param: models.UserModel object (django ORM object)
@return: User object
"""
u=User(u_model.user_id)
u.name=u_model.name
u.photo_url=u_model.photo_url
u.location=u_model.location
return u
def __init__(self,user_id,location=None, name=None, photo_url=None,nb_followers=None,nb_following=None):
self.id=user_id # User id in pinterest
self.name=name # User name
self.photo_url=photo_url
self.followers=None
self.following=None
self.nb_followers=nb_followers
self.nb_following=nb_following
self.nb_pin=None
self.nb_repin=None
self.location=location # User location, e.g. "Paris,France"
self.lat=None # Latitude
self.lng=None # Longitude
def __str__(self):
return '{self.id},{self.name}'.format(self=self)
def __eq__(self,o):
return self.id==o.id
def __hash__(self):
return self.id
def url(self):
return 'http://www.pinterest.com/{0}'.format(self.id)
def fetchUser(self,header_info=None):
"""
Fetch information about a user.
Results stored in self.xxxx
"""
if header_info==None:
header_info=UserHeader(self.id)
header_info.fetch()
self.name=header_info.name
self.location=header_info.location
self.photo_url=header_info.photo_url
self.nb_board=header_info.nb_board
self.nb_pin=header_info.nb_pin
self.nb_like=header_info.nb_like
self.nb_followers=header_info.nb_followers
self.nb_following=header_info.nb_following
def fetchScoring(self):
"""
Fetch the information related to a user's scoring
"""
if not self.nb_pin:
self.fetchUser()
pins_info=PinListPage('{0}/pins/?page='.format(self.url()), self.id)
nb_pages=self.nb_pin/50 + (self.nb_pin%50 !=0)
pins_info.fetch(nb_pages)
pins_info.calcScoringInfo(self.nb_pin)
self.nb_liked=pins_info.nb_liked
self.nb_comment=pins_info.nb_comment
self.nb_repin=pins_info.nb_repin
def fetchFollowers(self, nb_page):
"""
Fetch a user's followers
Results stored in self.followers
"""
from followpage import FollowPage
f_page=FollowPage(self.id,'followers')
f_page.fetch(nb_page)
self.fetchUser(f_page.user_info)
self.saveDB()
self.followers=f_page.follow_list
def fetchFollowing(self, nb_page):
"""
Fetch a user's following
Results stored in self.following
"""
from followpage import FollowPage
f_page=FollowPage(self.id,'following')
f_page.fetch(nb_page)
self.fetchUser(f_page.user_info)
self.saveDB()
self.following=f_page.follow_list
def getBestFollowers(self,nb, key=lambda u: u.nb_followers):
"""
Return the most popular followers
"""
if self.followers==None:
self.fetchFollowers(1)
self.followers.sort(key=key)
nb=min(nb,len(self.followers))
return self.followers[-nb:]
def getBestFollowing(self,nb, key=lambda u: u.nb_followers):
"""
Return the most popular followings
"""
if self.following==None:
self.fetchFollowing(1)
self.following.sort(key=key)
nb=min(nb,len(self.following))
return self.following[-nb:]
def getAlikes(self):
"""
Return the people who follow the same people as the user,
ordered from least similar (fewest followings in common) to most similar (most followings in common)
"""
best_following=self.getBestFollowing(10)
if best_following:
best_following=random.sample(best_following,1)
alikes=[]
thread_list=[]
lock=Lock()
for u in best_following:
def task(user):
user.fetchFollowers(1)
best_followers=user.followers
lock.acquire()
alikes.extend(best_followers)
lock.release()
t=Thread(target=task,args=(u,))
thread_list.append(t)
for t in thread_list:
t.start()
for t in thread_list:
t.join()
alikes=fun.sort_by_freq(alikes)
alikes=filter(lambda x:x.id!=self.id, alikes)
return alikes
else:
return []
def getToFollow(self,alikes=None):
"""
Return the people to follow
"""
if not alikes:
alikes=self.getAlikes()
if alikes:
alike=alikes[-1]
to_follow=[]
cpt=0
i=0
l=alike.getBestFollowing(20)
while cpt<len(l) and i<len(l):
if l[i] not in self.following and l[i] not in to_follow:
to_follow.append(l[i])
cpt+=1
i+=1
return to_follow
else:
return []
def getToRepin(self,nb=10):
"""
Return the list of suggested pins
"""
nb_user=2
to_follow=self.getToFollow()[-10:]
if len(to_follow)>nb_user:
to_follow=random.sample(to_follow,nb_user)
pin_list=[]
thread_list=[]
lock=Lock()
for u in to_follow:
print u
def task(url,user_id):
pin_page=PinListPage('{0}/pins/?page='.format(url), user_id)
pin_page.fetch(1)
lock.acquire()
pin_list.extend(pin_page.pin_list)
lock.release()
t=Thread(target=task,args=(u.url(),u.id,))
thread_list.append(t)
for t in thread_list:
t.start()
for t in thread_list:
t.join()
pin_list.sort(key=lambda pin:-pin.nb_repin)
pin_list=pin_list[:(nb*3)]
return random.sample(pin_list, min(nb, len(pin_list)))
def saveDB(self):
"""
Save the user's info to the DB
"""
try:
u=UserModel.objects.get(user_id=self.id)
u.name=self.name
u.location=self.location
u.photo_url=self.photo_url
u.save()
except UserModel.DoesNotExist:
u=UserModel.objects.create(user_id=self.id,
name=self.name,
location=self.location,
photo_url=self.photo_url)
d=datetime.datetime.now()
if self.nb_repin !=None:
try:
u.userstatmodel_set.get(date__year=d.year, date__month=d.month, date__day=d.day)
except UserStatModel.DoesNotExist:
u.userstatmodel_set.create(date= d,
nb_board=self.nb_board,
nb_pin=self.nb_pin,
nb_like=self.nb_like,
nb_followers=self.nb_followers,
nb_following=self.nb_following,
nb_comment=self.nb_comment,
nb_repin=self.nb_repin,
nb_liked=self.nb_liked
)
return u
def calcLatLng(self):
"""
@return: the user's (latitude, longitude)
"""
# Get the user or create a new one.
if self.location !=None:
try:
u_model = UserModel.objects.get(pk=self.id)
except UserModel.DoesNotExist:
u_model = UserModel.objects.create(pk=self.id, address=self.location)
u_model.address = self.location # User address update on the db
u_model.save()
# Get the (lat,lng) or use geocoding
try:
loc_model=LocationModel.objects.get(pk=u_model.address)
self.lat,self.lng=(loc_model.lat,loc_model.lng)
except LocationModel.DoesNotExist:
try:
self.lat, self.lng =addressToLatLng(self.location) # Geocoding
LocationModel.objects.create(address=self.location, lat=self.lat, lng=self.lng) # Update the database
except Exception as e:
print e.args
def getFollowGroups(self,limit=1):
f_list=[]
group_list=[]
for f in self.fetchFollowers(1):
if f.lat !=None and f.lng != None:
f_list.append([f,True])
for f in self.fetchFollowing(1):
if f.lat !=None and f.lng != None:
f_list.append([f,False])
if f_list !=[]:
f_list.sort(key=lambda x:(x[0].lat,x[0].lng))
prec_lat=f_list[0][0].lat
prec_lng=f_list[0][0].lng
group_list.append([])
j=0
group_list[0].append(f_list[0])
for i in range(1,len(f_list)):
if (prec_lat !=f_list[i][0].lat) or (prec_lng != f_list[i][0].lng):
j+=1
group_list.append([])
group_list[j].append(f_list[i])
prec_lat=f_list[i][0].lat
prec_lng=f_list[i][0].lng
return group_list
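# Groups are keyed by identical (lat, lng) pairs: e.g. two followers geocoded
# to the same coordinates and one followee elsewhere yield
# [[[u1, True], [u2, True]], [[u3, False]]], the boolean marking
# follower (True) vs. following (False).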
def getFollowersJSON(self):
return dumps(self.followers)
def getFollowingJSON(self):
return dumps(self.following)
## test ##
if __name__=='__main__':
User.fetchLatestStats()
| UTF-8 | Python | false | false | 13,474 | py | 17 | user.py | 7 | 0.519631 | 0.514649 | 0 | 385 | 33.927273 | 135 |
Aviah/one-click-django-server | 16,071,767,640,559 | d37f2d5213dc80443fe0866105ef35383adc70ce | 0a855433f5a1c78fc7fcd58867c2e79f04cdb666 | /site_repo/settings_production.py | 0797a21d2c2e339c54005ae61cf54319933bcc81 | [
"MIT"
]
| permissive | https://github.com/Aviah/one-click-django-server | 0afc2a6dc04369294d9672b2644f777ccb50543a | ddce7181f025b7f8d0979d725f85f8124add6adf | refs/heads/master | 2020-12-24T19:27:36.914097 | 2016-03-18T18:36:33 | 2016-03-18T18:36:33 | 53,527,710 | 10 | 4 | null | null | null | null | null | null | null | null | null | null | null | null | null | #Production Settings
DEBUG = False
ALLOWED_HOSTS = ['.example.com']
DEBUG_LOG = False
DEBUG_DB_LOG = False | UTF-8 | Python | false | false | 107 | py | 39 | settings_production.py | 27 | 0.728972 | 0.728972 | 0 | 6 | 17 | 32 |
cornrn/planepower | 10,342,281,282,249 | c3ee56f46fefce7e8bda3087e59477f34b0acb0f | 11844a4b8f96dc49df3ceaff157e1bbddd95c5ba | /pythonCodegen/experimental/cse.py | 87d2e27510ce72b08b4e50fecc507ffc023f5fc4 | []
| no_license | https://github.com/cornrn/planepower | b479a061de013b22caf0ee0316017554551d2fcc | c41f38d8f7d540c005394c9baf97b21b357e081d | refs/heads/master | 2020-04-07T23:30:12.699403 | 2014-09-16T15:44:30 | 2014-09-16T15:44:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
# Functions for performing common subexpression
# elimination of casadi expressions.
import casadi
from casadi import SX
# Dictionary mapping hash values to git-style object signatures
# Something like a recursive hashmap
casadi_cse_recursive_hashmap = {}
# Dictionary mapping hash values to cse'd casadi expressions
casadi_cse_hashed_expressions = {}
# Dictionary mapping casadi expressions to hash values.
# Essentially the reverse of the above map, and including maps
# for nodes that have not been cse'd.
casadi_cse_expression_hashes = {}
# Some getOp() mappings:
# Binary:
# + 1
# - 2
# * 3
# / 4
binary_opcodes = [1,2,3,4]
# Unary:
# - 5
# sqrt 10
# sin 11
# cos 12
# tan 13
unary_opcodes = [5,10,11,12,13]
all_opcodes = binary_opcodes + unary_opcodes
def apply_unary_opcode(code, p):
assert code in unary_opcodes, "Opcode not recognized!"
if code==5:
return -p
elif code==10:
return casadi.sqrt(p)
elif code==11:
return casadi.sin(p)
elif code==12:
return casadi.cos(p)
elif code==13:
return casadi.tan(p)
assert False
def apply_binary_opcode(code, a, b):
assert code in binary_opcodes, "Opcode not recognized!"
if code==1:
return a+b
if code==2:
return a-b
if code==3:
return a*b
if code==4:
return a/b
assert False
def isBinaryOperator(node):
if node.isConstant() or node.isLeaf():
return False
assert node.getOp() in all_opcodes, "Opcode not recognized!"
return node.getOp() in binary_opcodes
def isUnaryOperator(node):
if node.isConstant() or node.isLeaf():
return False
assert node.getOp() in all_opcodes, "Opcode not recognized!"
return node.getOp() in unary_opcodes
def isLeaf(node):
return node.isLeaf()
mergeVariablesOfSameName = False
def hash_node(node):
if casadi_cse_expression_hashes.has_key(node):
return casadi_cse_expression_hashes[node]
if isBinaryOperator(node):
child0hash = hash_node(node.getDep(0))
child1hash = hash_node(node.getDep(1))
if node.isCommutative() and child1hash<child0hash:
child0hash,child1hash=child1hash,child0hash
mysig = (node.getOp(), child0hash, child1hash)
myhash = hash(mysig)
casadi_cse_recursive_hashmap[myhash] = mysig
casadi_cse_expression_hashes[node] = myhash
return myhash
if isUnaryOperator(node):
childhash = hash_node(node.getDep(0))
mysig = (node.getOp(), childhash)
myhash = hash(mysig)
casadi_cse_recursive_hashmap[myhash] = mysig
casadi_cse_expression_hashes[node] = myhash
return myhash
if node.isConstant():
mysig = (node.getValue(),)
myhash = hash(mysig)
casadi_cse_recursive_hashmap[myhash] = mysig
casadi_cse_expression_hashes[node] = myhash
return myhash
if node.isSymbolic():
global mergeVariablesOfSameName
if mergeVariablesOfSameName:
# Merge variables BY STRING NAME.
# Makes the hashmap pickleable, but forces user to use
# unique names for all expressions.
mysig = (node.getName(),)
else:
# Merge variables BY SX POINTER.
# Allows the user to have unnamed,
# or redundantly-named symbolics in an expression.
mysig = (node,)
myhash = hash(mysig)
casadi_cse_recursive_hashmap[myhash] = mysig
casadi_cse_expression_hashes[node] = myhash
return myhash
assert False, "Ran into an unhandled node type!"
# Take a the hash value for an (un-cse'd) casadi node,
# and return an equivalent cse'd casadi node.
def hash_to_casadi_node(h):
myhash = h
# Node already has an associated cse'd casadi node
if casadi_cse_hashed_expressions.has_key(myhash):
return casadi_cse_hashed_expressions[myhash]
mysig = casadi_cse_recursive_hashmap[myhash]
# Node is a leaf
if len(mysig)==1:
node = casadi.SX(mysig[0])
casadi_cse_hashed_expressions[myhash] = node
return node
# Node is a unary operator
if len(mysig)==2:
opcode = mysig[0]
childnode = hash_to_casadi_node(mysig[1])
node = apply_unary_opcode(opcode, childnode)
casadi_cse_hashed_expressions[myhash] = node
return node
# Node is a binary operator
if len(mysig)==3:
opcode = mysig[0]
childnode0 = hash_to_casadi_node(mysig[1])
childnode1 = hash_to_casadi_node(mysig[2])
node = apply_binary_opcode(opcode, childnode0, childnode1)
casadi_cse_hashed_expressions[myhash] = node
return node
# Hashing CSE using recursion
#
# Input:
# casadi expression
# Output:
# equivalent casadi expression, hopefully with fewer nodes.
def cse(node):
if type(node)==casadi.SX:
h = hash_node(node)
return hash_to_casadi_node(h)
if type(node)==casadi.SXMatrix:
M = node
shape = M.shape
l = list(M) # Column-major conversion of M to a python list
hashes = map(hash_node,l)
nodes = map(hash_to_casadi_node, hashes)
l_casadi = casadi.SXMatrix(nodes)
return l_casadi.reshape(shape)
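# Sketch of intended use (assumes this casadi version builds symbolics via the
# SX(name) constructor, as hash_node's isSymbolic() branch implies):
#   x = SX("x")
#   e = cse((x + 1) * (x + 1))  # both (x + 1) subtrees map to one shared node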
if __name__=='__main__':
pass
| UTF-8 | Python | false | false | 4,737 | py | 618 | cse.py | 364 | 0.719443 | 0.705721 | 0 | 175 | 26.062857 | 63 |
reportportal/service-auto-analyzer | 18,975,165,544,335 | aac5c399513706f5b8c972f146475974f2ed046d | 2aeffd32356d75b4317a146db3bd3965b9ddc76f | /commons/object_saving/object_saver.py | e7c24ce9833a0b4adc617112f5acab2a6eada997 | [
"Apache-2.0"
]
| permissive | https://github.com/reportportal/service-auto-analyzer | a1cb588c250bc36903a6260423c0c320144239bf | 4dfd729d437df1f7239d8ed1eec75d1e4185c920 | refs/heads/develop | 2023-08-07T20:43:32.963817 | 2023-06-27T10:03:58 | 2023-06-27T10:03:58 | 224,684,584 | 12 | 17 | Apache-2.0 | false | 2023-08-30T14:44:28 | 2019-11-28T15:43:43 | 2023-06-06T06:58:22 | 2023-08-30T14:44:26 | 41,420 | 10 | 16 | 2 | Python | false | false | """
* Copyright 2019 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
"""
import logging
from commons.object_saving.minio_client import MinioClient
from commons.object_saving.filesystem_saver import FilesystemSaver
logger = logging.getLogger("analyzerApp.objectSaver")
class ObjectSaver:
def __init__(self, app_config):
self.app_config = app_config
self.saving_strategy = {
"minio": self.create_minio,
"filesystem": self.create_fs
}
self.binarystore_type = "filesystem"
if "binaryStoreType" in self.app_config and\
self.app_config["binaryStoreType"] in self.saving_strategy:
self.binarystore_type = self.app_config["binaryStoreType"]
def create_minio(self):
return MinioClient(self.app_config)
def create_fs(self):
return FilesystemSaver(self.app_config)
def get_bucket_name(self, project_id):
return self.app_config["minioBucketPrefix"] + str(project_id)
def remove_project_objects(self, project_id, object_names):
self.saving_strategy[self.binarystore_type]().remove_project_objects(
self.get_bucket_name(project_id), object_names)
def put_project_object(self, data, project_id, object_name, using_json=False):
self.saving_strategy[self.binarystore_type]().put_project_object(
data, self.get_bucket_name(project_id),
object_name, using_json=using_json)
def get_project_object(self, project_id, object_name, using_json=False):
return self.saving_strategy[self.binarystore_type]().get_project_object(
self.get_bucket_name(project_id), object_name, using_json=using_json)
def does_object_exists(self, project_id, object_name):
return self.saving_strategy[self.binarystore_type]().does_object_exists(
self.get_bucket_name(project_id), object_name)
def get_folder_objects(self, project_id, folder):
return self.saving_strategy[self.binarystore_type]().get_folder_objects(
self.get_bucket_name(project_id), folder)
def remove_folder_objects(self, project_id, folder):
return self.saving_strategy[self.binarystore_type]().remove_folder_objects(
self.get_bucket_name(project_id), folder)
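# Example usage (a sketch; the config values are hypothetical, only the
# keys follow what this module reads):
#   saver = ObjectSaver({"binaryStoreType": "filesystem",
#                        "minioBucketPrefix": "prj-"})
#   saver.put_project_object({"a": 1}, 42, "model.json", using_json=True)
#   data = saver.get_project_object(42, "model.json", using_json=True)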
| UTF-8 | Python | false | false | 2,790 | py | 112 | object_saver.py | 54 | 0.695341 | 0.692473 | 0 | 69 | 39.434783 | 83 |
yaniv14/OpenCommunity | 15,032,385,562,023 | ea7f75fbd26c9ec616ce33b7667708b8093afbd5 | 4bc08a50c36a3814d7d74b616263b5c9be2795d5 | /src/communities/migrations/0012_auto_20160601_1133.py | d4bb3f6336119323e677cb15a62c07f584c318db | [
"BSD-2-Clause"
]
| permissive | https://github.com/yaniv14/OpenCommunity | 29afce604d11bee99bc339f4aa99006743d5880c | 549438b126797a252dbd554179b03764566b946a | refs/heads/master | 2021-01-18T09:54:22.190240 | 2017-11-09T16:43:38 | 2017-11-09T16:43:38 | 9,332,156 | 0 | 2 | null | true | 2016-07-25T11:48:23 | 2013-04-09T21:34:48 | 2015-04-15T20:51:18 | 2016-07-25T11:48:23 | 10,080 | 2 | 2 | 0 | JavaScript | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-01 08:33
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('communities', '0011_auto_20150615_1535'),
]
operations = [
migrations.AlterField(
model_name='communitygroup',
name='community',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='groups', to='communities.Community', verbose_name='Community'),
),
migrations.AlterField(
model_name='communitygroup',
name='title',
field=models.CharField(max_length=200, verbose_name='Title'),
),
migrations.AlterField(
model_name='communitygrouprole',
name='committee',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='group_roles', to='communities.Committee', verbose_name='Committee'),
),
migrations.AlterField(
model_name='communitygrouprole',
name='group',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='group_roles', to='communities.CommunityGroup', verbose_name='Group'),
),
migrations.AlterField(
model_name='communitygrouprole',
name='role',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='group_roles', to='acl.Role', verbose_name='Role'),
),
]
| UTF-8 | Python | false | false | 1,606 | py | 129 | 0012_auto_20160601_1133.py | 91 | 0.63076 | 0.608966 | 0 | 41 | 38.170732 | 164 |
saatchiy/cheers | 5,497,558,177,535 | 099be38a6a79a076910de715fd7e733ef231a0bb | 2910a376ea90c64833867057507f6da2bfd26864 | /test/test_runner.py | 5be02ee6a24bdcd4cbbe8097e9f0d3d00d0a1fbe | []
| no_license | https://github.com/saatchiy/cheers | aa9a8410161831dcd933b4c06b206b61f7e16535 | f289316e157826fbbffc1e7bd4a345d6ef10fb23 | refs/heads/master | 2020-12-02T12:47:11.010227 | 2017-08-02T02:41:03 | 2017-08-02T02:41:03 | 96,595,702 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import unittest
from pi_calculation_test import TestPi
from alpha_calculator_test import TestAlphaCalculation
from overlap_calculator_test import TestOverlapCalculation
if __name__ == '__main__':
runner = unittest.TextTestRunner()
test_suite = unittest.TestSuite()
test_suite.addTest(unittest.makeSuite(TestPi))
test_suite.addTest(unittest.makeSuite(TestAlphaCalculation))
test_suite.addTest(unittest.makeSuite(TestOverlapCalculation))
result = runner.run(test_suite)
print("---- START OF TEST RESULTS")
print(result)
print("result::errors")
print(result.errors)
print("result::failures")
print(result.failures)
print("result::skipped")
print(result.skipped)
print("result::successful")
print(result.wasSuccessful())
print("result::test-run")
print(result.testsRun)
print("---- END OF TEST RESULTS") | UTF-8 | Python | false | false | 878 | py | 21 | test_runner.py | 18 | 0.716401 | 0.716401 | 0 | 27 | 31.555556 | 66 |
dmitry-po/Yet_another_coding_course | 12,584,254,184,139 | b4f12ab02b10735cd7fb40210daff26b7f97db81 | 373a5c33a53f40f323c3aac736b7d5c3abc8da32 | /Python/evil.py | a39f14e405ef312478fae5b2cbb1fcc2ed5d0462 | []
| no_license | https://github.com/dmitry-po/Yet_another_coding_course | 30785d49ba7fe38ffd77f02ec45341e3cd5c5e45 | 0ef836d6613036dcc935feebaaf46620ae70383c | refs/heads/master | 2023-07-18T23:43:28.818958 | 2021-09-15T16:27:27 | 2021-09-15T16:27:27 | 248,457,595 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from sys import stdout
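# A deliberately broken module: it shadows the built-in print() and sum()
# to show why overriding builtins is dangerous (the off-by-one start value
# in sum() below is part of the joke).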
def print(*args, **kwargs):
    stdout.write('Bwahaha! The print function is broken!')
def sum(l):
s = 1
for i in l:
        s += i
        stdout.write('Bwahahaha')
    return s
| UTF-8 | Python | false | false | 226 | py | 20 | evil.py | 2 | 0.648936 | 0.643617 | 0 | 11 | 16.181818 | 53 |
AyWa/connected_poulailler | 7,894,149,904,572 | 9063ea924a7d3532ccbb0bf5aa386a59ae2e3d8c | 404ef3d5fc0d92281f7d703a0f94b8e317c5ebae | /script/servo.py | 616c7940f2c93576fb1cdc906be78469c9d07cde | [
"MIT"
]
| permissive | https://github.com/AyWa/connected_poulailler | 751d789b5fa7de5d83291f0ca3f1b4dac99be039 | f1c9b15ed24b5b1c7dfbade774bc1a2fcc2c4a9c | refs/heads/master | 2020-07-12T17:07:36.619741 | 2016-12-11T21:19:05 | 2016-12-11T21:19:05 | 73,902,866 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python
import time
import RPi.GPIO as GPIO
import sys
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(21,GPIO.OUT)
#dc =0
p = GPIO.PWM(21, 50)
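# At 50 Hz a typical hobby servo maps ~2.5% duty cycle to 0 degrees and
# ~7.5% to 90 degrees, which is what the ChangeDutyCycle calls below use.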
#print 'Arguments: ',str(sys.argv)
arg = sys.argv[1]
print arg
p.start(7.5)
try:
        if arg == 'true':
                p.ChangeDutyCycle(7.5)  # turn towards 90 degrees
                time.sleep(1)
                print 'Door opened'
        if arg == 'false':
                p.ChangeDutyCycle(2.5)  # 0 degrees
                time.sleep(1)
                print 'Door closed'
#p.ChangeDutyCycle(12.5) #180 degree
#time.sleep(1)
# while 1:
# for fc in range(0, 101, 2):
# p.ChangeDutyCycle(dc)
# time.sleep(0.1)
# for dc in range(100, -1, -2):
# p.ChangeDutyCycle(dc)
# time.sleep(0.1)
except KeyboardInterrupt:
# pass
p.stop()
GPIO.cleanup()
| UTF-8 | Python | false | false | 854 | py | 20 | servo.py | 11 | 0.567916 | 0.519906 | 0 | 40 | 20.35 | 48 |
babbitt/Slugs | 17,386,027,654,946 | c407ab61d189496714093742d1d3d0d9c3521bb2 | 66893207b22e42dae3a36786a27f88cc35d0cdc2 | /training/models.py | a4502232ee4df1166183cce3a4ba9ba23b3e7f7d | []
| no_license | https://github.com/babbitt/Slugs | deb4b9a9a78ba3420c0af3f0f7aa7aacda6eaa19 | f34007bcb5fff82e6f421bbccd99fe92c49d05f5 | refs/heads/master | 2021-01-06T09:33:37.552874 | 2020-06-18T00:19:22 | 2020-06-18T00:19:22 | 241,280,908 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.db import models
from gig.models import System
from employee.models import Employee
# Create your models here.
class TrainingRequest(models.Model):
training_datetime = models.DateTimeField(null=True)
system = models.ForeignKey(System, models.PROTECT)
class Trainee(models.Model):
linked_employee = models.ForeignKey(Employee, models.PROTECT)
class Trainer(models.Model):
    linked_employee = models.ForeignKey(Employee, models.PROTECT)
| UTF-8 | Python | false | false | 464 | py | 58 | models.py | 28 | 0.784483 | 0.784483 | 0 | 15 | 30 | 65 |
mntushar/django-dynamic-admin-dashboard | 16,312,285,835,066 | 574c1aa401d47f2e7bf900a8ef9261dacb4f122c | a7fd1d9ce894c4f783ca728f98ae46cd5e6f175f | /student/migrations/0001_initial.py | 2e3dc60d211622bfe6fc8e999e7482701d3801fb | []
| no_license | https://github.com/mntushar/django-dynamic-admin-dashboard | 29dd76a58e763e09ce25d2c1f73c9c154e5e0732 | 300e18d27d97c2cddaa40b6907a593d83fed4bf0 | refs/heads/main | 2023-06-24T05:15:52.407498 | 2021-07-31T05:55:04 | 2021-07-31T05:55:04 | 391,264,879 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 2.2.8 on 2021-07-31 03:13
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='UserAcademicInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('degree', models.CharField(blank=True, choices=[('psc', 'PSC'), ('jsc', 'JSC'), ('ssc', 'SSC'), ('hsc', 'HSC'), ('bsc-engineering', 'Bsc.Engineering'), ('m.engineering', 'M.Engineering'), ('d.engr.', 'D.Engr.'), ('doctor', 'Doctor'), ('md', 'MD'), ('ph.d', 'Ph.D'), ('b.sc.', 'B.sc.'), ('m.sc.', 'M.sc.'), ('bba', 'BBA'), ('mba', 'MBA'), ('ba', 'BA'), ('ma', 'Ma')], max_length=100, null=True)),
('last_passing_institution_name', models.CharField(blank=True, max_length=100, null=True)),
('last_passing_year', models.DateField(blank=True, null=True)),
('user_id', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='UserAddressInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('house_no', models.CharField(blank=True, max_length=50, null=True)),
('village_name', models.CharField(blank=True, max_length=50, null=True)),
('post_office', models.CharField(blank=True, max_length=50, null=True)),
('thana_name', models.CharField(blank=True, max_length=50, null=True)),
('district_name', models.CharField(blank=True, max_length=50, null=True)),
('user_id', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='UserBasicInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=100, null=True)),
('gender', models.CharField(blank=True, choices=[('male', 'Male'), ('female', 'Female')], max_length=6, null=True)),
('date_of_birth', models.DateField(blank=True, null=True)),
('phone_number', models.IntegerField(blank=True, null=True)),
('email', models.EmailField(blank=True, error_messages={'unique': 'This email has already been registered.'}, max_length=100, null=True, unique=True)),
('designation', models.CharField(blank=True, choices=[('student', 'Student')], max_length=10, null=True)),
('user_id', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='UserInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('user_academic', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='student.UserAcademicInfo')),
('user_address', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='student.UserAddressInfo')),
('user_basic', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='student.UserBasicInfo')),
],
),
]
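# To apply this migration (standard Django workflow, not specific to this
# project): python manage.py migrate student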
| UTF-8 | Python | false | false | 3,917 | py | 38 | 0001_initial.py | 19 | 0.599694 | 0.589482 | 0 | 62 | 62.177419 | 412 |
Baekjoon-Solutions/Baekjoon_Solutions | 10,514,079,990,397 | 8301139736654f9144a91a32b062e382f01ee4ee | eb64c7c1a3893c078146b2796ae128f5158ca6d5 | /Python/6086_최대_유량/6086_ford_fulkerson.py | 15ba90a6b89c8551dbbac87350084d79d0f7998c | [
"Apache-2.0"
]
| permissive | https://github.com/Baekjoon-Solutions/Baekjoon_Solutions | ec9d449deec40dae354569a3c7eec8bace49fb31 | 0cd3a57e08d7a12639aa43e6927d5f8a1ba09fbc | refs/heads/main | 2023-02-04T02:16:09.822904 | 2020-12-22T12:25:39 | 2020-12-22T12:25:39 | 303,272,803 | 5 | 12 | NOASSERTION | false | 2020-12-22T12:25:40 | 2020-10-12T04:00:55 | 2020-12-19T09:32:20 | 2020-12-22T12:25:39 | 67 | 7 | 8 | 0 | Java | false | false | import sys
from collections import defaultdict, deque
input = sys.stdin.readline
pipe = defaultdict(lambda:defaultdict(int))
num = int(input())
# graph input
for i in range(num):
one, two, flow = map(str, input().split())
pipe[one][two] += int(flow)
pipe[two][one] += int(flow)
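# Augmenting paths are found with BFS below, which makes this the
# Edmonds-Karp variant of Ford-Fulkerson, running in O(V * E^2).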
def bfs(start, sink, parent):
visited = defaultdict(lambda:0)
queue = deque()
queue.append(start)
visited[start] = 1
while queue:
u = queue.popleft()
for i in pipe[u]:
val = pipe[u][i]
if visited[i]:
continue
if val <= 0:
continue
queue.append(i)
visited[i] = 1
parent[i] = u
return 1 if visited[sink] else 0
def ford_fulkerson(start, sink):
parent = defaultdict(lambda : -1)
max_flow = 0
while bfs(start, sink, parent):
path_flow = float('inf')
s = sink
while s!= start:
path_flow = min(path_flow, pipe[parent[s]][s])
s = parent[s]
max_flow += path_flow
v = sink
while v != start:
u = parent[v]
pipe[u][v] -= path_flow
pipe[v][u] += path_flow
v = parent[v]
return max_flow
print(ford_fulkerson('A','Z'))
| UTF-8 | Python | false | false | 1,334 | py | 22 | 6086_ford_fulkerson.py | 18 | 0.502266 | 0.496224 | 0 | 51 | 24 | 58 |
hdeweirdt/imp | 9,637,906,620,067 | 276b560fb82b13cfeae1835062d7b464776dda90 | 8250dc408468b298c5ab1df57de434093a0a4985 | /src/Imp/agent/io/local.py | 434683cd1665e6e4c1c79bb97968cc9980f9e9c6 | [
"Apache-2.0"
]
| permissive | https://github.com/hdeweirdt/imp | 5643fc389b0bbc6ea5a58ee812bc3066301a5f9a | fced3e4ae17bc2f209c1a42528facaf68056256f | refs/heads/master | 2021-01-21T09:43:27.907131 | 2014-01-06T13:39:01 | 2014-01-06T13:39:01 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """
Copyright 2013 KU Leuven Research and Development - iMinds - Distrinet
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Administrative Contact: dnet-project-office@cs.kuleuven.be
Technical Contact: bart.vanbrabant@cs.kuleuven.be
"""
import hashlib, subprocess, os, pwd, grp, shutil
try:
from pwd import getpwnam
except ImportError:
getpwnam = None
try:
from grp import getgrnam
except ImportError:
getgrnam = None
class LocalIO(object):
"""
This class provides handler IO methods
"""
def hash_file(self, path):
sha1sum = hashlib.sha1()
with open(path, 'rb') as f:
for chunk in iter(lambda: f.read(32768), b''):
sha1sum.update(chunk)
return sha1sum.hexdigest()
def run(self, command, arguments = [], env = None):
"""
Execute a command with the given argument and return the result
"""
cmds = [command] + arguments
result = subprocess.Popen(cmds, stdout = subprocess.PIPE,
stderr = subprocess.PIPE, env = env)
data = result.communicate()
return (data[0].strip().decode("utf-8"), data[1].strip().decode("utf-8"), result.returncode)
def file_exists(self, path):
"""
Check if a given file exists
"""
return os.path.exists(path)
def readlink(self, path):
"""
Return the target of the path
"""
return os.readlink(path)
def symlink(self, source, target):
"""
Symlink source to target
"""
return os.symlink(source, target)
def is_symlink(self, path):
"""
Is the given path a symlink
"""
return os.path.islink(path)
def file_stat(self, path):
"""
        Do a stat call on a file
"""
stat_result = os.stat(path)
status = {}
status["owner"] = pwd.getpwuid(stat_result.st_uid).pw_name
status["group"] = grp.getgrgid(stat_result.st_gid).gr_name
status["permissions"] = int(oct(stat_result.st_mode)[-4:])
return status
def remove(self, path):
"""
Remove a file
"""
return os.remove(path)
def put(self, path, content):
"""
Put the given content at the given path
"""
with open(path, "wb+") as fd:
fd.write(content)
def _get_gid(self, name):
"""Returns a gid, given a group name."""
# Stolen from the python3 shutil lib
if getgrnam is None or name is None:
return None
try:
result = getgrnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def _get_uid(self, name):
"""Returns an uid, given a user name."""
# Stolen from the python3 shutil lib
if getpwnam is None or name is None:
return None
try:
result = getpwnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def chown(self, path, user = None, group = None):
"""
Change the ownership information
"""
# Stolen from the python3 shutil lib
if user is None and group is None:
raise ValueError("user and/or group must be set")
_user = user
_group = group
# -1 means don't change it
if user is None:
_user = -1
# user can either be an int (the uid) or a string (the system username)
elif not isinstance(user, int):
_user = self._get_uid(user)
if _user is None:
raise LookupError("no such user: {!r}".format(user))
if group is None:
_group = -1
elif not isinstance(group, int):
_group = self._get_gid(group)
if _group is None:
raise LookupError("no such group: {!r}".format(group))
os.chown(path, _user, _group)
def chmod(self, path, permissions):
"""
Change the permissions
"""
os.chmod(path, permissions)
def mkdir(self, path):
"""
Create a directory
"""
os.mkdir(path)
def rmdir(self, path):
"""
Remove a directory
"""
os.rmdir(path)
def close(self):
pass
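# Example usage (a local sketch; in practice this class is driven remotely
# over an execnet channel via the dispatch loop below):
#   io = LocalIO()
#   digest = io.hash_file('/etc/hostname')
#   out, err, retcode = io.run('/bin/echo', ['hello'])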
if __name__ == '__channelexec__':
local_io = LocalIO()
# fd = open("/tmp/execnet.log", "a+")
for item in channel:
if hasattr(local_io, item[0]):
# fd.write("Calling %s with args %s\n" % item)
try:
method = getattr(local_io, item[0])
result = method(*item[1])
# fd.write("Got result %s\n" % repr(result))
#
channel.send(result)
except Exception as e:
import traceback
# fd.write(str(e) + "\n")
# fd.write(str(traceback.format_exc()))
channel.send(str(traceback.format_exc()))
pass
else:
raise AttributeError("Method %s is not supported" % item[0])
# fd.close()
| UTF-8 | Python | false | false | 5,797 | py | 46 | local.py | 34 | 0.5458 | 0.539934 | 0 | 203 | 27.55665 | 100 |
tLDP/python-tldp | 11,287,174,067,290 | d93095ae7183a6fa60a6bacca7440e25c6529567 | 539d9780cadc3618157986f3b9a09aa29578ea09 | /tests/test_utils.py | 8366e84cf9e9c05c6456fb21e62fe8a10a3884d8 | [
"MIT"
]
| permissive | https://github.com/tLDP/python-tldp | e610903bd94a36c8b0d2e6a3962c7ec0e05ce4a7 | 4947542e01b50dacd57a7e6abf3965ff53ef0b13 | refs/heads/master | 2022-11-07T01:06:04.984169 | 2022-10-24T03:42:44 | 2022-10-24T03:42:44 | 53,902,981 | 12 | 6 | MIT | false | 2022-10-24T03:42:45 | 2016-03-15T00:53:33 | 2022-10-23T22:33:33 | 2022-10-24T03:42:44 | 471 | 11 | 6 | 1 | Python | false | false | # -*- coding: utf8 -*-
#
# Copyright (c) 2016 Linux Documentation Project
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import os
import stat
import uuid
import errno
import posix
import unittest
from tempfile import mkdtemp
from tempfile import NamedTemporaryFile as ntf
from tldptesttools import TestToolsFilesystem
# -- SUT
from tldp.utils import which, execute
from tldp.utils import statfile, statfiles, stem_and_ext
from tldp.utils import arg_isexecutable, isexecutable
from tldp.utils import arg_isreadablefile, isreadablefile
from tldp.utils import arg_isdirectory, arg_isloglevel
from tldp.utils import arg_isstr
from tldp.utils import swapdirs
class Test_isexecutable_and_friends(unittest.TestCase):
def test_isexecutable(self):
f = ntf(prefix='executable-file')
self.assertFalse(isexecutable(f.name))
mode = stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR
os.chmod(f.name, mode)
self.assertTrue(isexecutable(f.name))
def test_arg_isexecutable(self):
f = ntf(prefix='executable-file')
self.assertIsNone(arg_isexecutable(f.name))
mode = stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR
os.chmod(f.name, mode)
self.assertEqual(f.name, arg_isexecutable(f.name))
class Test_isreadablefile_and_friends(unittest.TestCase):
def test_isreadablefile(self):
f = ntf(prefix='readable-file')
self.assertTrue(isreadablefile(f.name))
mode = os.stat(f.name).st_mode
os.chmod(f.name, 0)
if 0 == os.getuid():
self.assertTrue(isreadablefile(f.name))
else:
self.assertFalse(isreadablefile(f.name))
os.chmod(f.name, mode)
def test_arg_isreadablefile(self):
f = ntf(prefix='readable-file')
self.assertEqual(f.name, arg_isreadablefile(f.name))
mode = os.stat(f.name).st_mode
os.chmod(f.name, 0)
if 0 == os.getuid():
self.assertEqual(f.name, arg_isreadablefile(f.name))
else:
self.assertIsNone(arg_isreadablefile(f.name))
os.chmod(f.name, mode)
class Test_arg_isstr(unittest.TestCase):
def test_arg_isstr(self):
self.assertEqual('s', arg_isstr('s'))
self.assertEqual(None, arg_isstr(7))
class Test_arg_isloglevel(unittest.TestCase):
def test_arg_isloglevel_integer(self):
self.assertEqual(7, arg_isloglevel(7))
self.assertEqual(40, arg_isloglevel('frobnitz'))
self.assertEqual(20, arg_isloglevel('INFO'))
self.assertEqual(10, arg_isloglevel('DEBUG'))
class Test_arg_isdirectory(TestToolsFilesystem):
def test_arg_isdirectory(self):
self.assertTrue(arg_isdirectory(self.tempdir))
f = ntf(dir=self.tempdir)
self.assertFalse(arg_isdirectory(f.name))
class Test_execute(TestToolsFilesystem):
def test_execute_returns_zero(self):
exe = which('true')
result = execute([exe], logdir=self.tempdir)
self.assertEqual(0, result)
def test_execute_stdout_to_devnull(self):
exe = which('cat')
cmd = [exe, '/etc/hosts']
devnull = open('/dev/null', 'w')
result = execute(cmd, stdout=devnull, logdir=self.tempdir)
devnull.close()
self.assertEqual(0, result)
def test_execute_stderr_to_devnull(self):
exe = which('cat')
cmd = [exe, '/etc/hosts']
devnull = open('/dev/null', 'w')
result = execute(cmd, stderr=devnull, logdir=self.tempdir)
devnull.close()
self.assertEqual(0, result)
def test_execute_returns_nonzero(self):
exe = which('false')
result = execute([exe], logdir=self.tempdir)
self.assertEqual(1, result)
def test_execute_exception_when_logdir_none(self):
exe = which('true')
with self.assertRaises(ValueError) as ecm:
execute([exe], logdir=None)
e = ecm.exception
self.assertTrue('logdir must be a directory' in e.args[0])
def test_execute_exception_when_logdir_enoent(self):
exe = which('true')
logdir = os.path.join(self.tempdir, 'nonexistent-directory')
with self.assertRaises(IOError) as ecm:
execute([exe], logdir=logdir)
e = ecm.exception
self.assertTrue('nonexistent' in e.filename)
class Test_which(unittest.TestCase):
def test_good_which_python(self):
python = which('python')
self.assertIsNotNone(python)
self.assertTrue(os.path.isfile(python))
qualified_python = which(python)
self.assertEqual(python, qualified_python)
def test_bad_silly_name(self):
silly = which('silliest-executable-name-which-may-yet-be-possible')
self.assertIsNone(silly)
def test_fq_executable(self):
f = ntf(prefix='tldp-which-test', delete=False)
f.close()
notfound = which(f.name)
self.assertIsNone(notfound)
mode = stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH
os.chmod(f.name, mode)
found = which(f.name)
self.assertEqual(f.name, found)
os.unlink(f.name)
class Test_statfiles(unittest.TestCase):
def test_statfiles_dir_in_result(self):
'''Assumes that directory ./sample-documents/ exists here'''
here = os.path.dirname(os.path.abspath(__file__))
statinfo = statfiles(here, relative=here)
self.assertIsInstance(statinfo, dict)
adoc = 'sample-documents/asciidoc-complete.txt'
self.assertTrue(adoc in statinfo)
def test_statfiles_dir_rel(self):
here = os.path.dirname(os.path.abspath(__file__))
statinfo = statfiles(here, relative=here)
self.assertIsInstance(statinfo, dict)
self.assertTrue(os.path.basename(__file__) in statinfo)
def test_statfiles_dir_abs(self):
here = os.path.dirname(os.path.abspath(__file__))
statinfo = statfiles(here)
self.assertIsInstance(statinfo, dict)
self.assertTrue(__file__ in statinfo)
def test_statfiles_file_rel(self):
here = os.path.dirname(os.path.abspath(__file__))
statinfo = statfiles(__file__, relative=here)
self.assertIsInstance(statinfo, dict)
self.assertTrue(os.path.basename(__file__) in statinfo)
def test_statfiles_file_abs(self):
statinfo = statfiles(__file__)
self.assertIsInstance(statinfo, dict)
self.assertTrue(__file__ in statinfo)
def test_statfiles_nonexistent_file(self):
here = os.path.dirname(os.path.abspath(__file__))
this = os.path.join(here, str(uuid.uuid4()))
statinfo = statfiles(this)
self.assertIsInstance(statinfo, dict)
self.assertEqual(0, len(statinfo))
class Test_statfile(TestToolsFilesystem):
def test_statfile_bogustype(self):
with self.assertRaises(TypeError):
statfile(0)
def test_statfile_enoent(self):
f = ntf(dir=self.tempdir)
self.assertIsNone(statfile(f.name + '-ENOENT_TEST'))
def test_statfile_exception(self):
f = ntf(dir=self.tempdir)
omode = os.stat(self.tempdir).st_mode
os.chmod(self.tempdir, 0)
if 0 != os.getuid():
with self.assertRaises(Exception) as ecm:
statfile(f.name)
e = ecm.exception
self.assertIn(e.errno, (errno.EPERM, errno.EACCES))
os.chmod(self.tempdir, omode)
stbuf = statfile(f.name)
self.assertIsInstance(stbuf, posix.stat_result)
class Test_stem_and_ext(unittest.TestCase):
def test_stem_and_ext_final_slash(self):
r0 = stem_and_ext('/h/q/t/z/Frobnitz-HOWTO')
r1 = stem_and_ext('/h/q/t/z/Frobnitz-HOWTO/')
self.assertEqual(r0, r1)
def test_stem_and_ext_rel_abs(self):
r0 = stem_and_ext('/h/q/t/z/Frobnitz-HOWTO')
r1 = stem_and_ext('Frobnitz-HOWTO/')
self.assertEqual(r0, r1)
class Test_swapdirs(TestToolsFilesystem):
def test_swapdirs_bogusarg(self):
with self.assertRaises(OSError) as ecm:
swapdirs('/path/to/frickin/impossible/dir', None)
e = ecm.exception
self.assertTrue(errno.ENOENT is e.errno)
def test_swapdirs_b_missing(self):
a = mkdtemp(dir=self.tempdir)
b = a + '-B'
self.assertFalse(os.path.exists(b))
swapdirs(a, b)
self.assertTrue(os.path.exists(b))
def test_swapdirs_with_file(self):
a = mkdtemp(dir=self.tempdir)
afile = os.path.join(a, 'silly')
b = mkdtemp(dir=self.tempdir)
bfile = os.path.join(b, 'silly')
with open(afile, 'w'):
pass
self.assertTrue(os.path.exists(a))
self.assertTrue(os.path.exists(afile))
self.assertTrue(os.path.exists(b))
self.assertFalse(os.path.exists(bfile))
swapdirs(a, b)
self.assertTrue(os.path.exists(a))
self.assertFalse(os.path.exists(afile))
self.assertTrue(os.path.exists(b))
self.assertTrue(os.path.exists(bfile))
#
# -- end of file
| UTF-8 | Python | false | false | 9,065 | py | 54 | test_utils.py | 34 | 0.636514 | 0.632543 | 0 | 275 | 31.963636 | 75 |
dftbplus/skpar | 17,626,545,816,809 | 976534702816616c0c3921eb2a9a642be94db8fb | 1958d78834810cd45170be0ad6f4982ca1f076ca | /bin/skpar_addrepulsive | 24df43479c6656398c6ab09db5dd520a098a59b9 | [
"MIT"
]
| permissive | https://github.com/dftbplus/skpar | 92fde1b2e8a45114a3a47aea7e482a73f8058b2d | 4c574a0e2dc4826697b39c7f41cf34210fe79d9b | refs/heads/main | 2023-04-19T13:28:13.179784 | 2023-03-29T06:59:02 | 2023-03-29T06:59:02 | 103,598,862 | 1 | 1 | NOASSERTION | false | 2023-03-29T06:59:22 | 2017-09-15T01:20:57 | 2023-03-29T06:48:06 | 2023-03-29T06:59:18 | 3,973 | 9 | 7 | 4 | Python | false | false | #!/usr/bin/env python3
import argparse
from skpar.dftbutils.repfit import append_spline
workdir = '.'
def main():
# argument parsing
# -------------------------------------------------------------------
parser = argparse.ArgumentParser(
description="Script that takes an existing SK-file and a "
"spline file and combines them in a new SK-file"
)
parser.add_argument(
'-i', dest='skin', action='store',
metavar='INPUT', help="Input SK-file"
)
parser.add_argument(
'-o', dest='skout', action='store',
metavar='OUTPUT', help="Output SK-file"
)
parser.add_argument(
'-s', dest='spl', default='repulsive.dat', action='store',
metavar='SPLINE',
help="Spline file; Must have the 'Spline' tag included on top!"
)
parser.add_argument(
'-e', dest='elements', nargs=2, default=None, action='store',
metavar=('A1', 'A2'),
help="Pair of chemical elements whose SKF is patched with a repulsive"
)
args = parser.parse_args()
if args.elements is not None:
e1, e2 = args.elements
fin1 = "{:s}-{:s}.template.skf".format(e1, e2)
fout1 = "{:s}-{:s}.skf".format(e1, e2)
append_spline(fin=fin1, fspl=args.spl, fout=fout1)
if e1 != e2:
fin2 = "{:s}-{:s}.template.skf".format(e2, e1)
fout2 = "{:s}-{:s}.skf".format(e2, e1)
append_spline(fin=fin2, fspl=args.spl, fout=fout2)
else:
append_spline(fin=args.skin, fspl=args.spl, fout=args.skout)
if __name__ == '__main__':
main()
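# Example invocations (a sketch; the element pair and file names are
# hypothetical):
#   skpar_addrepulsive -e C C -s repulsive.dat
#   skpar_addrepulsive -i C-C.template.skf -o C-C.skf -s repulsive.dat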
| UTF-8 | Python | false | false | 1,702 | 49 | skpar_addrepulsive | 12 | 0.519976 | 0.505875 | 0 | 46 | 36 | 82 |
|
ftonolini45/VICI | 3,375,844,344,421 | 7087a12eed48ffe78481b0ee49a912c348e5c206 | 32a7f38f60959e3d7eb11e02923005f4ad892667 | /neural_networks/VAE.py | 53bbaf411a92ce9372f089e64e5e6abec1526b70 | [
"CC0-1.0"
]
| permissive | https://github.com/ftonolini45/VICI | eef81773f2348e28f4df34aca1831450295152da | 7508328b474492edc7479bb41527cbbfd5f43a1d | refs/heads/main | 2023-05-30T04:03:25.876630 | 2021-06-14T08:48:50 | 2021-06-14T08:48:50 | 326,956,668 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | '''
Neural networks for a VAE or conditional VAE
'''
import numpy as np
import collections
import tensorflow as tf
from tensorflow import math as tfm
from tensorflow import linalg as tfl
from neural_networks import NN_utils as vae_utils
class Decoder(object):
'''
Class for Gaussian decoder, taking latent variables z and outputting
Gaussian distribution p(x|z)
'''
def __init__(self, name, n_x, n_z, N_h, nonlinearity=tf.nn.leaky_relu, sig_lim=10):
'''
Initialisation
INPUTS:
name - name to assign to the decoder
n_x - dimensionality of the input
n_z - dimensionality of latent space
N_h - array of hidden units' dimensionalities in the format [Nhz,Nh1,Nh2,...,Nhn]
OPTIONAL INPUTS:
nonlinearity - choice of non-linearity (e.g. tf.nn.relu/tf.nn.leaky_relu/tf.nn.elu)
        sig_lim - range to impose on the output log_sig_sq to avoid divergence.
'''
self.n_x = n_x
self.n_z = n_z
self.N_h = N_h
self.name = name
self.bias_start = 0.0
self.sig_lim = sig_lim
network_weights = self._create_weights()
self.weights = network_weights
self.nonlinearity = nonlinearity
def compute_moments(self,z,constrain=True):
'''
compute moments of input/output Gaussian distribution
INPUTS:
z - latent variable
OPTIONAL INPUTS:
constrain - whether to force the output mean to be between 0 and 1
OUTPUTS:
mu_x - mean of output Gaussian distribution
log_sig_sq_x - log variance of output Gaussian distribution
'''
hidden1_pre = tfm.add(tfl.matmul(z, self.weights['W_z_to_h1']), self.weights['b_z_to_h1'])
hidden_post = self.nonlinearity(hidden1_pre)
num_layers_middle = np.shape(self.N_h)[0]-1
for i in range(num_layers_middle):
ni = i+2
hidden_pre = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_h{}'.format(ni-1,ni)]), self.weights['b_h{}_to_h{}'.format(ni-1,ni)])
hidden_post = self.nonlinearity(hidden_pre)
mu_x = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_mux'.format(ni)]), self.weights['b_h{}_to_mux'.format(ni)])
if constrain==True:
mu_x = tf.nn.sigmoid(mu_x)
log_sig_sq_x = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_sx'.format(ni)]), self.weights['b_h{}_to_sx'.format(ni)])
log_sig_sq_x = self.sig_lim*(tf.nn.sigmoid(log_sig_sq_x/self.sig_lim)-0.5)
return mu_x, log_sig_sq_x
def _create_weights(self):
'''
Initialise weights
'''
all_weights = collections.OrderedDict()
all_weights['W_z_to_h1'] = tf.Variable(vae_utils.xavier_init(self.n_z, self.N_h[0]), dtype=tf.float32)
all_weights['b_z_to_h1'] = tf.Variable(tf.zeros([self.N_h[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle = np.shape(self.N_h)[0]-1
for i in range(num_layers_middle):
ni = i+2
all_weights['W_h{}_to_h{}'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-2], self.N_h[ni-1]), dtype=tf.float32)
all_weights['b_h{}_to_h{}'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_h[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_h{}_to_mux'.format(ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-1], self.n_x), dtype=tf.float32)
all_weights['b_h{}_to_mux'.format(ni)] = tf.Variable(tf.zeros([self.n_x], dtype=tf.float32) * self.bias_start)
all_weights['W_h{}_to_sx'.format(ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-1], self.n_x), dtype=tf.float32)
all_weights['b_h{}_to_sx'.format(ni)] = tf.Variable(tf.zeros([self.n_x], dtype=tf.float32) * self.bias_start)
return all_weights
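# Example (a sketch with hypothetical sizes, assuming TF2 eager execution;
# note N_h needs at least two entries, since the output weights are named
# after the loop index ni above):
#   dec = Decoder('dec', n_x=784, n_z=8, N_h=[64, 64])
#   z = tf.random.normal([16, 8])
#   mu_x, log_sig_sq_x = dec.compute_moments(z)
#   x = mu_x + tf.exp(0.5*log_sig_sq_x)*tf.random.normal(tf.shape(mu_x))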
class Encoder(object):
'''
Class for Gaussian encoder, taking inputs/outputs x and outputting
Gaussian distribution q(z|x). Also used for obtaining conditional
prior p(z|y) or p(z|x) in CVAEs.
'''
def __init__(self, name, n_x, n_z, N_h, nonlinearity=tf.nn.leaky_relu, sig_lim=10):
'''
Initialisation
INPUTS:
name - name to assign to the encoder
n_x - dimensionality of the input
n_z - dimensionality of latent space
N_h - array of hidden units' dimensionalities in the format [Nhx,Nh1,Nh2,...,Nhn]
OPTIONAL INPUTS:
nonlinearity - choice of non-linearity (e.g. tf.nn.relu/tf.nn.leaky_relu/tf.nn.elu)
        sig_lim - range to impose on the output log_sig_sq to avoid divergence.
'''
self.n_x = n_x
self.n_z = n_z
self.N_h = N_h
self.name = name
self.bias_start = 0.0
self.sig_lim = sig_lim
network_weights = self._create_weights()
self.weights = network_weights
self.nonlinearity = nonlinearity
def compute_moments(self,x):
'''
compute moments of latent Gaussian distribution
INPUTS:
x - conditional input
OUTPUTS:
mu_z - mean of latent Gaussian distribution
log_sig_sq_z - log variance of latent Gaussian distribution
'''
hidden1_pre = tfm.add(tfl.matmul(x, self.weights['W_x_to_h1']), self.weights['b_x_to_h1'])
hidden_post = self.nonlinearity(hidden1_pre)
num_layers_middle = np.shape(self.N_h)[0]-1
for i in range(num_layers_middle):
ni = i+2
hidden_pre = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_h{}'.format(ni-1,ni)]), self.weights['b_h{}_to_h{}'.format(ni-1,ni)])
hidden_post = self.nonlinearity(hidden_pre)
mu_z = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_muz'.format(ni)]), self.weights['b_h{}_to_muz'.format(ni)])
log_sig_sq_z = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_sz'.format(ni)]), self.weights['b_h{}_to_sz'.format(ni)])
log_sig_sq_z = self.sig_lim*(tf.nn.sigmoid(log_sig_sq_z/self.sig_lim)-0.5)
return mu_z, log_sig_sq_z
def _create_weights(self):
'''
Initialise weights
'''
all_weights = collections.OrderedDict()
all_weights['W_x_to_h1'] = tf.Variable(vae_utils.xavier_init(self.n_x, self.N_h[0]), dtype=tf.float32)
all_weights['b_x_to_h1'] = tf.Variable(tf.zeros([self.N_h[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle = np.shape(self.N_h)[0]-1
for i in range(num_layers_middle):
ni = i+2
all_weights['W_h{}_to_h{}'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-2], self.N_h[ni-1]), dtype=tf.float32)
all_weights['b_h{}_to_h{}'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_h[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_h{}_to_muz'.format(ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-1], self.n_z), dtype=tf.float32)
all_weights['b_h{}_to_muz'.format(ni)] = tf.Variable(tf.zeros([self.n_z], dtype=tf.float32) * self.bias_start)
all_weights['W_h{}_to_sz'.format(ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-1], self.n_z), dtype=tf.float32)
all_weights['b_h{}_to_sz'.format(ni)] = tf.Variable(tf.zeros([self.n_z], dtype=tf.float32) * self.bias_start)
return all_weights
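# Example (a sketch with hypothetical sizes): the reparameterisation trick
# used to draw z ~ q(z|x) while keeping gradients with respect to the
# encoder weights:
#   enc = Encoder('enc', n_x=784, n_z=8, N_h=[64, 64])
#   x = tf.random.normal([16, 784])
#   mu_z, log_sig_sq_z = enc.compute_moments(x)
#   z = mu_z + tf.exp(0.5*log_sig_sq_z)*tf.random.normal(tf.shape(mu_z))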
class ConditionalDecoder(object):
'''
Class for Gaussian conditional decoder, taking inputs x, latent variable
z and outputting Gaussian distribution p(y|z,x)
'''
def __init__(self, name, n_x, n_y, n_z, N_hx, N_hz, N_h, nonlinearity=tf.nn.leaky_relu, sig_lim=10):
'''
Initialisation
INPUTS:
name - name to assign to the conditional decoder
n_x - dimensionality of the conditional input
n_y - dimensionality of the output
n_z - dimensionality of latent space
N_hx - array of hidden units' dimensionalities for the conditional input x in the format [Nhx1,Nhx2,...,Nhxn]
N_hz - array of hidden units' dimensionalities for the latent variable z in the format [Nhz1,Nhz2,...,Nhzn]
N_h - array of hidden units' dimensionalities for joint channels in the format [Nh1,Nh2,...,Nhn]
OPTIONAL INPUTS:
nonlinearity - choice of non-linearity (e.g. tf.nn.relu/tf.nn.leaky_relu/tf.nn.elu)
        sig_lim - range to impose on the output log_sig_sq to avoid divergence.
'''
self.n_x = n_x
self.n_y = n_y
self.n_z = n_z
self.N_hx = N_hx
self.N_hz = N_hz
self.N_h = N_h
self.name = name
self.bias_start = 0.0
self.sig_lim = sig_lim
network_weights = self._create_weights()
self.weights = network_weights
self.nonlinearity = nonlinearity
def compute_moments(self,z,x,constrain=True):
'''
compute moments of output Gaussian distribution
INPUTS:
x - conditional input
z - latent variable
OPTIONAL INPUTS:
constrain - whether to force the output mean to be between 0 and 1
OUTPUTS:
mu_y - mean of output Gaussian distribution
log_sig_sq_y - log variance of output Gaussian distribution
'''
# Channel for latent variable alone
hidden_pre_z = tfm.add(tfl.matmul(z, self.weights['W_z_to_h1z']), self.weights['b_z_to_h1z'])
hidden_post_z = self.nonlinearity(hidden_pre_z)
num_layers_middle_z = np.shape(self.N_hz)[0]-1
for i in range(num_layers_middle_z):
ni = i+2
hidden_pre_z = tfm.add(tfl.matmul(hidden_post_z, self.weights['W_h{}z_to_h{}z'.format(ni-1,ni)]), self.weights['b_h{}z_to_h{}z'.format(ni-1,ni)])
hidden_post_z = self.nonlinearity(hidden_pre_z)
# Channel for conditional input alone
hidden_pre_x = tfm.add(tfl.matmul(x, self.weights['W_x_to_h1x']), self.weights['b_x_to_h1x'])
hidden_post_x = self.nonlinearity(hidden_pre_x)
num_layers_middle_x = np.shape(self.N_hx)[0]-1
for i in range(num_layers_middle_x):
ni = i+2
hidden_pre_x = tfm.add(tfl.matmul(hidden_post_x, self.weights['W_h{}x_to_h{}x'.format(ni-1,ni)]), self.weights['b_h{}x_to_h{}x'.format(ni-1,ni)])
hidden_post_x = self.nonlinearity(hidden_pre_x)
hidden_post = tf.concat([hidden_post_z,hidden_post_x],1)
# Channel after combining the inputs
hidden_pre = tfm.add(tfl.matmul(hidden_post, self.weights['W_h0_to_h1']), self.weights['b_h0_to_h1'])
hidden_post = self.nonlinearity(hidden_pre)
num_layers_middle = np.shape(self.N_h)[0]-1
for i in range(num_layers_middle):
ni = i+2
hidden_pre = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_h{}'.format(ni-1,ni)]), self.weights['b_h{}_to_h{}'.format(ni-1,ni)])
hidden_post = self.nonlinearity(hidden_pre)
mu_y = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_muy'.format(ni)]), self.weights['b_h{}_to_muy'.format(ni)])
if constrain==True:
mu_y = tf.nn.sigmoid(mu_y)
log_sig_sq_y = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_sy'.format(ni)]), self.weights['b_h{}_to_sy'.format(ni)])
log_sig_sq_y = self.sig_lim*(tf.nn.sigmoid(log_sig_sq_y/self.sig_lim)-0.5)
return mu_y, log_sig_sq_y
def _create_weights(self):
'''
Initialise weights
'''
all_weights = collections.OrderedDict()
all_weights['W_z_to_h1z'] = tf.Variable(vae_utils.xavier_init(self.n_z, self.N_hz[0]), dtype=tf.float32)
all_weights['b_z_to_h1z'] = tf.Variable(tf.zeros([self.N_hz[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle_z = np.shape(self.N_hz)[0]-1
for i in range(num_layers_middle_z):
ni = i+2
all_weights['W_h{}z_to_h{}z'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_hz[ni-2], self.N_hz[ni-1]), dtype=tf.float32)
all_weights['b_h{}z_to_h{}z'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_hz[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_x_to_h1x'] = tf.Variable(vae_utils.xavier_init(self.n_x, self.N_hx[0]), dtype=tf.float32)
all_weights['b_x_to_h1x'] = tf.Variable(tf.zeros([self.N_hx[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle_x = np.shape(self.N_hx)[0]-1
for i in range(num_layers_middle_x):
ni = i+2
all_weights['W_h{}x_to_h{}x'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_hx[ni-2], self.N_hx[ni-1]), dtype=tf.float32)
all_weights['b_h{}x_to_h{}x'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_hx[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_h0_to_h1'] = tf.Variable(vae_utils.xavier_init(self.N_hz[-1]+self.N_hx[-1], self.N_h[0]), dtype=tf.float32)
all_weights['b_h0_to_h1'] = tf.Variable(tf.zeros([self.N_h[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle = np.shape(self.N_h)[0]-1
for i in range(num_layers_middle):
ni = i+2
all_weights['W_h{}_to_h{}'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-2], self.N_h[ni-1]), dtype=tf.float32)
all_weights['b_h{}_to_h{}'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_h[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_h{}_to_muy'.format(ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-1], self.n_y), dtype=tf.float32)
all_weights['b_h{}_to_muy'.format(ni)] = tf.Variable(tf.zeros([self.n_y], dtype=tf.float32) * self.bias_start)
all_weights['W_h{}_to_sy'.format(ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-1], self.n_y), dtype=tf.float32)
all_weights['b_h{}_to_sy'.format(ni)] = tf.Variable(tf.zeros([self.n_y], dtype=tf.float32) * self.bias_start)
return all_weights
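# Example (a sketch with hypothetical sizes): generating y given a
# condition x by sampling z from a standard-normal prior and decoding:
#   cdec = ConditionalDecoder('cdec', n_x=10, n_y=784, n_z=8,
#                             N_hx=[64], N_hz=[64], N_h=[64, 64])
#   x = tf.random.normal([16, 10])
#   z = tf.random.normal([16, 8])
#   mu_y, log_sig_sq_y = cdec.compute_moments(z, x)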
class ConditionalEncoder(object):
'''
Class for Gaussian conditional encoder, taking inputs y and x and outputting latent variable
distribution q(z|x,y)
'''
def __init__(self, name, n_x, n_y, n_z, N_hx, N_hy, N_h, nonlinearity=tf.nn.leaky_relu, sig_lim=10):
'''
Initialisation
INPUTS:
name - name to assign to the conditional encoder
n_x - dimensionality of the conditional input
n_y - dimensionality of the input/output
n_z - dimensionality of latent space
N_hx - array of hidden units' dimensionalities for the conditional input x in the format [Nhx1,Nhx2,...,Nhxn]
N_hy - array of hidden units' dimensionalities for the input/output y in the format [Nhy1,Nhy2,...,Nhyn]
N_h - array of hidden units' dimensionalities for joint channels in the format [Nh1,Nh2,...,Nhn]
OPTIONAL INPUTS:
nonlinearity - choice of non-linearity (e.g. tf.nn.relu/tf.nn.leaky_relu/tf.nn.elu)
        sig_lim - range to impose on the output log_sig_sq to avoid divergence.
'''
self.n_x = n_x
self.n_y = n_y
self.n_z = n_z
self.N_hx = N_hx
self.N_hy = N_hy
self.N_h = N_h
self.name = name
self.bias_start = 0.0
self.sig_lim = sig_lim
network_weights = self._create_weights()
self.weights = network_weights
self.nonlinearity = nonlinearity
def compute_moments(self,x,y):
'''
compute moments of latent Gaussian distribution
INPUTS:
x - conditional input
y - output to encode
OUTPUTS:
mu_z - mean of output Gaussian distribution
log_sig_sq_z - log variance of output Gaussian distribution
'''
# Channel for input/output alone
hidden_pre_y = tfm.add(tfl.matmul(y, self.weights['W_y_to_h1y']), self.weights['b_y_to_h1y'])
hidden_post_y = self.nonlinearity(hidden_pre_y)
num_layers_middle_y = np.shape(self.N_hy)[0]-1
for i in range(num_layers_middle_y):
ni = i+2
hidden_pre_y = tfm.add(tfl.matmul(hidden_post_y, self.weights['W_h{}y_to_h{}y'.format(ni-1,ni)]), self.weights['b_h{}y_to_h{}y'.format(ni-1,ni)])
hidden_post_y = self.nonlinearity(hidden_pre_y)
# Channel for conditional input alone
hidden_pre_x = tfm.add(tfl.matmul(x, self.weights['W_x_to_h1x']), self.weights['b_x_to_h1x'])
hidden_post_x = self.nonlinearity(hidden_pre_x)
num_layers_middle_x = np.shape(self.N_hx)[0]-1
for i in range(num_layers_middle_x):
ni = i+2
hidden_pre_x = tfm.add(tfl.matmul(hidden_post_x, self.weights['W_h{}x_to_h{}x'.format(ni-1,ni)]), self.weights['b_h{}x_to_h{}x'.format(ni-1,ni)])
hidden_post_x = self.nonlinearity(hidden_pre_x)
hidden_post = tf.concat([hidden_post_y,hidden_post_x],1)
# Channel after combining the inputs
hidden_pre = tfm.add(tfl.matmul(hidden_post, self.weights['W_h0_to_h1']), self.weights['b_h0_to_h1'])
hidden_post = self.nonlinearity(hidden_pre)
num_layers_middle = np.shape(self.N_h)[0]-1
for i in range(num_layers_middle):
ni = i+2
hidden_pre = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_h{}'.format(ni-1,ni)]), self.weights['b_h{}_to_h{}'.format(ni-1,ni)])
hidden_post = self.nonlinearity(hidden_pre)
mu_z = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_muz'.format(ni)]), self.weights['b_h{}_to_muz'.format(ni)])
log_sig_sq_z = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_sz'.format(ni)]), self.weights['b_h{}_to_sz'.format(ni)])
log_sig_sq_z = self.sig_lim*(tf.nn.sigmoid(log_sig_sq_z/self.sig_lim)-0.5)
return mu_z, log_sig_sq_z
def _create_weights(self):
'''
Initialise weights
'''
all_weights = collections.OrderedDict()
all_weights['W_y_to_h1y'] = tf.Variable(vae_utils.xavier_init(self.n_y, self.N_hy[0]), dtype=tf.float32)
all_weights['b_y_to_h1y'] = tf.Variable(tf.zeros([self.N_hy[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle_y = np.shape(self.N_hy)[0]-1
for i in range(num_layers_middle_y):
ni = i+2
all_weights['W_h{}y_to_h{}y'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_hy[ni-2], self.N_hy[ni-1]), dtype=tf.float32)
all_weights['b_h{}y_to_h{}y'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_hy[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_x_to_h1x'] = tf.Variable(vae_utils.xavier_init(self.n_x, self.N_hx[0]), dtype=tf.float32)
all_weights['b_x_to_h1x'] = tf.Variable(tf.zeros([self.N_hx[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle_x = np.shape(self.N_hx)[0]-1
for i in range(num_layers_middle_x):
ni = i+2
all_weights['W_h{}x_to_h{}x'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_hx[ni-2], self.N_hx[ni-1]), dtype=tf.float32)
all_weights['b_h{}x_to_h{}x'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_hx[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_h0_to_h1'] = tf.Variable(vae_utils.xavier_init(self.N_hy[-1]+self.N_hx[-1], self.N_h[0]), dtype=tf.float32)
all_weights['b_h0_to_h1'] = tf.Variable(tf.zeros([self.N_h[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle = np.shape(self.N_h)[0]-1
for i in range(num_layers_middle):
ni = i+2
all_weights['W_h{}_to_h{}'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-2], self.N_h[ni-1]), dtype=tf.float32)
all_weights['b_h{}_to_h{}'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_h[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_h{}_to_muz'.format(ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-1], self.n_z), dtype=tf.float32)
all_weights['b_h{}_to_muz'.format(ni)] = tf.Variable(tf.zeros([self.n_z], dtype=tf.float32) * self.bias_start)
all_weights['W_h{}_to_sz'.format(ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-1], self.n_z), dtype=tf.float32)
all_weights['b_h{}_to_sz'.format(ni)] = tf.Variable(tf.zeros([self.n_z], dtype=tf.float32) * self.bias_start)
return all_weights
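# In a CVAE these pieces combine into the ELBO (a sketch, not necessarily
# the exact loss used elsewhere in this repository):
#   L = E_{q(z|x,y)}[ log N(y; mu_y, sig_y^2) ] - KL( q(z|x,y) || p(z|x) )
# with q from ConditionalEncoder, a conditional prior p from an Encoder,
# and the likelihood moments from ConditionalDecoder.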
class DoubleConditionalDecoder(object):
'''
Class for Gaussian conditional decoder, taking inputs x and x2, latent variable
z and outputting Gaussian distribution p(y|z,x,x2)
'''
def __init__(self, name, n_x, n_x2, n_y, n_z, N_hx, N_hx2, N_hz, N_h, nonlinearity=tf.nn.leaky_relu, sig_lim=10):
'''
Initialisation
INPUTS:
name - name to assign to the conditional decoder
n_x - dimensionality of the first conditional input
n_x2 - dimensionality of the second conditional input
n_y - dimensionality of the output
n_z - dimensionality of latent space
N_hx - array of hidden units' dimensionalities for the conditional input x in the format [Nhx1,Nhx2,...,Nhxn]
N_hx2 - array of hidden units' dimensionalities for the conditional input x2 in the format [Nhxb1,Nhxb2,...,Nhxbn]
N_hz - array of hidden units' dimensionalities for the latent variable z in the format [Nhz1,Nhz2,...,Nhzn]
N_h - array of hidden units' dimensionalities for joint channels in the format [Nh1,Nh2,...,Nhn]
OPTIONAL INPUTS:
nonlinearity - choice of non-linearity (e.g. tf.nn.relu/tf.nn.leaky_relu/tf.nn.elu)
        sig_lim - range to impose on the output log_sig_sq to avoid divergence.
'''
self.n_x = n_x
self.n_x2 = n_x2
self.n_y = n_y
self.n_z = n_z
self.N_hx = N_hx
self.N_hx2 = N_hx2
self.N_hz = N_hz
self.N_h = N_h
self.name = name
self.bias_start = 0.0
self.sig_lim = sig_lim
network_weights = self._create_weights()
self.weights = network_weights
self.nonlinearity = nonlinearity
def compute_moments(self,z,x,x2,constrain=True):
'''
compute moments of output Gaussian distribution
INPUTS:
x - conditional input
x2 - conditional input
z - latent variable
OPTIONAL INPUTS:
constrain - whether to force the output mean to be between 0 and 1
OUTPUTS:
mu_y - mean of output Gaussian distribution
log_sig_sq_y - log variance of output Gaussian distribution
'''
# Channel for latent variable alone
hidden_pre_z = tfm.add(tfl.matmul(z, self.weights['W_z_to_h1z']), self.weights['b_z_to_h1z'])
hidden_post_z = self.nonlinearity(hidden_pre_z)
num_layers_middle_z = np.shape(self.N_hz)[0]-1
for i in range(num_layers_middle_z):
ni = i+2
hidden_pre_z = tfm.add(tfl.matmul(hidden_post_z, self.weights['W_h{}z_to_h{}z'.format(ni-1,ni)]), self.weights['b_h{}z_to_h{}z'.format(ni-1,ni)])
hidden_post_z = self.nonlinearity(hidden_pre_z)
# Channel for first conditional input alone
hidden_pre_x = tfm.add(tfl.matmul(x, self.weights['W_x_to_h1x']), self.weights['b_x_to_h1x'])
hidden_post_x = self.nonlinearity(hidden_pre_x)
num_layers_middle_x = np.shape(self.N_hx)[0]-1
for i in range(num_layers_middle_x):
ni = i+2
hidden_pre_x = tfm.add(tfl.matmul(hidden_post_x, self.weights['W_h{}x_to_h{}x'.format(ni-1,ni)]), self.weights['b_h{}x_to_h{}x'.format(ni-1,ni)])
hidden_post_x = self.nonlinearity(hidden_pre_x)
# Channel for second conditional input alone
hidden_pre_x2 = tfm.add(tfl.matmul(x2, self.weights['W_x2_to_h1x2']), self.weights['b_x2_to_h1x2'])
hidden_post_x2 = self.nonlinearity(hidden_pre_x2)
num_layers_middle_x2 = np.shape(self.N_hx2)[0]-1
for i in range(num_layers_middle_x2):
ni = i+2
hidden_pre_x2 = tfm.add(tfl.matmul(hidden_post_x2, self.weights['W_h{}x2_to_h{}x2'.format(ni-1,ni)]), self.weights['b_h{}x2_to_h{}x2'.format(ni-1,ni)])
hidden_post_x2 = self.nonlinearity(hidden_pre_x2)
hidden_post = tf.concat([hidden_post_z,hidden_post_x,hidden_post_x2],1)
# Channel after combining the inputs
hidden_pre = tfm.add(tfl.matmul(hidden_post, self.weights['W_h0_to_h1']), self.weights['b_h0_to_h1'])
hidden_post = self.nonlinearity(hidden_pre)
num_layers_middle = np.shape(self.N_h)[0]-1
for i in range(num_layers_middle):
ni = i+2
hidden_pre = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_h{}'.format(ni-1,ni)]), self.weights['b_h{}_to_h{}'.format(ni-1,ni)])
hidden_post = self.nonlinearity(hidden_pre)
mu_y = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_muy'.format(ni)]), self.weights['b_h{}_to_muy'.format(ni)])
if constrain==True:
mu_y = tf.nn.sigmoid(mu_y)
log_sig_sq_y = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_sy'.format(ni)]), self.weights['b_h{}_to_sy'.format(ni)])
log_sig_sq_y = self.sig_lim*(tf.nn.sigmoid(log_sig_sq_y/self.sig_lim)-0.5)
return mu_y, log_sig_sq_y
def _create_weights(self):
'''
Initialise weights
'''
all_weights = collections.OrderedDict()
all_weights['W_z_to_h1z'] = tf.Variable(vae_utils.xavier_init(self.n_z, self.N_hz[0]), dtype=tf.float32)
all_weights['b_z_to_h1z'] = tf.Variable(tf.zeros([self.N_hz[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle_z = np.shape(self.N_hz)[0]-1
for i in range(num_layers_middle_z):
ni = i+2
all_weights['W_h{}z_to_h{}z'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_hz[ni-2], self.N_hz[ni-1]), dtype=tf.float32)
all_weights['b_h{}z_to_h{}z'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_hz[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_x_to_h1x'] = tf.Variable(vae_utils.xavier_init(self.n_x, self.N_hx[0]), dtype=tf.float32)
all_weights['b_x_to_h1x'] = tf.Variable(tf.zeros([self.N_hx[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle_x = np.shape(self.N_hx)[0]-1
for i in range(num_layers_middle_x):
ni = i+2
all_weights['W_h{}x_to_h{}x'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_hx[ni-2], self.N_hx[ni-1]), dtype=tf.float32)
all_weights['b_h{}x_to_h{}x'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_hx[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_x2_to_h1x2'] = tf.Variable(vae_utils.xavier_init(self.n_x2, self.N_hx2[0]), dtype=tf.float32)
all_weights['b_x2_to_h1x2'] = tf.Variable(tf.zeros([self.N_hx2[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle_x2 = np.shape(self.N_hx2)[0]-1
for i in range(num_layers_middle_x2):
ni = i+2
all_weights['W_h{}x2_to_h{}x2'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_hx2[ni-2], self.N_hx2[ni-1]), dtype=tf.float32)
all_weights['b_h{}x2_to_h{}x2'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_hx2[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_h0_to_h1'] = tf.Variable(vae_utils.xavier_init(self.N_hz[-1]+self.N_hx[-1]+self.N_hx2[-1], self.N_h[0]), dtype=tf.float32)
all_weights['b_h0_to_h1'] = tf.Variable(tf.zeros([self.N_h[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle = np.shape(self.N_h)[0]-1
for i in range(num_layers_middle):
ni = i+2
all_weights['W_h{}_to_h{}'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-2], self.N_h[ni-1]), dtype=tf.float32)
all_weights['b_h{}_to_h{}'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_h[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_h{}_to_muy'.format(ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-1], self.n_y), dtype=tf.float32)
all_weights['b_h{}_to_muy'.format(ni)] = tf.Variable(tf.zeros([self.n_y], dtype=tf.float32) * self.bias_start)
all_weights['W_h{}_to_sy'.format(ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-1], self.n_y), dtype=tf.float32)
all_weights['b_h{}_to_sy'.format(ni)] = tf.Variable(tf.zeros([self.n_y], dtype=tf.float32) * self.bias_start)
return all_weights
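# Example (a sketch with hypothetical sizes): decoding with two conditions,
# e.g. a measurement x and an auxiliary input x2:
#   ddec = DoubleConditionalDecoder('ddec', n_x=10, n_x2=5, n_y=784, n_z=8,
#                                   N_hx=[64], N_hx2=[64], N_hz=[64],
#                                   N_h=[64, 64])
#   mu_y, log_sig_sq_y = ddec.compute_moments(z, x, x2)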
class DoubleConditionalEncoder(object):
'''
Class for Gaussian conditional encoder, taking inputs x, x2 and y, and outputting latent Gaussian distribution q(z|x,x2,y)
'''
def __init__(self, name, n_x, n_x2, n_y, n_z, N_hx, N_hx2, N_hy, N_h, nonlinearity=tf.nn.leaky_relu, sig_lim=10):
'''
Initialisation
INPUTS:
name - name to assign to the conditional encoder
n_x - dimensionality of the conditional input
n_x2 - dimensionality of the second conditional input
n_y - dimensionality of the input/output
n_z - dimensionality of latent space
N_hx - array of hidden units' dimensionalities for the conditional input x in the format [Nhx1,Nhx2,...,Nhxn]
N_hx2 - array of hidden units' dimensionalities for the conditional input x2 in the format [Nhxb1,Nhxb2,...,Nhxbn]
N_hy - array of hidden units' dimensionalities for the input/output y in the format [Nhy1,Nhy2,...,Nhyn]
N_h - array of hidden units' dimensionalities for joint channels in the format [Nh1,Nh2,...,Nhn]
OPTIONAL INPUTS:
nonlinearity - choice of non-linearity (e.g. tf.nn.relu/tf.nn.leaky_relu/tf.nn.elu)
        sig_lim - range to impose on the output log_sig_sq to avoid divergence.
'''
self.n_x = n_x
self.n_x2 = n_x2
self.n_y = n_y
self.n_z = n_z
self.N_hx = N_hx
self.N_hx2 = N_hx2
self.N_hy = N_hy
self.N_h = N_h
self.name = name
self.bias_start = 0.0
self.sig_lim = sig_lim
network_weights = self._create_weights()
self.weights = network_weights
self.nonlinearity = nonlinearity
def compute_moments(self,y,x,x2):
'''
compute moments of latent Gaussian distribution
INPUTS:
            x - conditional input
            x2 - second conditional input
            y - output to encode
OUTPUTS:
mu_z - mean of output Gaussian distribution
log_sig_sq_z - log variance of output Gaussian distribution
'''
# Channel for input/output alone
hidden_pre_y = tfm.add(tfl.matmul(y, self.weights['W_y_to_h1y']), self.weights['b_y_to_h1y'])
hidden_post_y = self.nonlinearity(hidden_pre_y)
num_layers_middle_y = np.shape(self.N_hy)[0]-1
for i in range(num_layers_middle_y):
ni = i+2
hidden_pre_y = tfm.add(tfl.matmul(hidden_post_y, self.weights['W_h{}y_to_h{}y'.format(ni-1,ni)]), self.weights['b_h{}y_to_h{}y'.format(ni-1,ni)])
hidden_post_y = self.nonlinearity(hidden_pre_y)
# Channel for conditional input alone
hidden_pre_x = tfm.add(tfl.matmul(x, self.weights['W_x_to_h1x']), self.weights['b_x_to_h1x'])
hidden_post_x = self.nonlinearity(hidden_pre_x)
num_layers_middle_x = np.shape(self.N_hx)[0]-1
for i in range(num_layers_middle_x):
ni = i+2
hidden_pre_x = tfm.add(tfl.matmul(hidden_post_x, self.weights['W_h{}x_to_h{}x'.format(ni-1,ni)]), self.weights['b_h{}x_to_h{}x'.format(ni-1,ni)])
hidden_post_x = self.nonlinearity(hidden_pre_x)
# Channel for second conditional input alone
hidden_pre_x2 = tfm.add(tfl.matmul(x2, self.weights['W_x2_to_h1x2']), self.weights['b_x2_to_h1x2'])
hidden_post_x2 = self.nonlinearity(hidden_pre_x2)
num_layers_middle_x2 = np.shape(self.N_hx2)[0]-1
for i in range(num_layers_middle_x2):
ni = i+2
hidden_pre_x2 = tfm.add(tfl.matmul(hidden_post_x2, self.weights['W_h{}x2_to_h{}x2'.format(ni-1,ni)]), self.weights['b_h{}x2_to_h{}x2'.format(ni-1,ni)])
hidden_post_x2 = self.nonlinearity(hidden_pre_x2)
hidden_post = tf.concat([hidden_post_y,hidden_post_x,hidden_post_x2],1)
# Channel after combining the inputs
hidden_pre = tfm.add(tfl.matmul(hidden_post, self.weights['W_h0_to_h1']), self.weights['b_h0_to_h1'])
hidden_post = self.nonlinearity(hidden_pre)
num_layers_middle = np.shape(self.N_h)[0]-1
for i in range(num_layers_middle):
ni = i+2
hidden_pre = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_h{}'.format(ni-1,ni)]), self.weights['b_h{}_to_h{}'.format(ni-1,ni)])
hidden_post = self.nonlinearity(hidden_pre)
mu_z = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_muz'.format(ni)]), self.weights['b_h{}_to_muz'.format(ni)])
log_sig_sq_z = tfm.add(tfl.matmul(hidden_post, self.weights['W_h{}_to_sz'.format(ni)]), self.weights['b_h{}_to_sz'.format(ni)])
log_sig_sq_z = self.sig_lim*(tf.nn.sigmoid(log_sig_sq_z/self.sig_lim)-0.5)
return mu_z, log_sig_sq_z
def _create_weights(self):
'''
Initialise weights
'''
all_weights = collections.OrderedDict()
all_weights['W_y_to_h1y'] = tf.Variable(vae_utils.xavier_init(self.n_y, self.N_hy[0]), dtype=tf.float32)
all_weights['b_y_to_h1y'] = tf.Variable(tf.zeros([self.N_hy[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle_y = np.shape(self.N_hy)[0]-1
for i in range(num_layers_middle_y):
ni = i+2
all_weights['W_h{}y_to_h{}y'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_hy[ni-2], self.N_hy[ni-1]), dtype=tf.float32)
all_weights['b_h{}y_to_h{}y'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_hy[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_x_to_h1x'] = tf.Variable(vae_utils.xavier_init(self.n_x, self.N_hx[0]), dtype=tf.float32)
all_weights['b_x_to_h1x'] = tf.Variable(tf.zeros([self.N_hx[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle_x = np.shape(self.N_hx)[0]-1
for i in range(num_layers_middle_x):
ni = i+2
all_weights['W_h{}x_to_h{}x'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_hx[ni-2], self.N_hx[ni-1]), dtype=tf.float32)
all_weights['b_h{}x_to_h{}x'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_hx[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_x2_to_h1x2'] = tf.Variable(vae_utils.xavier_init(self.n_x2, self.N_hx2[0]), dtype=tf.float32)
all_weights['b_x2_to_h1x2'] = tf.Variable(tf.zeros([self.N_hx2[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle_x2 = np.shape(self.N_hx2)[0]-1
for i in range(num_layers_middle_x2):
ni = i+2
all_weights['W_h{}x2_to_h{}x2'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_hx2[ni-2], self.N_hx2[ni-1]), dtype=tf.float32)
all_weights['b_h{}x2_to_h{}x2'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_hx2[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_h0_to_h1'] = tf.Variable(vae_utils.xavier_init(self.N_hy[-1]+self.N_hx[-1]+self.N_hx2[-1], self.N_h[0]), dtype=tf.float32)
all_weights['b_h0_to_h1'] = tf.Variable(tf.zeros([self.N_h[0]], dtype=tf.float32) * self.bias_start)
num_layers_middle = np.shape(self.N_h)[0]-1
for i in range(num_layers_middle):
ni = i+2
all_weights['W_h{}_to_h{}'.format(ni-1,ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-2], self.N_h[ni-1]), dtype=tf.float32)
all_weights['b_h{}_to_h{}'.format(ni-1,ni)] = tf.Variable(tf.zeros([self.N_h[ni-1]], dtype=tf.float32) * self.bias_start)
all_weights['W_h{}_to_muz'.format(ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-1], self.n_z), dtype=tf.float32)
all_weights['b_h{}_to_muz'.format(ni)] = tf.Variable(tf.zeros([self.n_z], dtype=tf.float32) * self.bias_start)
all_weights['W_h{}_to_sz'.format(ni)] = tf.Variable(vae_utils.xavier_init(self.N_h[ni-1], self.n_z), dtype=tf.float32)
all_weights['b_h{}_to_sz'.format(ni)] = tf.Variable(tf.zeros([self.n_z], dtype=tf.float32) * self.bias_start)
return all_weights
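# Illustrative sketch (not part of the original module; the dimensionalities
# below are arbitrary assumptions): build the encoder once, then query the
# latent Gaussian moments for a small batch.
def _example_double_conditional_encoder():
    enc = DoubleConditionalEncoder('q_z_example', n_x=10, n_x2=5, n_y=3, n_z=8,
                                   N_hx=[64], N_hx2=[64], N_hy=[64], N_h=[128])
    y = tf.zeros([4, 3]); x = tf.zeros([4, 10]); x2 = tf.zeros([4, 5])
    return enc.compute_moments(y, x, x2)  # -> (mu_z, log_sig_sq_z), each [4, 8]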
| UTF-8 | Python | false | false | 38,074 | py | 17 | VAE.py | 16 | 0.576246 | 0.557835 | 0 | 789 | 47.250951 | 163 |
erikvanzijst/wrapped_pong_fork | 7,602,092,126,692 | 85aceb0bcba27176e8beae0d3a6222bf08343fe0 | 4eab7329e0b5d01153f6742ce6a8dfe036d11c59 | /caravel_test/encoder.py | 6cbaa97c8e55b9895c005737b3e0406f9d0b0c2d | [
"Apache-2.0"
]
| permissive | https://github.com/erikvanzijst/wrapped_pong_fork | c1058fdbbfc974db04f54fac3471f6882e982c91 | d95ef1fda9ed6bf23e24bc1b3c6ecd760f96f7e4 | refs/heads/main | 2023-05-30T17:36:39.042440 | 2021-05-25T07:37:33 | 2021-05-25T07:37:33 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | ../pong/test/encoder.py | UTF-8 | Python | false | false | 23 | py | 9 | encoder.py | 2 | 0.73913 | 0.73913 | 0 | 1 | 23 | 23 |
DreAymi/SAXS_reconstruction | 8,512,625,200,349 | 9b37be9b13e80d38037f9a9adddc6a084e428905 | 661445c651c2fa1ed9b5b4d70d700df0fc9de15a | /auto_encoder_t.py | e6785bcc5755afb765df4ef4c770f251a4018f87 | []
| no_license | https://github.com/DreAymi/SAXS_reconstruction | 4f085b869f74667fbfa0258ea1a4bcb7cdb87fb3 | d20482c42558e8e806103aa7fa072e033cd5ef07 | refs/heads/master | 2021-07-02T04:19:57.682491 | 2021-06-29T04:20:33 | 2021-06-29T04:20:33 | 178,705,919 | 2 | 2 | null | false | 2019-12-28T06:42:21 | 2019-03-31T15:24:03 | 2019-12-26T09:17:43 | 2019-12-28T06:41:52 | 467 | 1 | 1 | 0 | Python | false | false | import numpy as np
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
import random
import os
BATCH_SIZE = 100
SEED=56297
z_dim=200
def variable_on_cpu(name,shape,stddev,trainable=False):
with tf.device('/cpu:0'):
var=tf.get_variable(name,shape,initializer=tf.truncated_normal_initializer(stddev=stddev, dtype=tf.float32),trainable=trainable)
return var
def encode(input,batchsize):
with tf.variable_scope('conv1') as scope:
weight1_1=variable_on_cpu('weight1',[3,3,3,1,64],np.sqrt(2./(3*3*3)))
        bias1_1=variable_on_cpu('bias1',[64],0)
conv=tf.nn.conv3d(input,weight1_1,strides=[1,1,1,1,1],padding='SAME') + bias1_1
weight1_2=variable_on_cpu('weight2',[3,3,3,64,64],np.sqrt(2./(3*3*3*64)))
        bias1_2=variable_on_cpu('bias2',[64],0)
conv1=tf.nn.conv3d(conv,weight1_2,strides=[1,1,1,1,1],padding='SAME')
relu=tf.nn.relu(conv1 + bias1_2)
pool1=tf.nn.max_pool3d(relu,ksize=[1,2,2,2,1],strides=[1,2,2,2,1],padding='SAME')
with tf.variable_scope('conv2') as scope:
weight2_1=variable_on_cpu('weight1',[3,3,3,64,128],np.sqrt(2./(3*3*3*64)))
        bias2_1=variable_on_cpu('bias1',[128],0)
conv=tf.nn.conv3d(pool1,weight2_1,strides=[1,1,1,1,1],padding='SAME') + bias2_1
weight2_2=variable_on_cpu('weight2',[3,3,3,128,128],np.sqrt(2./(3*3*3*128)))
        bias2_2=variable_on_cpu('bias2',[128],0)
conv2=tf.nn.conv3d(conv,weight2_2,strides=[1,1,1,1,1],padding='SAME')
relu=tf.nn.relu(conv2 + bias2_2)
pool2=tf.nn.max_pool3d(relu,ksize=[1,2,2,2,1],strides=[1,2,2,2,1],padding='SAME')
with tf.variable_scope('conv3') as scope:
weight3_1=variable_on_cpu('weight1',[3,3,3,128,128],np.sqrt(2./(3*3*3*128)))
        bias3_1=variable_on_cpu('bias1',[128],0)
conv=tf.nn.conv3d(pool2,weight3_1,strides=[1,1,1,1,1],padding='SAME') + bias3_1
weight3_2=variable_on_cpu('weight2',[3,3,3,128,128],np.sqrt(2./(3*3*3*128)))
        bias3_2=variable_on_cpu('bias2',[128],0)
conv=tf.nn.conv3d(conv,weight3_2,strides=[1,1,1,1,1],padding='SAME') + bias3_2
weight3_3=variable_on_cpu('weight3',[3,3,3,128,128],np.sqrt(2./(3*3*3*128)))
        bias3_3=variable_on_cpu('bias3',[128],0)
conv3=tf.nn.conv3d(conv,weight3_3,strides=[1,1,1,1,1],padding='SAME')
relu=tf.nn.relu(conv3 + bias3_3)
reshape_=tf.reshape(relu,[batchsize,-1])
dim=reshape_.get_shape()[-1].value
with tf.variable_scope('fc1') as scope:
weight4=variable_on_cpu('weight',[dim,z_dim],np.sqrt(2./dim),trainable=True)
bias4=variable_on_cpu('bias',[z_dim],0,trainable=True)
z=tf.nn.relu(tf.matmul(reshape_,weight4) + bias4)
#z=tf.nn.sigmoid(tf.matmul(reshape_,weight4) + bias4)
return z
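# Illustrative round-trip sketch (an assumption, not part of the original
# script): `encode` maps a [batch,32,32,32,1] voxel grid to a z_dim=200 code
# and `decode` (defined below) maps the code back to voxel logits.
def _example_autoencoder_graph(batchsize=BATCH_SIZE):
    voxels = tf.placeholder(tf.float32, [batchsize, 32, 32, 32, 1])
    z = encode(voxels, batchsize)                 # -> [batchsize, z_dim]
    recon = tf.nn.sigmoid(decode(z, batchsize))   # -> [batchsize, 32, 32, 32, 1]
    return voxels, z, recon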
def decode(z,batchsize):
with tf.variable_scope('fc2',reuse=tf.AUTO_REUSE) as scope:
weight5=variable_on_cpu('weight',[z_dim,8*8*8*32],np.sqrt(2./z_dim),trainable=True)
bias5=variable_on_cpu('bias',[8*8*8*32],0,trainable=True)
h=tf.nn.relu(tf.matmul(z,weight5) + bias5)
h=tf.reshape(h,[-1,8,8,8,32])
with tf.variable_scope('deconv1',reuse=tf.AUTO_REUSE) as scope:
weight6=variable_on_cpu('weight',[5,5,5,64,32],np.sqrt(2./(5*5*5*32)))
bias6=variable_on_cpu('bias',[64],0)
deconv=tf.nn.conv3d_transpose(h,weight6,[batchsize,16,16,16,64],[1,2,2,2,1],padding='SAME')
deconv1=tf.nn.relu(deconv+bias6)
with tf.variable_scope('deconv2',reuse=tf.AUTO_REUSE) as scope:
weight7=variable_on_cpu('weight',[5,5,5,128,64],np.sqrt(2./(5*5*5*64)))
bias7=variable_on_cpu('bias',[128],0)
deconv=tf.nn.conv3d_transpose(deconv1,weight7,[batchsize,32,32,32,128],[1,2,2,2,1],padding='SAME')
deconv2=tf.nn.relu(deconv+bias7)
with tf.variable_scope('conv4',reuse=tf.AUTO_REUSE) as scope:
weight8=variable_on_cpu('weight',[3,3,3,128,1],np.sqrt(2./(3*3*3*128)))
bias8=variable_on_cpu('bias',[1],0)
conv=tf.nn.conv3d(deconv2,weight8,strides=[1,1,1,1,1],padding='SAME')
logits=conv+bias8
return logits
def generate_session_decode(gpu_num):
    print('BATCH_SIZE:::', BATCH_SIZE)
in_=[]
out=[]
for ii in range(gpu_num):
with tf.device('/gpu:%d'%ii):
train_in=tf.placeholder(shape=[BATCH_SIZE,z_dim],dtype=tf.float32)
train_logits =decode(train_in,BATCH_SIZE)
train_out=tf.nn.sigmoid(train_logits)
in_.append(train_in)
out.append(train_out)
tf.get_variable_scope().reuse_variables()
return in_,out
def generate_session(gpu_num):
in_=[]
z=[]
out=[]
for ii in range(gpu_num):
with tf.device('/gpu:%d'%ii):
train_in=tf.placeholder(shape=[1,32,32,32,1],dtype=tf.float32)
train_z=encode(train_in,1)
train_logits =decode(train_z,1)
train_out=tf.nn.sigmoid(train_logits)
tf.get_variable_scope().reuse_variables()
in_.append(train_in)
z.append(train_z)
out.append(train_out)
return in_,z,out
def generate_encode_session(gpu_num):
in_=[]
z=[]
for ii in range(gpu_num):
with tf.device('/gpu:%d'%ii):
train_in=tf.placeholder(shape=[1,32,32,32,1],dtype=tf.float32)
train_z=encode(train_in,1)
tf.get_variable_scope().reuse_variables()
in_.append(train_in)
z.append(train_z)
return in_,z
def generate_decode_session(gpu_num):
z=[]
out=[]
for ii in range(gpu_num):
with tf.device('/gpu:%d'%ii):
train_z=tf.placeholder(shape=[1,z_dim],dtype=tf.float32)
train_logits =decode(train_z,1)
train_out=tf.nn.sigmoid(train_logits)
tf.get_variable_scope().reuse_variables()
z.append(train_z)
out.append(train_out)
return z,out
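# Minimal usage sketch (assumed, illustrative only): build the per-GPU graph
# once, then feed each GPU its own latent batch:
#   zs, outs = generate_decode_session(1)
#   with tf.Session() as sess:
#       sess.run(tf.global_variables_initializer())
#       voxels = sess.run(outs[0], feed_dict={zs[0]: np.zeros((1, z_dim))})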
| UTF-8 | Python | false | false | 5,338 | py | 21 | auto_encoder_t.py | 17 | 0.679655 | 0.594043 | 0 | 144 | 35.993056 | 130 |
FaithKoech/Classes-Objects | 15,985,868,275,806 | 6b12d69d7eb81281e145d937ae83223e1f8dbf9e | ce792818d31f232227a3871a20a41e1d497be126 | /Classes&Objects.py | 64a152eef8374ad04998d8411e9b0e8b86ae3591 | []
| no_license | https://github.com/FaithKoech/Classes-Objects | 882294c4e17825e978da0c26ef8923488f8835bb | 29a543b9946c53da1ddf8193a5eab1714dec04c6 | refs/heads/master | 2020-09-19T18:16:13.062283 | 2019-11-26T18:45:11 | 2019-11-26T18:45:11 | 224,261,968 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # name="Faith"
# print(type(name))
# Class
# class person:
# # class attribute (properties)
# species = "Homo sapien"
#
# # Methods are functions defined inside a class
# def walk(self):
# print("is walking")
# def sleep(self):
# print("{}is sleeping".format(self.name))
# p1=person()
# p2=person()
# p3=person()
# p1.species
# # print(p1.species)
# p1.name = "Faith"
# p2.name = "Ann"
# p3.name = "val"
# # print(p1.name)
# # p1.age=20
# # print(p1.age)
# # p1.race = "black"
# # print(p1.race)
# # p1.origin="African"
# # print(p1.origin)
# # p1.citizenship="Kenyan"
# # print(p1.citizenship)
# p1.walk()
# p1.sleep()
#
#
# class car:
# make="BMW"
#
# c1=car()
# c1.name="Koech's Objects"
# c2=car()
# c3=car()
# c1.year="1982"
#
# def speed(self):
# print("{}is moving at 60km per hr".format(self.name))
#
# print(c1.make)
# print(c1.year)
# c1.speed()
# Runs as soon as you create
# def __init__(self):
# print("I am a constructor method")
#
# self.name="Faith"
# self.age=26
#
# p12=person("Kivuti", 26)
class Person:
def __init__(self, name, age):
self.name = name
self.age = age
p1 = Person("John", 36)
print(p1.name)
print(p1.age)
print(p1) | UTF-8 | Python | false | false | 1,238 | py | 1 | Classes&Objects.py | 1 | 0.575121 | 0.537157 | 0 | 70 | 16.7 | 59 |
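# A method sketch (illustrative, not part of the lesson code): methods defined
# inside the class can read the attributes set by __init__, e.g.
#   class Person:
#       def __init__(self, name, age):
#           self.name = name
#           self.age = age
#       def greet(self):
#           print("Hello, my name is {}".format(self.name))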
JangGiWon/Beakjoon | 16,363,825,435,110 | 614a2c712417a54277b663c3b8f7f1a4f6056994 | e2b8ec06a1e3d31319f50f95b9bc60a6abf268ef | /10808.py | e5981aa5c93d327cdc90efb65ce92fd2b725377c | []
| no_license | https://github.com/JangGiWon/Beakjoon | 7d9f9a9b5e5cedc990e3635275374f1cdab47cbf | e4230f20f37ac9488cb12a8fb877526460b79b62 | refs/heads/master | 2022-12-15T10:09:33.506984 | 2020-08-26T07:46:34 | 2020-08-26T07:46:34 | 279,236,574 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | s = input()
result = [0] * 26
for i in s:
result[ord(i) - 97] = s.count(i)
for i in result:
print(i, end=' ')
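# Note (illustrative): ord('a') == 97, so ord(i) - 97 maps 'a'..'z' to 0..25.
# An equivalent single pass over the string would be:
#   for c in s:
#       result[ord(c) - ord('a')] += 1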
| UTF-8 | Python | false | false | 120 | py | 48 | 10808.py | 47 | 0.516667 | 0.475 | 0 | 8 | 14 | 36 |
rogerioita/ICPChain | 13,975,823,586,309 | 3c7b81e421909a800ed73f1596555b456ef63e88 | 236ded75aefcf965c582538f1b56d21b161cd3c5 | /ethpki-Criptografado/testDir/callct040.py | 6a03da4716499ec4dcb7993ba58c54eb9fdc7b36 | []
| no_license | https://github.com/rogerioita/ICPChain | 7fc1c7c5df289ed31fd2ad71cee400eac3d253cb | 87f2b001f5297396be6cb3aa9dd0c42236587038 | refs/heads/master | 2023-03-10T20:57:49.479052 | 2021-02-16T16:27:23 | 2021-02-16T16:27:23 | 336,806,143 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
import os
os.system('node ct040.js')
| UTF-8 | Python | false | false | 60 | py | 641 | callct040.py | 638 | 0.7 | 0.65 | 0 | 4 | 14 | 26 |
HIGHWAY99/service.lan.ftp | 11,484,742,570,023 | 532cad3ed4505bd06fddeaa27f007ec54cb584e9 | 062e89787371e23139a1cb4dc33da2c164a34908 | /pyftpdlib/handlers.py | 8cf8d74ac89deb78f158f106160fb31f5e04661b | []
| no_license | https://github.com/HIGHWAY99/service.lan.ftp | 39f1f1d8c1635b090ec86bb6010dd834afb51ba3 | d597cecbb5e6be147e5e2340fba06df214422f9d | refs/heads/master | 2016-08-07T07:50:50.976819 | 2014-02-02T19:18:06 | 2014-02-02T19:18:06 | 16,396,848 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
# $Id: handlers.py 1218 2013-04-19 01:48:39Z g.rodola $
# ======================================================================
# Copyright (C) 2007-2013 Giampaolo Rodola' <g.rodola@gmail.com>
# All Rights Reserved
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
# ======================================================================
import asynchat,time,sys,os,socket,random,glob,errno,traceback,warnings,logging
try: import pwd,grp
except ImportError: pwd=grp=None
#__ver__="1.3.0";
from __init__ import __ver__
from log import logger
from filesystems import FilesystemError, AbstractedFS
from _compat import PY3,b,u,getcwdu,unicode,xrange,next
from ioloop import AsyncChat,Connector,Acceptor,timer,_DISCONNECTED
from authorizers import (DummyAuthorizer,AuthenticationFailed,AuthorizerError)
# ===================================================================
# --- Additions for Addon Workings.
# ===================================================================
import xbmcaddon
addon_id="service.lan.ftp"; addon=xbmcaddon.Addon(id=addon_id)
def getAddonSetting(setting,default=""):
try: return addon.getSetting(setting)
except: return default
#getAddonSetting("","")
#def IsServerOn(self): return tfalse(getAddonSetting("server_enable","false"))
def tfalse(r,d=False): ## Get True / False
if (r.lower()=='true' ) or (r.lower()=='t') or (r.lower()=='y') or (r.lower()=='1') or (r.lower()=='yes'): return True
elif (r.lower()=='false') or (r.lower()=='f') or (r.lower()=='n') or (r.lower()=='0') or (r.lower()=='no'): return False
else: return d
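# Illustrative: tfalse('True') -> True; tfalse('0') -> False;
# tfalse('maybe', d=True) -> True (unrecognized values fall back to d)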
# ===================================================================
def _import_sendfile():
# By default attempt to use os.sendfile introduced in Python 3.3:
# http://bugs.python.org/issue10882
# ...otherwise fallback on using third-party pysendfile module:
# http://code.google.com/p/pysendfile/
if os.name=='posix':
try: return os.sendfile # py >= 3.3
except AttributeError:
try:
import sendfile as sf
if hasattr(sf, 'has_sf_hdtr'): raise ImportError # dirty hack to detect whether old 1.2.4 version is installed
return sf.sendfile
except ImportError: pass
sendfile=_import_sendfile()
proto_cmds={
'ABOR':dict(perm=None, auth=True, arg=False, help='Syntax: ABOR (abort transfer).'),
'ALLO':dict(perm=None, auth=True, arg=True, help='Syntax: ALLO <SP> bytes (noop; allocate storage).'),
'APPE':dict(perm='a', auth=True, arg=True, help='Syntax: APPE <SP> file-name (append data to file).'),
'CDUP':dict(perm='e', auth=True, arg=False, help='Syntax: CDUP (go to parent directory).'),
'CWD' :dict(perm='e', auth=True, arg=None, help='Syntax: CWD [<SP> dir-name] (change working directory).'),
'DELE':dict(perm='d', auth=True, arg=True, help='Syntax: DELE <SP> file-name (delete file).'),
'EPRT':dict(perm=None, auth=True, arg=True, help='Syntax: EPRT <SP> |proto|ip|port| (extended active mode).'),
'EPSV':dict(perm=None, auth=True, arg=None, help='Syntax: EPSV [<SP> proto/"ALL"] (extended passive mode).'),
'FEAT':dict(perm=None, auth=False, arg=False, help='Syntax: FEAT (list all new features supported).'),
'HELP':dict(perm=None, auth=False, arg=None, help='Syntax: HELP [<SP> cmd] (show help).'),
'LIST':dict(perm='l', auth=True, arg=None, help='Syntax: LIST [<SP> path] (list files).'),
'MDTM':dict(perm='l', auth=True, arg=True, help='Syntax: MDTM [<SP> path] (file last modification time).'),
'MLSD':dict(perm='l', auth=True, arg=None, help='Syntax: MLSD [<SP> path] (list directory).'),
'MLST':dict(perm='l', auth=True, arg=None, help='Syntax: MLST [<SP> path] (show information about path).'),
'MODE':dict(perm=None, auth=True, arg=True, help='Syntax: MODE <SP> mode (noop; set data transfer mode).'),
'MKD' :dict(perm='m', auth=True, arg=True, help='Syntax: MKD <SP> path (create directory).'),
'NLST':dict(perm='l', auth=True, arg=None, help='Syntax: NLST [<SP> path] (list path in a compact form).'),
'NOOP':dict(perm=None, auth=False, arg=False, help='Syntax: NOOP (just do nothing).'),
'OPTS':dict(perm=None, auth=True, arg=True, help='Syntax: OPTS <SP> cmd [<SP> option] (set option for command).'),
'PASS':dict(perm=None, auth=False, arg=None, help='Syntax: PASS [<SP> password] (set user password).'),
'PASV':dict(perm=None, auth=True, arg=False, help='Syntax: PASV (open passive data connection).'),
'PORT':dict(perm=None, auth=True, arg=True, help='Syntax: PORT <sp> h1,h2,h3,h4,p1,p2 (open active data connection).'),
'PWD' :dict(perm=None, auth=True, arg=False, help='Syntax: PWD (get current working directory).'),
'QUIT':dict(perm=None, auth=False, arg=False, help='Syntax: QUIT (quit current session).'),
'REIN':dict(perm=None, auth=True, arg=False, help='Syntax: REIN (flush account).'),
'REST':dict(perm=None, auth=True, arg=True, help='Syntax: REST <SP> offset (set file offset).'),
'RETR':dict(perm='r', auth=True, arg=True, help='Syntax: RETR <SP> file-name (retrieve a file).'),
'RMD' :dict(perm='d', auth=True, arg=True, help='Syntax: RMD <SP> dir-name (remove directory).'),
'RNFR':dict(perm='f', auth=True, arg=True, help='Syntax: RNFR <SP> file-name (rename (source name)).'),
'RNTO':dict(perm='f', auth=True, arg=True, help='Syntax: RNTO <SP> file-name (rename (destination name)).'),
'SITE':dict(perm=None, auth=False, arg=True, help='Syntax: SITE <SP> site-command (execute SITE command).'),
'SITE HELP' :dict(perm=None, auth=False, arg=None, help='Syntax: SITE HELP [<SP> site-command] (show SITE command help).'),
'SITE CHMOD':dict(perm='M', auth=True, arg=True, help='Syntax: SITE CHMOD <SP> mode path (change file mode).'),
'SIZE':dict(perm='l', auth=True, arg=True, help='Syntax: SIZE <SP> file-name (get file size).'),
'STAT':dict(perm='l', auth=False, arg=None, help='Syntax: STAT [<SP> path name] (server stats [list files]).'),
'STOR':dict(perm='w', auth=True, arg=True, help='Syntax: STOR <SP> file-name (store a file).'),
'STOU':dict(perm='w', auth=True, arg=None, help='Syntax: STOU [<SP> file-name] (store a file with a unique name).'),
'STRU':dict(perm=None, auth=True, arg=True, help='Syntax: STRU <SP> type (noop; set file structure).'),
'SYST':dict(perm=None, auth=False, arg=False, help='Syntax: SYST (get operating system type).'),
'TYPE':dict(perm=None, auth=True, arg=True, help='Syntax: TYPE <SP> [A | I] (set transfer type).'),
'USER':dict(perm=None, auth=False, arg=True, help='Syntax: USER <SP> user-name (set username).'),
'XCUP':dict(perm='e', auth=True, arg=False, help='Syntax: XCUP (obsolete; go to parent directory).'),
'XCWD':dict(perm='e', auth=True, arg=None, help='Syntax: XCWD [<SP> dir-name] (obsolete; change directory).'),
'XMKD':dict(perm='m', auth=True, arg=True, help='Syntax: XMKD <SP> dir-name (obsolete; create directory).'),
'XPWD':dict(perm=None, auth=True, arg=False, help='Syntax: XPWD (obsolete; get current dir).'),
'XRMD':dict(perm='d', auth=True, arg=True, help='Syntax: XRMD <SP> dir-name (obsolete; remove directory).'),
}
if not hasattr(os,'chmod'): del proto_cmds['SITE CHMOD']
def _strerror(err):
if isinstance(err,EnvironmentError):
try: return os.strerror(err.errno)
except AttributeError:
# not available on PythonCE
if not hasattr(os,'strerror'): return err.strerror
raise
else: return str(err)
def _support_hybrid_ipv6(): #"""Return True if it is possible to use hybrid IPv6/IPv4 sockets on this platform."""
# Note: IPPROTO_IPV6 constant is broken on Windows, see:
# http://bugs.python.org/issue6926
sock=None
try:
try:
if not socket.has_ipv6: return False
sock=socket.socket(socket.AF_INET6); return not sock.getsockopt(socket.IPPROTO_IPV6,socket.IPV6_V6ONLY)
except (socket.error,AttributeError): return False
finally:
if sock is not None: sock.close()
SUPPORTS_HYBRID_IPV6=_support_hybrid_ipv6()
class _FileReadWriteError(OSError):
"""Exception raised when reading or writing a file during a transfer."""
# --- DTP classes
class PassiveDTP(Acceptor):
"""Creates a socket listening on a local port, dispatching the
resultant connection to DTPHandler. Used for handling PASV command.
- (int) timeout: the timeout for a remote client to establish
connection with the listening socket. Defaults to 30 seconds.
- (int) backlog: the maximum number of queued connections passed
to listen(). If a connection request arrives when the queue is
full the client may raise ECONNRESET. Defaults to 5."""
timeout=30; backlog=None
def __init__(self,cmd_channel,extmode=False):
"""Initialize the passive data server.
- (instance) cmd_channel: the command channel class instance.
        - (bool) extmode: whether to use the extended passive mode response type."""
        self.cmd_channel=cmd_channel; self.log=cmd_channel.log; self.log_exception=cmd_channel.log_exception
        self._closed=False; self._idler=None
        Acceptor.__init__(self,ioloop=cmd_channel.ioloop)
        local_ip=self.cmd_channel.socket.getsockname()[0]
if local_ip in self.cmd_channel.masquerade_address_map: masqueraded_ip=self.cmd_channel.masquerade_address_map[local_ip]
elif self.cmd_channel.masquerade_address: masqueraded_ip=self.cmd_channel.masquerade_address
else: masqueraded_ip=None
if self.cmd_channel.server._af != socket.AF_INET: af=self.bind_af_unspecified((local_ip,0)); self.socket.close() # dual stack IPv4/IPv6 support
else: af=self.cmd_channel._af
self.create_socket(af,socket.SOCK_STREAM)
if self.cmd_channel.passive_ports is None:
# By using 0 as port number value we let kernel choose a
# free unprivileged random port.
self.bind((local_ip,0))
else:
ports=list(self.cmd_channel.passive_ports)
while ports:
port=ports.pop(random.randint(0,len(ports)-1)); self.set_reuse_addr()
try: self.bind((local_ip,port))
except socket.error:
err=sys.exc_info()[1]
if err.args[0]==errno.EADDRINUSE: # port already in use
if ports: continue
# If cannot use one of the ports in the configured
# range we'll use a kernel-assigned port, and log
# a message reporting the issue.
# By using 0 as port number value we let kernel
# choose a free unprivileged random port.
else: self.bind((local_ip,0)); self.cmd_channel.log("Can't find a valid passive port in the "+"configured range. A random kernel-assigned "+"port will be used.",logfun=logger.warning)
else: raise
else: break
self.listen(self.backlog or self.cmd_channel.server.backlog)
port=self.socket.getsockname()[1]
if not extmode:
ip=masqueraded_ip or local_ip
if ip.startswith('::ffff:'):
# In this scenario, the server has an IPv6 socket, but
# the remote client is using IPv4 and its address is
# represented as an IPv4-mapped IPv6 address which
# looks like this ::ffff:151.12.5.65, see:
# http://en.wikipedia.org/wiki/IPv6#IPv4-mapped_addresses
# http://tools.ietf.org/html/rfc3493.html#section-3.7
# We truncate the first bytes to make it look like a
# common IPv4 address.
ip=ip[7:]
            # The format of the 227 response is not standardized.
# This is the most expected:
self.cmd_channel.respond('227 Entering passive mode (%s,%d,%d).' % (ip.replace('.', ','), port // 256, port % 256))
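            # e.g. port 62000 is announced as "242,48" since 242 * 256 + 48 == 62000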
else:
self.cmd_channel.respond('229 Entering extended passive mode '+'(|||%d|).' % port)
if self.timeout: self._idler=self.ioloop.call_later(self.timeout,self.handle_timeout,_errback=self.handle_error)
# --- connection / overridden
def handle_accepted(self,sock,addr):
"""Called when remote client initiates a connection."""
if not self.cmd_channel.connected: return self.close()
# Check the origin of data connection. If not expressively
# configured we drop the incoming data connection if remote
# IP address does not match the client's IP address.
if self.cmd_channel.remote_ip != addr[0]:
if not self.cmd_channel.permit_foreign_addresses:
try: sock.close()
except socket.error: pass
msg='425 Rejected data connection from foreign address %s:%s.' %(addr[0], addr[1])
self.cmd_channel.respond_w_warning(msg)
# do not close listening socket: it couldn't be client's blame
return
else:
# site-to-site FTP allowed
msg='Established data connection with foreign address %s:%s.' % (addr[0], addr[1])
self.cmd_channel.log(msg,logfun=logger.warning)
# Immediately close the current channel (we accept only one
# connection at time) and avoid running out of max connections
# limit.
self.close()
# delegate such connection to DTP handler
if self.cmd_channel.connected:
handler=self.cmd_channel.dtp_handler(sock,self.cmd_channel)
if handler.connected: self.cmd_channel.data_channel=handler; self.cmd_channel._on_dtp_connection()
def handle_timeout(self):
if self.cmd_channel.connected: self.cmd_channel.respond("421 Passive data channel timed out.",logfun=logging.info)
self.close()
def handle_error(self):
"""Called to handle any uncaught exceptions."""
try: raise
except Exception: logger.error(traceback.format_exc())
try: self.close()
except Exception: logger.critical(traceback.format_exc())
def close(self):
if not self._closed:
self._closed=True
Acceptor.close(self)
if self._idler is not None and not self._idler.cancelled: self._idler.cancel()
class ActiveDTP(Connector):
"""Connects to remote client and dispatches the resulting connection
to DTPHandler. Used for handling PORT command.
- (int) timeout: the timeout for us to establish connection with
the client's listening data socket."""
timeout=30
def __init__(self,ip,port,cmd_channel):
"""Initialize the active data channel attemping to connect
to remote data socket.
- (str) ip: the remote IP address.
- (int) port: the remote port.
- (instance) cmd_channel: the command channel class instance."""
Connector.__init__(self,ioloop=cmd_channel.ioloop)
self.cmd_channel=cmd_channel
self.log=cmd_channel.log
self.log_exception=cmd_channel.log_exception
self._closed=False
self._idler=None
if self.timeout: self._idler=self.ioloop.call_later(self.timeout,self.handle_timeout,_errback=self.handle_error)
        # a dotted-quad IPv4 address contains exactly three dots
        if ip.count('.')==3:
            self._cmd="PORT"; self._normalized_addr="%s:%s" % (ip,port)
else:
self._cmd="EPRT"; self._normalized_addr="[%s]:%s" % (ip,port)
source_ip=self.cmd_channel.socket.getsockname()[0]
# dual stack IPv4/IPv6 support
try: self.connect_af_unspecified((ip,port),(source_ip,0))
except (socket.gaierror,socket.error): self.handle_close()
def readable(self): return False
    def handle_write(self): pass # overridden to prevent unhandled read/write event messages to be printed by asyncore on Python < 2.6
def handle_connect(self): #"""Called when connection is established."""
self.del_channel()
if self._idler is not None and not self._idler.cancelled: self._idler.cancel()
if not self.cmd_channel.connected: return self.close()
        # fix for asyncore on python < 2.6, meaning we aren't actually connected.
# test_active_conn_error tests this condition
err=self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
if err != 0:
raise socket.error(err)
#
msg='Active data connection established.'
self.cmd_channel.respond('200 '+msg); self.cmd_channel.log_cmd(self._cmd,self._normalized_addr,200,msg)
#
if not self.cmd_channel.connected: return self.close()
# delegate such connection to DTP handler
handler=self.cmd_channel.dtp_handler(self.socket,self.cmd_channel); self.cmd_channel.data_channel=handler; self.cmd_channel._on_dtp_connection()
def handle_timeout(self):
if self.cmd_channel.connected: msg="Active data channel timed out."; self.cmd_channel.respond("421 "+msg,logfun=logger.info); self.cmd_channel.log_cmd(self._cmd,self._normalized_addr,421,msg)
self.close()
def handle_close(self):
# With the new IO loop, handle_close() gets called in case
# the fd appears in the list of exceptional fds.
# This means connect() failed.
if not self._closed:
self.close()
if self.cmd_channel.connected: msg="Can't connect to specified address."; self.cmd_channel.respond("425 "+msg); self.cmd_channel.log_cmd(self._cmd,self._normalized_addr,425,msg)
def handle_error(self): #"""Called to handle any uncaught exceptions."""
try: raise
except (socket.gaierror,socket.error): pass
except Exception: self.log_exception(self)
try: self.handle_close()
except Exception: logger.critical(traceback.format_exc())
def close(self):
if not self._closed:
self._closed=True
if self.socket is not None: Connector.close(self)
if self._idler is not None and not self._idler.cancelled: self._idler.cancel()
class DTPHandler(AsyncChat):
"""Class handling server-data-transfer-process (server-DTP, see RFC-959) managing data-transfer operations involving sending and receiving data.
Class attributes:
- (int) timeout: the timeout which roughly is the maximum time we permit data transfers to stall for with
no progress. If the timeout triggers, the remote client will be kicked off (defaults 300).
- (int) ac_in_buffer_size: incoming data buffer size (defaults 65536)
- (int) ac_out_buffer_size: outgoing data buffer size (defaults 65536)"""
timeout=300; ac_in_buffer_size=65536; ac_out_buffer_size=65536
def __init__(self,sock,cmd_channel):
"""Initialize the command channel.
- (instance) sock: the socket object instance of the newly established connection.
- (instance) cmd_channel: the command channel class instance."""
        self.cmd_channel=cmd_channel; self.file_obj=None; self.receive=False; self.transfer_finished=False
        self.tot_bytes_sent=0; self.tot_bytes_received=0; self.cmd=None
        self.log=cmd_channel.log; self.log_exception=cmd_channel.log_exception
        self._data_wrapper=None; self._lastdata=0; self._closed=False; self._had_cr=False
        self._start_time=timer(); self._resp=(); self._offset=None; self._filefd=None; self._idler=None; self._initialized=False
try: AsyncChat.__init__(self,sock,ioloop=cmd_channel.ioloop)
except socket.error:
err=sys.exc_info()[1]
# if we get an exception here we want the dispatcher instance to set socket attribute before closing, see:
# http://code.google.com/p/pyftpdlib/issues/detail?id=188
AsyncChat.__init__(self,socket.socket(),ioloop=cmd_channel.ioloop)
# http://code.google.com/p/pyftpdlib/issues/detail?id=143
self.close()
if err.args[0]==errno.EINVAL: return
self.handle_error(); return
# remove this instance from IOLoop's socket map
if not self.connected: self.close(); return
if self.timeout: self._idler=self.ioloop.call_every(self.timeout,self.handle_timeout,_errback=self.handle_error)
def __repr__(self):
try: addr="%s:%s" % self.socket.getpeername()[:2]
except socket.error: addr=None
status=[self.__class__.__module__+"."+self.__class__.__name__]
status.append("(addr=%s, user=%r, receive=%r, file=%r)" % (addr,self.cmd_channel.username or '',self.receive,getattr(self.file_obj,'name','')))
return '<%s at %#x>' % (' '.join(status),id(self))
__str__=__repr__
def _use_sendfile(self, producer): return self.cmd_channel.use_sendfile and isinstance(producer, FileProducer) and producer.type == 'i'
def push(self,data): self._initialized=True; self.ioloop.modify(self._fileno,self.ioloop.WRITE); AsyncChat.push(self,data)
def push_with_producer(self,producer):
self._initialized=True; self.ioloop.modify(self._fileno,self.ioloop.WRITE)
if self._use_sendfile(producer): self._offset=producer.file.tell(); self._filefd=self.file_obj.fileno(); self.initiate_sendfile(); self.initiate_send=self.initiate_sendfile
else: AsyncChat.push_with_producer(self,producer)
def close_when_done(self): asynchat.async_chat.close_when_done(self)
def initiate_send(self): asynchat.async_chat.initiate_send(self)
def initiate_sendfile(self): #"""A wrapper around sendfile."""
try: sent=sendfile(self._fileno,self._filefd,self._offset,self.ac_out_buffer_size)
except OSError:
err=sys.exc_info()[1]
if err.errno in (errno.EAGAIN,errno.EWOULDBLOCK,errno.EBUSY): return
elif err.errno in _DISCONNECTED: self.handle_close()
else: raise
else:
if sent==0: self.discard_buffers(); self.handle_close() # this signals the channel that the transfer is completed
else: self._offset +=sent; self.tot_bytes_sent +=sent
# --- utility methods
def _posix_ascii_data_wrapper(self, chunk): #"""The data wrapper used for receiving data in ASCII mode on systems using a single line terminator, handling those cases where CRLF ('\r\n') gets delivered in two chunks."""
if self._had_cr: chunk=b('\r')+chunk
if chunk.endswith(b('\r')): self._had_cr=True; chunk=chunk[:-1]
else: self._had_cr=False
return chunk.replace(b('\r\n'),b(os.linesep))
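    # e.g. if b'foo\r' arrives in one chunk and b'\nbar' in the next, the held
    # back b'\r' is re-prepended above so the pair still becomes os.linesep.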
def enable_receiving(self, type, cmd):
"""Enable receiving of data over the channel. Depending on the TYPE currently in use it creates an appropriate wrapper for the incoming data.
- (str) type: current transfer type, 'a' (ASCII) or 'i' (binary)."""
self._initialized=True
self.ioloop.modify(self._fileno,self.ioloop.READ)
self.cmd=cmd
if type=='a':
if os.linesep=='\r\n': self._data_wrapper=None
else: self._data_wrapper=self._posix_ascii_data_wrapper
elif type=='i':
self._data_wrapper=None
else: raise TypeError("unsupported type")
self.receive=True
def get_transmitted_bytes(self): return self.tot_bytes_sent + self.tot_bytes_received #"""Return the number of transmitted bytes."""
def get_elapsed_time(self): return timer() - self._start_time #"""Return the transfer elapsed time in seconds."""
def transfer_in_progress(self): return self.get_transmitted_bytes() != 0 #"""Return True if a transfer is in progress, else False."""
# --- connection
def send(self, data): result=AsyncChat.send(self,data); self.tot_bytes_sent +=result; return result
def refill_buffer(self):
"""Overridden as a fix around http://bugs.python.org/issue1740572
(when the producer is consumed, close() was called instead of handle_close()). """
while 1:
if len(self.producer_fifo):
p=self.producer_fifo.first()
# a 'None' in the producer fifo is a sentinel, # telling us to close the channel.
if p is None:
if not self.ac_out_buffer:
self.producer_fifo.pop()
#self.close()
self.handle_close()
return
elif isinstance(p,str): self.producer_fifo.pop(); self.ac_out_buffer +=p; return
data=p.more()
if data: self.ac_out_buffer=self.ac_out_buffer+data; return
else: self.producer_fifo.pop()
else: return
def handle_read(self): #"""Called when there is data waiting to be read."""
try: chunk=self.recv(self.ac_in_buffer_size)
except socket.error: self.handle_error()
else:
self.tot_bytes_received +=len(chunk)
if not chunk:
self.transfer_finished=True
#self.close() # <-- asyncore.recv() already do that...
return
if self._data_wrapper is not None: chunk = self._data_wrapper(chunk)
try: self.file_obj.write(chunk)
except OSError: err=sys.exc_info()[1]; raise _FileReadWriteError(err)
handle_read_event=handle_read # small speedup
def readable(self): #"""Predicate for inclusion in the readable for select()."""
        # If the channel is not supposed to be receiving but yet it's
# in the list of readable events, that means it has been
# disconnected, in which case we explicitly close() it.
# This is necessary as differently from FTPHandler this channel
# is not supposed to be readable/writable at first, meaning the
# upper IOLoop might end up calling readable() repeatedly,
# hogging CPU resources.
if not self.receive and not self._initialized: return self.close()
return self.receive
def writable(self): return not self.receive and asynchat.async_chat.writable(self) #"""Predicate for inclusion in the writable for select()."""
    def handle_timeout(self): #"""Called cyclically to check if data transfer is stalling with no progress in which case the client is kicked off. """
if self.get_transmitted_bytes() > self._lastdata: self._lastdata=self.get_transmitted_bytes()
else: msg="Data connection timed out."; self._resp=("421 "+msg,logger.info); self.close(); self.cmd_channel.close_when_done()
def handle_error(self): #"""Called when an exception is raised and not otherwise handled."""
try: raise
# an error could occur in case we fail reading / writing
# from / to file (e.g. file system gets full)
except _FileReadWriteError: err=sys.exc_info()[1]; error=_strerror(err.args[0])
except Exception:
# some other exception occurred; we don't want to provide
# confidential error messages
self.log_exception(self); error="Internal error"
try: self._resp=("426 %s; transfer aborted." % error,logger.warning); self.close()
except Exception: logger.critical(traceback.format_exc())
def handle_close(self): #"""Called when the socket is closed."""
# If we used channel for receiving we assume that transfer is
# finished when client closes the connection, if we used channel
# for sending we have to check that all data has been sent
# (responding with 226) or not (responding with 426).
# In both cases handle_close() is automatically called by the
# underlying asynchat module.
if not self._closed:
if self.receive: self.transfer_finished=True
else: self.transfer_finished=len(self.producer_fifo)==0
try:
if self.transfer_finished: self._resp=("226 Transfer complete.",logger.debug)
else: tot_bytes=self.get_transmitted_bytes(); self._resp=("426 Transfer aborted; %d bytes transmitted." % tot_bytes, logger.debug)
finally: self.close()
def close(self): #"""Close the data channel, first attempting to close any remaining file handles."""
if not self._closed:
self._closed=True
# RFC-959 says we must close the connection before replying
AsyncChat.close(self)
if self._resp: self.cmd_channel.respond(self._resp[0],logfun=self._resp[1])
if self.file_obj is not None and not self.file_obj.closed: self.file_obj.close()
if self._idler is not None and not self._idler.cancelled: self._idler.cancel()
if self.file_obj is not None:
filename=self.file_obj.name; elapsed_time=round(self.get_elapsed_time(),3); self.cmd_channel.log_transfer(cmd=self.cmd,filename=self.file_obj.name,receive=self.receive,completed=self.transfer_finished,elapsed=elapsed_time,bytes=self.get_transmitted_bytes())
if self.transfer_finished:
if self.receive: self.cmd_channel.on_file_received(filename)
else: self.cmd_channel.on_file_sent(filename)
else:
if self.receive: self.cmd_channel.on_incomplete_file_received(filename)
else: self.cmd_channel.on_incomplete_file_sent(filename)
self.cmd_channel._on_dtp_close()
# dirty hack in order to turn AsyncChat into a new style class in
# python 2.x so that we can use super()
if PY3:
class _AsyncChatNewStyle(AsyncChat): pass
else:
class _AsyncChatNewStyle(object, AsyncChat):
def __init__(self,*args,**kwargs): super(object, self).__init__(*args,**kwargs) # bypass object
class ThrottledDTPHandler(_AsyncChatNewStyle,DTPHandler):
"""A DTPHandler subclass which wraps sending and receiving in a data
counter and temporarily "sleeps" the channel so that you burst to no
more than x Kb/sec average.
- (int) read_limit: the maximum number of bytes to read (receive)
in one second (defaults to 0 == no limit).
- (int) write_limit: the maximum number of bytes to write (send)
in one second (defaults to 0 == no limit).
- (bool) auto_sized_buffers: this option only applies when read
and/or write limits are specified. When enabled it bumps down
the data buffer sizes so that they are never greater than read
and write limits which results in a less bursty and smoother
throughput (default: True)."""
read_limit=0; write_limit=0; auto_sized_buffers=True
def __init__(self,sock,cmd_channel):
super(ThrottledDTPHandler, self).__init__(sock,cmd_channel)
self._timenext=0; self._datacount=0; self.sleeping=False; self._throttler=None
if self.auto_sized_buffers:
if self.read_limit:
while self.ac_in_buffer_size > self.read_limit: self.ac_in_buffer_size /= 2
if self.write_limit:
while self.ac_out_buffer_size > self.write_limit: self.ac_out_buffer_size /= 2
self.ac_in_buffer_size=int(self.ac_in_buffer_size)
self.ac_out_buffer_size=int(self.ac_out_buffer_size)
def _use_sendfile(self,producer): return False
def recv(self,buffer_size):
chunk=super(ThrottledDTPHandler,self).recv(buffer_size)
if self.read_limit: self._throttle_bandwidth(len(chunk),self.read_limit)
return chunk
def send(self,data):
num_sent=super(ThrottledDTPHandler,self).send(data)
if self.write_limit: self._throttle_bandwidth(num_sent,self.write_limit)
return num_sent
def _cancel_throttler(self):
if self._throttler is not None and not self._throttler.cancelled: self._throttler.cancel()
def _throttle_bandwidth(self, len_chunk, max_speed): #"""A method which counts data transmitted so that you burst to no more than x Kb/sec average."""
self._datacount +=len_chunk
if self._datacount >= max_speed:
self._datacount=0
now=timer()
sleepfor=(self._timenext - now) * 2
if sleepfor > 0:
# we've passed bandwidth limits
def unsleep():
if self.receive: event=self.ioloop.READ
else: event=self.ioloop.WRITE
self.add_channel(events=event)
self.del_channel(); self._cancel_throttler(); self._throttler=self.ioloop.call_later(sleepfor,unsleep,_errback=self.handle_error)
self._timenext=now + 1
def close(self): self._cancel_throttler(); super(ThrottledDTPHandler,self).close()
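# Typical wiring (a sketch following pyftpdlib's documented usage; the limits
# below are arbitrary example values):
#   dtp_handler = ThrottledDTPHandler
#   dtp_handler.read_limit = 30720    # 30 Kb/sec (30 * 1024)
#   dtp_handler.write_limit = 30720   # 30 Kb/sec (30 * 1024)
#   ftp_handler = FTPHandler
#   ftp_handler.dtp_handler = dtp_handler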
# --- producers
class FileProducer(object): #"""Producer wrapper for file[-like] objects."""
buffer_size=65536
def __init__(self,file,type):
"""Initialize the producer with a data_wrapper appropriate to TYPE.
- (file) file: the file[-like] object.
- (str) type: the current TYPE, 'a' (ASCII) or 'i' (binary)."""
self.file=file; self.type=type
if (type=='a') and (os.linesep != '\r\n'): self._data_wrapper=lambda x: x.replace(b(os.linesep),b('\r\n'))
else: self._data_wrapper=None
def more(self): #"""Attempt a chunk of data of size self.buffer_size."""
try: data = self.file.read(self.buffer_size)
except OSError: err=sys.exc_info()[1]; raise _FileReadWriteError(err)
else:
if self._data_wrapper is not None: data=self._data_wrapper(data)
return data
class BufferedIteratorProducer(object): #"""Producer for iterator objects with buffer capabilities."""
# how many times iterator.next() will be called before # returning some data
loops=20
def __init__(self,iterator): self.iterator=iterator
def more(self): #"""Attempt a chunk of data from iterator by calling its next() method different times."""
buffer=[]
for x in xrange(self.loops):
try: buffer.append(next(self.iterator))
except StopIteration: break
return b('').join(buffer)
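# Illustrative example (an assumption, not from the original source): any
# object exposing more() -> bytes (b'' when exhausted) works as a producer:
#   producer = BufferedIteratorProducer(iter([b('foo\r\n'), b('bar\r\n')]))
#   chunk = producer.more()   # joins up to `loops` (20) items per call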
# --- FTP
class FTPHandler(AsyncChat):
"""Implements the FTP server Protocol Interpreter (see RFC-959), handling commands received from the client on the control channel.
All relevant session information is stored in class attributes reproduced below and can be modified before instantiating this class.
- (int) timeout:
The timeout which is the maximum time a remote client may spend
between FTP commands. If the timeout triggers, the remote client
will be kicked off. Defaults to 300 seconds.
- (str) banner: the string sent when client connects.
- (int) max_login_attempts:
the maximum number of wrong authentications before disconnecting the client (default 3).
- (bool)permit_foreign_addresses:
FTP site-to-site transfer feature: also referenced as "FXP" it
permits for transferring a file between two remote FTP servers
without the transfer going through the client's host (not
recommended for security reasons as described in RFC-2577).
Having this attribute set to False means that all data
connections from/to remote IP addresses which do not match the
        client's IP address will be dropped (default False).
- (bool) permit_privileged_ports:
set to True if you want to permit active data connections (PORT)
over privileged ports (not recommended, defaulting to False).
- (str) masquerade_address:
the "masqueraded" IP address to provide along PASV reply when
pyftpdlib is running behind a NAT or other types of gateways.
When configured pyftpdlib will hide its local address and
instead use the public address of your NAT (default None).
- (dict) masquerade_address_map:
in case the server has multiple IP addresses which are all
behind a NAT router, you may wish to specify individual
masquerade_addresses for each of them. The map expects a
dictionary containing private IP addresses as keys, and their
corresponding public (masquerade) addresses as values.
- (list) passive_ports:
what ports the ftpd will use for its passive data transfers.
Value expected is a list of integers (e.g. range(60000, 65535)).
When configured pyftpdlib will no longer use kernel-assigned random ports (default None).
- (bool) use_gmt_times:
when True causes the server to report all ls and MDTM times in
GMT and not local time (default True).
- (bool) use_sendfile: when True uses sendfile() system call to
send a file resulting in faster uploads (from server to client).
Works on UNIX only and requires pysendfile module to be installed separately:
http://code.google.com/p/pysendfile/
Automatically defaults to True if pysendfile module is installed.
- (bool) tcp_no_delay: controls the use of the TCP_NODELAY socket
option which disables the Nagle algorithm resulting in
significantly better performances (default True on all systems where it is supported).
- (str) unicode_errors:
the error handler passed to ''.encode() and ''.decode():
http://docs.python.org/library/stdtypes.html#str.decode
        (defaults to 'replace').
- (str) log_prefix:
the prefix string preceding any log line; all instance attributes can be used as arguments.
All relevant instance attributes initialized when client connects are reproduced below.
You may be interested in them in case you want to subclass the original FTPHandler.
- (bool) authenticated: True if client authenticated himself.
- (str) username: the name of the connected user (if any).
- (int) attempted_logins: number of currently attempted logins.
- (str) current_type: the current transfer type (default "a")
- (int) af: the connection's address family (IPv4/IPv6)
- (instance) server: the FTPServer class instance.
- (instance) data_channel: the data channel instance (if any)."""
# these are overridable defaults
# default classes
authorizer=DummyAuthorizer(); active_dtp=ActiveDTP; passive_dtp=PassiveDTP; dtp_handler=DTPHandler; abstracted_fs=AbstractedFS; proto_cmds=proto_cmds
# session attributes (explained in the docstring)
timeout=300; banner="pyftpdlib %s ready." % __ver__; max_login_attempts=3; permit_foreign_addresses=False; permit_privileged_ports=False; masquerade_address=None; masquerade_address_map={}; passive_ports=None; use_gmt_times=True
use_sendfile=sendfile is not None
tcp_no_delay=hasattr(socket,"TCP_NODELAY"); unicode_errors='replace'; log_prefix='%(remote_ip)s:%(remote_port)s-[%(username)s]'
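    # Example configuration (illustrative values only), typically applied on
    # the class itself before handing it to FTPServer:
    #   handler = FTPHandler
    #   handler.banner = "my ftpd"
    #   handler.passive_ports = range(60000, 65535)
    #   handler.masquerade_address = '151.25.42.11'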
def __init__(self, conn, server, ioloop=None):
"""Initialize the command channel.
- (instance) conn: the socket object instance of the newly established connection.
- (instance) server: the ftp server class instance. """
#if self.IsServerOn==False:
# self.close()
# sys.exit()
# return
# public session attributes
        self.server=server; self.fs=None; self.authenticated=False
        self.username=""; self.password=""; self.attempted_logins=0
        self.data_channel=None; self.remote_ip=""; self.remote_port=""
        # private session attributes
        self._last_response=""; self._current_type='a'; self._restart_position=0; self._quit_pending=False; self._af= -1
        self._in_buffer=[]; self._in_buffer_len=0; self._epsvall=False
        self._dtp_acceptor=None; self._dtp_connector=None; self._in_dtp_queue=None; self._out_dtp_queue=None
        self._closed=False; self._extra_feats=[]; self._current_facts=['type','perm','size','modify']
        self._rnfr=None; self._idler=None
self._log_debug=logging.getLogger('pyftpdlib').getEffectiveLevel() <= logging.DEBUG
if os.name=='posix': self._current_facts.append('unique')
self._available_facts=self._current_facts[:]
if pwd and grp: self._available_facts +=['unix.mode','unix.uid','unix.gid']
if os.name=='nt': self._available_facts.append('create')
try: AsyncChat.__init__(self,conn,ioloop=ioloop)
except socket.error:
err=sys.exc_info()[1]
# if we get an exception here we want the dispatcher
# instance to set socket attribute before closing, see:
# http://code.google.com/p/pyftpdlib/issues/detail?id=188
AsyncChat.__init__(self,socket.socket(),ioloop=ioloop); self.close()
if err.args[0]==errno.EINVAL: return # http://code.google.com/p/pyftpdlib/issues/detail?id=143
self.handle_error(); return
self.set_terminator(b("\r\n"))
# connection properties
try: self.remote_ip,self.remote_port=self.socket.getpeername()[:2]
except socket.error:
err=sys.exc_info()[1]
# A race condition may occur if the other end is closing
# before we can get the peername, hence ENOTCONN (see issue
# #100) while EINVAL can occur on OSX (see issue #143).
self.connected=False
if err.args[0] in (errno.ENOTCONN,errno.EINVAL): self.close()
else: self.handle_error()
return
else: self.log("FTP session opened (connect)")
if hasattr(self.socket,'family'): self._af=self.socket.family
else: # python < 2.5
ip, port=self.socket.getsockname()[:2]; self._af=socket.getaddrinfo(ip,port,socket.AF_UNSPEC,socket.SOCK_STREAM)[0][0]
# try to handle urgent data inline
try: self.socket.setsockopt(socket.SOL_SOCKET,socket.SO_OOBINLINE,1)
except socket.error: pass
# disable Nagle algorithm for the control socket only, resulting
# in significantly better performances
if self.tcp_no_delay:
try: self.socket.setsockopt(socket.SOL_TCP,socket.TCP_NODELAY,1)
except socket.error: pass
# remove this instance from IOLoop's socket_map
if not self.connected: self.close(); return
if self.timeout: self._idler=self.ioloop.call_later(self.timeout,self.handle_timeout,_errback=self.handle_error)
def __repr__(self):
status=[self.__class__.__module__+ "." + self.__class__.__name__]
status.append("(addr=%s:%s, user=%r)" % (self.remote_ip,self.remote_port,self.username or ''))
return '<%s at %#x>' % (' '.join(status),id(self))
__str__=__repr__
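    # Addon kill-switch: the FTP callbacks below poll the Kodi setting via
    # IsServerOn() and raise SystemExit (sys.exit()) to tear the session down
    # as soon as the user disables the server.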
def IsServerOn(self): return tfalse(getAddonSetting("server_enable","false"))
def handle(self): #"""Return a 220 'ready' response to the client over the command channel."""
if (self.IsServerOn()==False): sys.exit();
self.on_connect()
if not self._closed and not self._closing:
if len(self.banner) <= 75: self.respond("220 %s" % str(self.banner))
else: self.push('220-%s\r\n' % str(self.banner)); self.respond('220 ')
def handle_max_cons(self): #"""Called when limit for maximum number of connections is reached."""
#if (self.IsServerOn()==False): sys.exit();
msg="421 Too many connections. Service temporarily unavailable."
self.respond_w_warning(msg)
# If self.push is used, data could not be sent immediately in
# which case a new "loop" will occur exposing us to the risk of
# accepting new connections. Since this could cause asyncore to
# run out of fds in case we're using select() on Windows we
# immediately close the channel by using close() instead of
# close_when_done(). If data has not been sent yet client will
# be silently disconnected.
self.close()
def handle_max_cons_per_ip(self): #"""Called when too many clients are connected from the same IP."""
#if (self.IsServerOn()==False): sys.exit();
msg="421 Too many connections from the same IP address."; self.respond_w_warning(msg); self.close_when_done()
def handle_timeout(self): #"""Called when client does not send any command within the time specified in <timeout> attribute."""
#if (self.IsServerOn()==False): sys.exit();
msg="Control connection timed out."; self.respond("421 "+msg,logfun=logger.info); self.close_when_done()
# --- asyncore / asynchat overridden methods
def readable(self):
# Checking for self.connected seems to be necessary as per:
# http://code.google.com/p/pyftpdlib/issues/detail?id=188#c18
# In contrast to DTPHandler, here we are not interested in
# attempting to receive any further data from a closed socket.
#if (self.IsServerOn()==False): sys.exit();
return self.connected and AsyncChat.readable(self)
def writable(self):
#if (self.IsServerOn()==False): sys.exit();
return self.connected and AsyncChat.writable(self)
def collect_incoming_data(self,data): #"""Read incoming data and append to the input buffer."""
self._in_buffer.append(data)
self._in_buffer_len +=len(data)
# Flush buffer if it gets too long (possible DoS attacks).
# RFC-959 specifies that a 500 response could be given in
# such cases
buflimit=2048
if self._in_buffer_len > buflimit: self.respond_w_warning('500 Command too long.'); self._in_buffer=[]; self._in_buffer_len=0
def decode(self,bytes): return bytes.decode('utf8',self.unicode_errors)
def found_terminator(self):
r"""Called when the incoming data stream matches the \r\n terminator."""
#if (self.IsServerOn()==False): sys.exit();
if self._idler is not None and not self._idler.cancelled: self._idler.reset()
line=b('').join(self._in_buffer)
try: line=self.decode(line)
except UnicodeDecodeError:
# By default we'll never get here as we replace errors
# but user might want to override this behavior.
# RFC-2640 doesn't mention what to do in this case so
# we'll just return 501 (bad arg).
return self.respond("501 Can't decode command.")
self._in_buffer=[]; self._in_buffer_len=0; cmd=line.split(' ')[0].upper(); arg=line[len(cmd)+1:]
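        # At this point a line such as 'MKD foo bar' (hypothetical input)
        # has been split into cmd 'MKD' and arg 'foo bar'.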
try: self.pre_process_command(line,cmd,arg)
except UnicodeEncodeError: self.respond("501 can't decode path (server filesystem encoding is %s)" % sys.getfilesystemencoding())
def pre_process_command(self,line,cmd,arg):
        if not self.IsServerOn(): sys.exit()
##if self.IsServerOn==False: cmd='QUIT'; return
kwargs={}
if cmd == "SITE" and arg: cmd="SITE %s" % arg.split(' ')[0].upper(); arg=line[len(cmd)+1:]
if cmd != 'PASS': self.logline("<- %s" % line)
else: self.logline("<- %s %s" % (line.split(' ')[0],'*' * 6))
# Recognize those commands having a "special semantic". They
# should be sent by following the RFC-959 procedure of sending
# Telnet IP/Synch sequence (chr 242 and 255) as OOB data but
# since many ftp clients don't do it correctly we check the
# last 4 characters only.
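        # Illustrative example (raw bytes hypothetical): an aborting client
        # may send something like '<IAC><IP><IAC><DM>ABOR'; the decoded
        # command still ends in 'ABOR', so the [-4:] slice below matches it.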
        if cmd not in self.proto_cmds:
if cmd[-4:] in ('ABOR','STAT','QUIT'): cmd=cmd[-4:]
else:
msg='Command "%s" not understood.' % cmd; self.respond('500 '+msg)
if cmd: self.log_cmd(cmd,arg,500,msg)
return
if not arg and self.proto_cmds[cmd]['arg'] == True: msg="Syntax error: command needs an argument."; self.respond("501 "+msg); self.log_cmd(cmd,"",501,msg); return
if arg and self.proto_cmds[cmd]['arg'] == False: msg="Syntax error: command does not accept arguments."; self.respond("501 "+msg); self.log_cmd(cmd,arg,501,msg); return
if not self.authenticated:
if self.proto_cmds[cmd]['auth'] or (cmd == 'STAT' and arg): msg="Log in with USER and PASS first."; self.respond("530 "+msg); self.log_cmd(cmd,arg,530,msg)
else: self.process_command(cmd,arg); return # call the proper ftp_* method
else:
if (cmd=='STAT') and not arg: self.ftp_STAT(u('')); return
# for file-system related commands check whether real path
# destination is valid
if self.proto_cmds[cmd]['perm'] and (cmd != 'STOU'):
if cmd in ('CWD','XCWD'): arg=self.fs.ftp2fs(arg or u('/'))
elif cmd in ('CDUP','XCUP'): arg=self.fs.ftp2fs(u('..'))
elif cmd=='LIST':
if arg.lower() in ('-a','-l','-al','-la'): arg=self.fs.ftp2fs(self.fs.cwd)
else: arg=self.fs.ftp2fs(arg or self.fs.cwd)
elif cmd=='STAT':
if glob.has_magic(arg): msg='Globbing not supported.'; self.respond('550 '+msg); self.log_cmd(cmd,arg,550,msg); return
arg=self.fs.ftp2fs(arg or self.fs.cwd)
elif cmd=='SITE CHMOD':
if not ' ' in arg: msg="Syntax error: command needs two arguments."; self.respond("501 "+msg); self.log_cmd(cmd,"",501,msg); return
else: mode,arg=arg.split(' ',1); arg=self.fs.ftp2fs(arg); kwargs=dict(mode=mode)
else: arg=self.fs.ftp2fs(arg or self.fs.cwd) # LIST, NLST, MLSD, MLST
if not self.fs.validpath(arg): line=self.fs.fs2ftp(arg); msg='"%s" points to a path which is outside '+"the user's root directory"; msg=msg % line; self.respond("550 %s." % msg); self.log_cmd(cmd,arg,550,msg); return
# check permission
perm=self.proto_cmds[cmd]['perm']
if perm is not None and cmd != 'STOU':
if not self.authorizer.has_perm(self.username,perm,arg): msg="Not enough privileges."; self.respond("550 "+msg); self.log_cmd(cmd,arg,550,msg); return
# call the proper ftp_* method
self.process_command(cmd,arg,**kwargs)
def process_command(self,cmd,*args,**kwargs):
"""Process command by calling the corresponding ftp_* class
method (e.g. for received command "MKD pathname", ftp_MKD()
method is called with "pathname" as the argument)."""
#if (self.IsServerOn()==False): sys.exit();
if self._closed: return
self._last_response=""; method=getattr(self,'ftp_'+cmd.replace(' ','_'))
method(*args,**kwargs)
if self._last_response: code=int(self._last_response[:3]); resp=self._last_response[4:]; self.log_cmd(cmd,args[0],code,resp)
def handle_error(self):
try: self.log_exception(self); self.close()
except Exception: logger.critical(traceback.format_exc())
def handle_close(self): self.close()
def close(self):
"""Close the current channel disconnecting the client."""
if not self._closed:
self._closed=True; self._closing=False; self.connected=False; AsyncChat.close(self); self._shutdown_connecting_dtp()
if self.data_channel is not None: self.data_channel.close(); del self.data_channel
if self._out_dtp_queue is not None:
file=self._out_dtp_queue[2]
if file is not None: file.close()
if self._in_dtp_queue is not None:
file=self._in_dtp_queue[0]
if file is not None: file.close()
del self._out_dtp_queue; del self._in_dtp_queue
if self._idler is not None and not self._idler.cancelled: self._idler.cancel()
# remove client IP address from ip map
if self.remote_ip in self.server.ip_map: self.server.ip_map.remove(self.remote_ip)
if self.fs is not None: self.fs.cmd_channel=None; self.fs=None
self.log("FTP session closed (disconnect).")
# Having self.remote_ip not set means that no connection
# actually took place, hence we're not interested in
# invoking the callback.
if self.remote_ip: self.ioloop.call_later(0,self.on_disconnect,_errback=self.handle_error)
def _shutdown_connecting_dtp(self):
"""Close any ActiveDTP or PassiveDTP instance waiting to
establish a connection (passive or active)."""
if self._dtp_acceptor is not None: self._dtp_acceptor.close(); self._dtp_acceptor=None
if self._dtp_connector is not None: self._dtp_connector.close(); self._dtp_connector=None
# --- public callbacks
# Note: to run a time consuming task make sure to use a separate
# process or thread (see FAQs).
def on_connect(self):
"""Called when client connects, *before* sending the initial 220 reply."""
def on_disconnect(self):
"""Called when connection is closed."""
def on_login(self, username):
"""Called on user login."""
def on_login_failed(self, username, password):
"""Called on failed login attempt.
At this point client might have already been disconnected if it
failed too many times."""
def on_logout(self, username):
"""Called when user "cleanly" logs out due to QUIT or USER
issued twice (re-login). This is not called if the connection
is simply closed by client."""
def on_file_sent(self, file):
"""Called every time a file has been succesfully sent.
"file" is the absolute name of the file just being sent. """
def on_file_received(self, file):
"""Called every time a file has been succesfully received.
"file" is the absolute name of the file just being received. """
def on_incomplete_file_sent(self, file):
"""Called every time a file has not been entirely sent.
(e.g. ABOR during transfer or client disconnected).
"file" is the absolute name of that file. """
def on_incomplete_file_received(self, file):
"""Called every time a file has not been entirely received
(e.g. ABOR during transfer or client disconnected).
"file" is the absolute name of that file.
"""
# --- internal callbacks
def _on_dtp_connection(self):
"""Called every time data channel connects, either active or
passive.
Incoming and outgoing queues are checked for pending data.
If outbound data is pending, it is pushed into the data channel.
If awaiting inbound data, the data channel is enabled for
receiving.
"""
# Close accepting DTP only. By closing ActiveDTP DTPHandler
# would receive a closed socket object.
#self._shutdown_connecting_dtp()
if self._dtp_acceptor is not None:
self._dtp_acceptor.close()
self._dtp_acceptor = None
# stop the idle timer as long as the data transfer is not finished
if self._idler is not None and not self._idler.cancelled:
self._idler.cancel()
# check for data to send
if self._out_dtp_queue is not None:
data, isproducer, file, cmd = self._out_dtp_queue
self._out_dtp_queue = None
self.data_channel.cmd = cmd
if file:
self.data_channel.file_obj = file
try:
if not isproducer:
self.data_channel.push(data)
else:
self.data_channel.push_with_producer(data)
if self.data_channel is not None:
self.data_channel.close_when_done()
except:
# dealing with this exception is up to DTP (see bug #84)
self.data_channel.handle_error()
# check for data to receive
elif self._in_dtp_queue is not None:
file, cmd = self._in_dtp_queue
self.data_channel.file_obj = file
self._in_dtp_queue = None
self.data_channel.enable_receiving(self._current_type, cmd)
def _on_dtp_close(self):
"""Called every time the data channel is closed."""
self.data_channel = None
if self._quit_pending:
self.close()
elif self.timeout:
# data transfer finished, restart the idle timer
if self._idler is not None and not self._idler.cancelled:
self._idler.cancel()
self._idler = self.ioloop.call_later(self.timeout,
self.handle_timeout,
_errback=self.handle_error)
# --- utility
def push(self, s):
asynchat.async_chat.push(self, s.encode('utf8'))
def respond(self, resp, logfun=logger.debug):
"""Send a response to the client using the command channel."""
self._last_response = resp
self.push(resp + '\r\n')
if self._log_debug:
self.logline('-> %s' % resp, logfun=logfun)
else:
self.log(resp[4:], logfun=logfun)
def respond_w_warning(self, resp):
self.respond(resp, logfun=logger.warning)
def push_dtp_data(self, data, isproducer=False, file=None, cmd=None):
"""Pushes data into the data channel.
It is usually called for those commands requiring some data to
be sent over the data channel (e.g. RETR).
        If the data channel does not exist yet, the data is queued to be
        sent later; it will then be pushed into the data channel when
        _on_dtp_connection() is called.
        - (str/classobj) data: the data to send (may be a string or
          a producer object).
        - (bool) isproducer: whether to treat data as a producer.
- (file) file: the file[-like] object to send (if any).
"""
if self.data_channel is not None:
self.respond("125 Data connection already open. Transfer starting.")
if file:
self.data_channel.file_obj = file
try:
if not isproducer:
self.data_channel.push(data)
else:
self.data_channel.push_with_producer(data)
if self.data_channel is not None:
self.data_channel.cmd = cmd
self.data_channel.close_when_done()
except:
# dealing with this exception is up to DTP (see bug #84)
self.data_channel.handle_error()
else:
self.respond("150 File status okay. About to open data connection.")
self._out_dtp_queue = (data, isproducer, file, cmd)
def flush_account(self):
"""Flush account information by clearing attributes that need
to be reset on a REIN or new USER command.
"""
self._shutdown_connecting_dtp()
# if there's a transfer in progress RFC-959 states we are
# supposed to let it finish
if self.data_channel is not None:
if not self.data_channel.transfer_in_progress():
self.data_channel.close()
self.data_channel = None
username = self.username
if self.authenticated and username:
self.on_logout(username)
self.authenticated = False
self.username = ""
self.password = ""
self.attempted_logins = 0
self._current_type = 'a'
self._restart_position = 0
self._quit_pending = False
self._in_dtp_queue = None
self._rnfr = None
self._out_dtp_queue = None
def run_as_current_user(self, function, *args, **kwargs):
"""Execute a function impersonating the current logged-in user."""
self.authorizer.impersonate_user(self.username, self.password)
try:
return function(*args, **kwargs)
finally:
self.authorizer.terminate_impersonation(self.username)
# --- logging wrappers
# this is defined earlier
#log_prefix = '%(remote_ip)s:%(remote_port)s-[%(username)s]'
def log(self, msg, logfun=logger.info):
"""Log a message, including additional identifying session data."""
prefix = self.log_prefix % self.__dict__
logfun("%s %s" % (prefix, msg))
def logline(self, msg, logfun=logger.debug):
"""Log a line including additional indentifying session data.
By default this is disabled unless logging level == DEBUG.
"""
if self._log_debug:
prefix = self.log_prefix % self.__dict__
logfun("%s %s" % (prefix, msg))
def logerror(self, msg):
"""Log an error including additional indentifying session data."""
prefix = self.log_prefix % self.__dict__
logger.error("%s %s" % (prefix, msg))
def log_exception(self, instance):
"""Log an unhandled exception. 'instance' is the instance
where the exception was generated.
"""
logger.exception("unhandled exception in instance %r", instance)
# the list of commands which gets logged when logging level
# is >= logging.INFO
log_cmds_list = ["DELE", "RNFR", "RNTO", "MKD", "RMD", "CWD",
"XMKD", "XRMD", "XCWD",
"REIN", "SITE CHMOD"]
def log_cmd(self, cmd, arg, respcode, respstr):
"""Log commands and responses in a standardized format.
This is disabled in case the logging level is set to DEBUG.
- (str) cmd:
the command sent by client
- (str) arg:
the command argument sent by client.
For filesystem commands such as DELE, MKD, etc. this is
already represented as an absolute real filesystem path
like "/home/user/file.ext".
- (int) respcode:
the response code as being sent by server. Response codes
starting with 4xx or 5xx are returned if the command has
been rejected for some reason.
- (str) respstr:
the response string as being sent by server.
By default only DELE, RMD, RNTO, MKD, CWD, ABOR, REIN, SITE CHMOD
commands are logged and the output is redirected to self.log
method.
Can be overridden to provide alternate formats or to log
further commands.
"""
if not self._log_debug and cmd in self.log_cmds_list:
line = '%s %s' % (' '.join([cmd, arg]).strip(), respcode)
if str(respcode)[0] in ('4', '5'):
line += ' %r' % respstr
self.log(line)
def log_transfer(self, cmd, filename, receive, completed, elapsed, bytes):
"""Log all file transfers in a standardized format.
- (str) cmd:
            the original command that caused the transfer.
- (str) filename:
the absolutized name of the file on disk.
- (bool) receive:
True if the transfer was used for client uploading (STOR,
STOU, APPE), False otherwise (RETR).
- (bool) completed:
True if the file has been entirely sent, else False.
- (float) elapsed:
transfer elapsed time in seconds.
- (int) bytes:
number of bytes transmitted.
"""
line = '%s %s completed=%s bytes=%s seconds=%s' % \
(cmd, filename, completed and 1 or 0, bytes, elapsed)
self.log(line)
# --- connection
def _make_eport(self, ip, port):
"""Establish an active data channel with remote client which
issued a PORT or EPRT command.
"""
# FTP bounce attacks protection: according to RFC-2577 it's
# recommended to reject PORT if IP address specified in it
# does not match client IP address.
remote_ip = self.remote_ip
if remote_ip.startswith('::ffff:'):
# In this scenario, the server has an IPv6 socket, but
# the remote client is using IPv4 and its address is
# represented as an IPv4-mapped IPv6 address which
# looks like this ::ffff:151.12.5.65, see:
# http://en.wikipedia.org/wiki/IPv6#IPv4-mapped_addresses
# http://tools.ietf.org/html/rfc3493.html#section-3.7
# We truncate the first bytes to make it look like a
# common IPv4 address.
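            # Worked example (address hypothetical):
            #   '::ffff:151.12.5.65'[7:] -> '151.12.5.65'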
remote_ip = remote_ip[7:]
if not self.permit_foreign_addresses and ip != remote_ip:
msg = "501 Rejected data connection to foreign address %s:%s." \
% (ip, port)
self.respond_w_warning(msg)
return
# ...another RFC-2577 recommendation is rejecting connections
# to privileged ports (< 1024) for security reasons.
if not self.permit_privileged_ports and port < 1024:
msg = '501 PORT against the privileged port "%s" refused.' % port
self.respond_w_warning(msg)
return
# close establishing DTP instances, if any
self._shutdown_connecting_dtp()
if self.data_channel is not None:
self.data_channel.close()
self.data_channel = None
# make sure we are not hitting the max connections limit
if not self.server._accept_new_cons():
msg = "425 Too many connections. Can't open data channel."
self.respond_w_warning(msg)
return
# open data channel
self._dtp_connector = self.active_dtp(ip, port, self)
def _make_epasv(self, extmode=False):
"""Initialize a passive data channel with remote client which
issued a PASV or EPSV command.
If extmode argument is True we assume that client issued EPSV in
which case extended passive mode will be used (see RFC-2428).
"""
# close establishing DTP instances, if any
self._shutdown_connecting_dtp()
# close established data connections, if any
if self.data_channel is not None:
self.data_channel.close()
self.data_channel = None
# make sure we are not hitting the max connections limit
if not self.server._accept_new_cons():
msg = "425 Too many connections. Can't open data channel."
self.respond_w_warning(msg)
return
# open data channel
self._dtp_acceptor = self.passive_dtp(self, extmode)
def ftp_PORT(self, line):
"""Start an active data channel by using IPv4."""
if self._epsvall:
self.respond("501 PORT not allowed after EPSV ALL.")
return
# Parse PORT request for getting IP and PORT.
# Request comes in as:
# > h1,h2,h3,h4,p1,p2
# ...where the client's IP address is h1.h2.h3.h4 and the TCP
# port number is (p1 * 256) + p2.
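        # Worked example (hypothetical request):
        #   "PORT 192,168,0,1,14,178" -> ip '192.168.0.1',
        #   port 14 * 256 + 178 == 3762.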
try:
addr = list(map(int, line.split(',')))
if len(addr) != 6:
raise ValueError
for x in addr[:4]:
if not 0 <= x <= 255:
raise ValueError
ip = '%d.%d.%d.%d' % tuple(addr[:4])
port = (addr[4] * 256) + addr[5]
if not 0 <= port <= 65535:
raise ValueError
except (ValueError, OverflowError):
self.respond("501 Invalid PORT format.")
return
self._make_eport(ip, port)
def ftp_EPRT(self, line):
"""Start an active data channel by choosing the network protocol
to use (IPv4/IPv6) as defined in RFC-2428.
"""
if self._epsvall:
self.respond("501 EPRT not allowed after EPSV ALL.")
return
# Parse EPRT request for getting protocol, IP and PORT.
# Request comes in as:
# <d>proto<d>ip<d>port<d>
# ...where <d> is an arbitrary delimiter character (usually "|") and
# <proto> is the network protocol to use (1 for IPv4, 2 for IPv6).
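        # Worked example (hypothetical request, RFC-2428 style):
        #   "EPRT |1|132.235.1.2|6275|" -> af '1', ip '132.235.1.2',
        #   port 6275.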
try:
af, ip, port = line.split(line[0])[1:-1]
port = int(port)
if not 0 <= port <= 65535:
raise ValueError
except (ValueError, IndexError, OverflowError):
self.respond("501 Invalid EPRT format.")
return
if af == "1":
# test if AF_INET6 and IPV6_V6ONLY
if self._af == socket.AF_INET6 and not SUPPORTS_HYBRID_IPV6:
self.respond('522 Network protocol not supported (use 2).')
else:
try:
octs = list(map(int, ip.split('.')))
if len(octs) != 4:
raise ValueError
for x in octs:
if not 0 <= x <= 255:
raise ValueError
except (ValueError, OverflowError):
self.respond("501 Invalid EPRT format.")
else:
self._make_eport(ip, port)
elif af == "2":
if self._af == socket.AF_INET:
self.respond('522 Network protocol not supported (use 1).')
else:
self._make_eport(ip, port)
else:
if self._af == socket.AF_INET:
self.respond('501 Unknown network protocol (use 1).')
else:
self.respond('501 Unknown network protocol (use 2).')
def ftp_PASV(self, line):
"""Start a passive data channel by using IPv4."""
if self._epsvall:
self.respond("501 PASV not allowed after EPSV ALL.")
return
self._make_epasv(extmode=False)
def ftp_EPSV(self, line):
"""Start a passive data channel by using IPv4 or IPv6 as defined
in RFC-2428.
"""
# RFC-2428 specifies that if an optional parameter is given,
# we have to determine the address family from that otherwise
# use the same address family used on the control connection.
# In such a scenario a client may use IPv4 on the control channel
# and choose to use IPv6 for the data channel.
# But how could we use IPv6 on the data channel without knowing
# which IPv6 address to use for binding the socket?
        # Unfortunately RFC-2428 does not provide satisfying information
        # on how to do that. The assumption is that we don't have any way
        # to know which address to use, hence we just use the same address
# family used on the control connection.
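        # Illustrative exchange (port value hypothetical; the 229 reply
        # is produced by the passive DTP class, not shown here):
        #   C> EPSV        S> 229 Entering extended passive mode (|||60123|).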
if not line:
self._make_epasv(extmode=True)
# IPv4
elif line == "1":
if self._af != socket.AF_INET:
self.respond('522 Network protocol not supported (use 2).')
else:
self._make_epasv(extmode=True)
# IPv6
elif line == "2":
if self._af == socket.AF_INET:
self.respond('522 Network protocol not supported (use 1).')
else:
self._make_epasv(extmode=True)
elif line.lower() == 'all':
self._epsvall = True
self.respond('220 Other commands other than EPSV are now disabled.')
else:
if self._af == socket.AF_INET:
self.respond('501 Unknown network protocol (use 1).')
else:
self.respond('501 Unknown network protocol (use 2).')
def ftp_QUIT(self, line):
"""Quit the current session disconnecting the client."""
if self.authenticated:
msg_quit = self.authorizer.get_msg_quit(self.username)
else:
msg_quit = "Goodbye."
if len(msg_quit) <= 75:
self.respond("221 %s" % msg_quit)
else:
self.push("221-%s\r\n" % msg_quit)
self.respond("221 ")
# From RFC-959:
# If file transfer is in progress, the connection must remain
# open for result response and the server will then close it.
# We also stop responding to any further command.
if self.data_channel:
self._quit_pending = True
self.del_channel()
else:
self._shutdown_connecting_dtp()
self.close_when_done()
if self.authenticated and self.username:
self.on_logout(self.username)
# --- data transferring
def ftp_LIST(self, path):
"""Return a list of files in the specified directory to the
client.
On success return the directory path, else None.
"""
# - If no argument, fall back on cwd as default.
# - Some older FTP clients erroneously issue /bin/ls-like LIST
# formats in which case we fall back on cwd as default.
try:
iterator = self.run_as_current_user(self.fs.get_list_dir, path)
except (OSError, FilesystemError):
err = sys.exc_info()[1]
why = _strerror(err)
self.respond('550 %s.' % why)
else:
producer = BufferedIteratorProducer(iterator)
self.push_dtp_data(producer, isproducer=True, cmd="LIST")
return path
def ftp_NLST(self, path):
"""Return a list of files in the specified directory in a
compact form to the client.
On success return the directory path, else None.
"""
try:
if self.fs.isdir(path):
listing = self.run_as_current_user(self.fs.listdir, path)
else:
# if path is a file we just list its name
self.fs.lstat(path) # raise exc in case of problems
listing = [os.path.basename(path)]
except (OSError, FilesystemError):
err = sys.exc_info()[1]
self.respond('550 %s.' % _strerror(err))
else:
data = ''
if listing:
try:
listing.sort()
except UnicodeDecodeError:
# (Python 2 only) might happen on filesystem not
# supporting UTF8 meaning os.listdir() returned a list
# of mixed bytes and unicode strings:
# http://goo.gl/6DLHD
# http://bugs.python.org/issue683592
ls = []
for x in listing:
if not isinstance(x, unicode):
x = unicode(x, 'utf8')
ls.append(x)
listing = sorted(ls)
data = '\r\n'.join(listing) + '\r\n'
data = data.encode('utf8', self.unicode_errors)
self.push_dtp_data(data, cmd="NLST")
return path
# --- MLST and MLSD commands
# The MLST and MLSD commands are intended to standardize the file and
# directory information returned by the server-FTP process. These
# commands differ from the LIST command in that the format of the
# replies is strictly defined although extensible.
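    # Illustrative MLSD/MLST fact line (facts and values hypothetical):
    #   "type=file;size=156;modify=20120807211542; music.mp3"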
def ftp_MLST(self, path):
"""Return information about a pathname in a machine-processable
form as defined in RFC-3659.
On success return the path just listed, else None.
"""
line = self.fs.fs2ftp(path)
basedir, basename = os.path.split(path)
perms = self.authorizer.get_perms(self.username)
try:
iterator = self.run_as_current_user(self.fs.format_mlsx, basedir,
[basename], perms, self._current_facts, ignore_err=False)
data = b('').join(iterator)
except (OSError, FilesystemError):
err = sys.exc_info()[1]
self.respond('550 %s.' % _strerror(err))
else:
data = data.decode('utf8', self.unicode_errors)
# since TVFS is supported (see RFC-3659 chapter 6), a fully
# qualified pathname should be returned
data = data.split(' ')[0] + ' %s\r\n' % line
# response is expected on the command channel
self.push('250-Listing "%s":\r\n' % line)
# the fact set must be preceded by a space
self.push(' ' + data)
self.respond('250 End MLST.')
return path
def ftp_MLSD(self, path):
"""Return contents of a directory in a machine-processable form
as defined in RFC-3659.
On success return the path just listed, else None.
"""
# RFC-3659 requires 501 response code if path is not a directory
if not self.fs.isdir(path):
self.respond("501 No such directory.")
return
try:
listing = self.run_as_current_user(self.fs.listdir, path)
except (OSError, FilesystemError):
err = sys.exc_info()[1]
why = _strerror(err)
self.respond('550 %s.' % why)
else:
perms = self.authorizer.get_perms(self.username)
iterator = self.fs.format_mlsx(path, listing, perms,
self._current_facts)
producer = BufferedIteratorProducer(iterator)
self.push_dtp_data(producer, isproducer=True, cmd="MLSD")
return path
def ftp_RETR(self, file):
"""Retrieve the specified file (transfer from the server to the
client). On success return the file path else None.
"""
rest_pos = self._restart_position
self._restart_position = 0
try:
fd = self.run_as_current_user(self.fs.open, file, 'rb')
except (EnvironmentError, FilesystemError):
err = sys.exc_info()[1]
why = _strerror(err)
self.respond('550 %s.' % why)
return
if rest_pos:
# Make sure that the requested offset is valid (within the
# size of the file being resumed).
# According to RFC-1123 a 554 reply may result in case that
# the existing file cannot be repositioned as specified in
# the REST.
ok = 0
try:
if rest_pos > self.fs.getsize(file):
raise ValueError
fd.seek(rest_pos)
ok = 1
except ValueError:
why = "Invalid REST parameter"
except (EnvironmentError, FilesystemError):
err = sys.exc_info()[1]
why = _strerror(err)
if not ok:
fd.close()
self.respond('554 %s' % why)
return
producer = FileProducer(fd, self._current_type)
self.push_dtp_data(producer, isproducer=True, file=fd, cmd="RETR")
return file
def ftp_STOR(self, file, mode='w'):
"""Store a file (transfer from the client to the server).
On success return the file path, else None.
"""
# A resume could occur in case of APPE or REST commands.
# In that case we have to open file object in different ways:
# STOR: mode = 'w'
# APPE: mode = 'a'
# REST: mode = 'r+' (to permit seeking on file object)
if 'a' in mode:
cmd = 'APPE'
else:
cmd = 'STOR'
rest_pos = self._restart_position
self._restart_position = 0
if rest_pos:
mode = 'r+'
try:
fd = self.run_as_current_user(self.fs.open, file, mode + 'b')
except (EnvironmentError, FilesystemError):
err = sys.exc_info()[1]
why = _strerror(err)
self.respond('550 %s.' %why)
return
if rest_pos:
# Make sure that the requested offset is valid (within the
# size of the file being resumed).
# According to RFC-1123 a 554 reply may result in case
# that the existing file cannot be repositioned as
# specified in the REST.
ok = 0
try:
if rest_pos > self.fs.getsize(file):
raise ValueError
fd.seek(rest_pos)
ok = 1
except ValueError:
why = "Invalid REST parameter"
except (EnvironmentError, FilesystemError):
err = sys.exc_info()[1]
why = _strerror(err)
if not ok:
fd.close()
self.respond('554 %s' % why)
return
if self.data_channel is not None:
resp = "Data connection already open. Transfer starting."
self.respond("125 " + resp)
self.data_channel.file_obj = fd
self.data_channel.enable_receiving(self._current_type, cmd)
else:
resp = "File status okay. About to open data connection."
self.respond("150 " + resp)
self._in_dtp_queue = (fd, cmd)
return file
def ftp_STOU(self, line):
"""Store a file on the server with a unique name.
On success return the file path, else None.
"""
# Note 1: RFC-959 prohibited STOU parameters, but this
# prohibition is obsolete.
# Note 2: 250 response wanted by RFC-959 has been declared
# incorrect in RFC-1123 that wants 125/150 instead.
# Note 3: RFC-1123 also provided an exact output format
# defined to be as follow:
# > 125 FILE: pppp
# ...where pppp represents the unique path name of the
# file that will be written.
# watch for STOU preceded by REST, which makes no sense.
if self._restart_position:
self.respond("450 Can't STOU while REST request is pending.")
return
if line:
basedir, prefix = os.path.split(self.fs.ftp2fs(line))
prefix = prefix + '.'
else:
basedir = self.fs.ftp2fs(self.fs.cwd)
prefix = 'ftpd.'
try:
fd = self.run_as_current_user(self.fs.mkstemp, prefix=prefix,
dir=basedir)
except (EnvironmentError, FilesystemError):
err = sys.exc_info()[1]
            # likely we hit the max number of retries to find
            # a unique file name
if getattr(err, "errno", -1) == errno.EEXIST:
why = 'No usable unique file name found'
# something else happened
else:
why = _strerror(err)
self.respond("450 %s." % why)
return
if not self.authorizer.has_perm(self.username, 'w', fd.name):
try:
fd.close()
self.run_as_current_user(self.fs.remove, fd.name)
except (OSError, FilesystemError):
pass
self.respond("550 Not enough privileges.")
return
# now just acts like STOR except that restarting isn't allowed
filename = os.path.basename(fd.name)
if self.data_channel is not None:
self.respond("125 FILE: %s" % filename)
self.data_channel.file_obj = fd
self.data_channel.enable_receiving(self._current_type, "STOU")
else:
self.respond("150 FILE: %s" % filename)
self._in_dtp_queue = (fd, "STOU")
return filename
def ftp_APPE(self, file):
"""Append data to an existing file on the server.
On success return the file path, else None.
"""
# watch for APPE preceded by REST, which makes no sense.
if self._restart_position:
self.respond("450 Can't APPE while REST request is pending.")
else:
return self.ftp_STOR(file, mode='a')
def ftp_REST(self, line):
"""Restart a file transfer from a previous mark."""
if self._current_type == 'a':
self.respond('501 Resuming transfers not allowed in ASCII mode.')
return
try:
marker = int(line)
if marker < 0:
raise ValueError
except (ValueError, OverflowError):
self.respond("501 Invalid parameter.")
else:
self.respond("350 Restarting at position %s." % marker)
self._restart_position = marker
def ftp_ABOR(self, line):
"""Abort the current data transfer."""
# ABOR received while no data channel exists
if (self._dtp_acceptor is None) and (self._dtp_connector is None) \
and (self.data_channel is None):
self.respond("225 No transfer to abort.")
return
else:
# a PASV or PORT was received but connection wasn't made yet
if self._dtp_acceptor is not None or self._dtp_connector is not None:
self._shutdown_connecting_dtp()
resp = "225 ABOR command successful; data channel closed."
# If a data transfer is in progress the server must first
# close the data connection, returning a 426 reply to
# indicate that the transfer terminated abnormally, then it
# must send a 226 reply, indicating that the abort command
# was successfully processed.
# If no data has been transmitted we just respond with 225
# indicating that no transfer was in progress.
if self.data_channel is not None:
if self.data_channel.transfer_in_progress():
self.data_channel.close()
self.data_channel = None
self.respond("426 Transfer aborted via ABOR.",
logfun=logging.info)
resp = "226 ABOR command successful."
else:
self.data_channel.close()
self.data_channel = None
resp = "225 ABOR command successful; data channel closed."
self.respond(resp)
# --- authentication
def ftp_USER(self, line):
"""Set the username for the current session."""
# RFC-959 specifies a 530 response to the USER command if the
        # username is not valid. If the username is valid, the server
        # returns a 331 response instead. In order to prevent a
# malicious client from determining valid usernames on a server,
# it is suggested by RFC-2577 that a server always return 331 to
# the USER command and then reject the combination of username
# and password for an invalid username when PASS is provided later.
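        # Illustrative exchange (username hypothetical):
        #   C> USER nosuchuser    S> 331 Username ok, send password.
        #   C> PASS whatever      S> 530 Authentication failed.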
if not self.authenticated:
self.respond('331 Username ok, send password.')
else:
# a new USER command could be entered at any point in order
# to change the access control flushing any user, password,
# and account information already supplied and beginning the
# login sequence again.
self.flush_account()
msg = 'Previous account information was flushed'
self.respond('331 %s, send password.' % msg, logfun=logging.info)
self.username = line
_auth_failed_timeout = 5
def ftp_PASS(self, line):
        """Check username's password against the authorizer."""
        #if self.IsServerOn()==False: return
if self.authenticated: self.respond("503 User already authenticated."); return
if not self.username: self.respond("503 Login with USER first."); return
try:
self.authorizer.validate_authentication(self.username,line,self)
home = self.authorizer.get_home_dir(self.username)
msg_login = self.authorizer.get_msg_login(self.username)
except (AuthenticationFailed, AuthorizerError):
def auth_failed(username,password,msg):
self.add_channel()
if hasattr(self,'_closed') and not self._closed:
self.attempted_logins += 1
if self.attempted_logins >= self.max_login_attempts: msg+=" Disconnecting."; self.respond("530 "+msg); self.close_when_done()
else: self.respond("530 "+msg)
self.log("USER '%s' failed login." % username)
self.on_login_failed(username,password)
msg=str(sys.exc_info()[1])
if not msg:
if self.username=='anonymous': msg="Anonymous access not allowed."
else: msg="Authentication failed."
else:
# response string should be capitalized as per RFC-959
msg=msg.capitalize()
self.del_channel()
self.ioloop.call_later(self._auth_failed_timeout,auth_failed,self.username,line,msg,_errback=self.handle_error)
self.username=""
else:
if not isinstance(home, unicode):
if PY3: raise ValueError('type(home) != text')
else:
warnings.warn(
'%s.get_home_dir returned a non-unicode string; now ' \
'casting to unicode' % self.authorizer.__class__.__name__,
RuntimeWarning)
home=home.decode('utf8')
if len(msg_login) <= 75: self.respond('230 %s' % msg_login)
else: self.push("230-%s\r\n" % msg_login); self.respond("230 ")
self.log("USER '%s' logged in." % self.username)
self.authenticated=True
self.password=line
self.attempted_logins=0
self.fs=self.abstracted_fs(home,self)
self.on_login(self.username)
def ftp_REIN(self, line):
"""Reinitialize user's current session."""
# From RFC-959:
# REIN command terminates a USER, flushing all I/O and account
# information, except to allow any transfer in progress to be
# completed. All parameters are reset to the default settings
# and the control connection is left open. This is identical
# to the state in which a user finds himself immediately after
# the control connection is opened.
self.flush_account()
        # Note: RFC-959 erroneously mentions "220" as the correct response
# code to be given in this case, but this is wrong...
self.respond("230 Ready for new user.")
# --- filesystem operations
def ftp_PWD(self, line):
"""Return the name of the current working directory to the client."""
# The 257 response is supposed to include the directory
# name and in case it contains embedded double-quotes
# they must be doubled (see RFC-959, chapter 7, appendix 2).
cwd = self.fs.cwd
assert isinstance(cwd, unicode), cwd
self.respond('257 "%s" is the current directory.'
% cwd.replace('"', '""'))
def ftp_CWD(self, path):
"""Change the current working directory.
On success return the new directory path, else None.
"""
# Temporarily join the specified directory to see if we have
# permissions to do so, then get back to original process's
# current working directory.
# Note that if for some reason os.getcwd() gets removed after
        # the process is started we'll get into trouble (os.getcwd()
# will fail with ENOENT) but we can't do anything about that
# except logging an error.
init_cwd=getcwdu()
try: self.run_as_current_user(self.fs.chdir,path)
except (OSError,FilesystemError):
err=sys.exc_info()[1]
why=_strerror(err)
self.respond('550 %s.' % why)
else:
cwd=self.fs.cwd
assert isinstance(cwd,unicode),cwd
self.respond('250 "%s" is the current directory.' % cwd)
if getcwdu() != init_cwd: os.chdir(init_cwd)
return path
def ftp_CDUP(self, path):
"""Change into the parent directory.
On success return the new directory, else None."""
# Note: RFC-959 says that code 200 is required but it also says
# that CDUP uses the same codes as CWD.
return self.ftp_CWD(path)
def ftp_SIZE(self, path):
"""Return size of file in a format suitable for using with
RESTart as defined in RFC-3659."""
# Implementation note: properly handling the SIZE command when
# TYPE ASCII is used would require to scan the entire file to
# perform the ASCII translation logic
# (file.read().replace(os.linesep, '\r\n')) and then calculating
        # the len of such data, which may differ from the actual
# size of the file on the server. Considering that calculating
# such result could be very resource-intensive and also dangerous
# (DoS) we reject SIZE when the current TYPE is ASCII.
# However, clients in general should not be resuming downloads
# in ASCII mode. Resuming downloads in binary mode is the
# recommended way as specified in RFC-3659.
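        # Worked example (hypothetical file): on UNIX a text file holding
        # "a\nb\n" is 4 bytes on disk, but its ASCII-mode transfer size
        # would be 6 bytes once each "\n" is translated to "\r\n".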
line=self.fs.fs2ftp(path)
if self._current_type=='a': why="SIZE not allowed in ASCII mode"; self.respond("550 %s." %why); return
if not self.fs.isfile(self.fs.realpath(path)): why="%s is not retrievable" % line; self.respond("550 %s." % why); return
try: size=self.run_as_current_user(self.fs.getsize,path)
except (OSError,FilesystemError): err=sys.exc_info()[1]; why=_strerror(err); self.respond('550 %s.' % why)
else: self.respond("213 %s" % size)
def ftp_MDTM(self, path):
"""Return last modification time of file to the client as an ISO
3307 style timestamp (YYYYMMDDHHMMSS) as defined in RFC-3659.
On success return the file path, else None."""
line=self.fs.fs2ftp(path)
if not self.fs.isfile(self.fs.realpath(path)): self.respond("550 %s is not retrievable" % line); return
if self.use_gmt_times: timefunc=time.gmtime
else: timefunc=time.localtime
try: secs=self.run_as_current_user(self.fs.getmtime,path); lmt=time.strftime("%Y%m%d%H%M%S",timefunc(secs))
except (ValueError,OSError,FilesystemError):
err=sys.exc_info()[1]
if isinstance(err, ValueError):
                # this can happen if the file's last modification time
                # is too old (prior to year 1900)
why="Can't determine file's last modification time"
else: why=_strerror(err)
self.respond('550 %s.' % why)
else: self.respond("213 %s" % lmt); return path
def ftp_MKD(self, path):
"""Create the specified directory.
On success return the directory path, else None."""
line=self.fs.fs2ftp(path)
try:
self.run_as_current_user(self.fs.mkdir, path)
except (OSError, FilesystemError):
err=sys.exc_info()[1]
why=_strerror(err)
self.respond('550 %s.' %why)
else: self.respond('257 "%s" directory created.' % line.replace('"', '""')); return path # The 257 response is supposed to include the directory name and in case it contains embedded double-quotes they must be doubled (see RFC-959, chapter 7, appendix 2).
def ftp_RMD(self, path):
"""Remove the specified directory.
On success return the directory path, else None."""
if self.fs.realpath(path)==self.fs.realpath(self.fs.root): msg="Can't remove root directory."; self.respond("550 %s" % msg); return
try: self.run_as_current_user(self.fs.rmdir,path)
except (OSError,FilesystemError): err=sys.exc_info()[1]; why=_strerror(err); self.respond('550 %s.' % why)
else: self.respond("250 Directory removed.")
def ftp_DELE(self, path):
"""Delete the specified file.
On success return the file path, else None."""
try: self.run_as_current_user(self.fs.remove,path)
except (OSError,FilesystemError): err=sys.exc_info()[1]; why=_strerror(err); self.respond('550 %s.' % why)
else: self.respond("250 File removed."); return path
def ftp_RNFR(self, path):
"""Rename the specified (only the source name is specified here, see RNTO command)"""
if not self.fs.lexists(path): self.respond("550 No such file or directory.")
elif self.fs.realpath(path)==self.fs.realpath(self.fs.root): self.respond("550 Can't rename home directory.")
else: self._rnfr=path; self.respond("350 Ready for destination name.")
def ftp_RNTO(self, path):
"""Rename file (destination name only, source is specified with RNFR).
On success return a (source_path, destination_path) tuple."""
if not self._rnfr: self.respond("503 Bad sequence of commands: use RNFR first."); return
src=self._rnfr; self._rnfr=None
try: self.run_as_current_user(self.fs.rename,src,path)
except (OSError, FilesystemError): err=sys.exc_info()[1]; why=_strerror(err); self.respond('550 %s.' % why)
else: self.respond("250 Renaming ok."); return (src,path)
# --- others
def ftp_TYPE(self,line): #"""Set current type data type to binary/ascii"""
type=line.upper().replace(' ','')
if type in ("A","L7"): self.respond("200 Type set to: ASCII."); self._current_type='a'
elif type in ("I","L8"): self.respond("200 Type set to: Binary."); self._current_type='i'
else: self.respond('504 Unsupported type "%s".' % line)
def ftp_STRU(self, line):
"""Set file structure ("F" is the only one supported (noop))."""
stru=line.upper()
if stru=='F': self.respond('200 File transfer structure set to: F.')
elif stru in ('P', 'R'):
# R is required in minimum implementations by RFC-959, 5.1.
# RFC-1123, 4.1.2.13, amends this to only apply to servers
# whose file systems support record structures, but also
# suggests that such a server "may still accept files with
# STRU R, recording the byte stream literally".
# Should we accept R but with no operational difference from
# F? proftpd and wu-ftpd don't accept STRU R. We just do
# the same.
#
# RFC-1123 recommends against implementing P.
self.respond('504 Unimplemented STRU type.')
else: self.respond('501 Unrecognized STRU type.')
def ftp_MODE(self, line): #"""Set data transfer mode ("S" is the only one supported (noop))."""
mode=line.upper()
if mode=='S': self.respond('200 Transfer mode set to: S')
elif mode in ('B','C'): self.respond('504 Unimplemented MODE type.')
else: self.respond('501 Unrecognized MODE type.')
def ftp_STAT(self, path):
"""Return statistics about current ftp session. If an argument is provided return directory listing over command channel.
Implementation note:
RFC-959 does not explicitly mention globbing but many FTP servers do support it as a measure of convenience for FTP clients and users.
In order to search for and match the given globbing expression, the code has to search (possibly) many directories, examine
each contained filename, and build a list of matching files in memory. Since this operation can be quite intensive, both CPU-
and memory-wise, we do not support globbing."""
# return STATus information about ftpd
if not path:
s=[]
s.append('Connected to: %s:%s' % self.socket.getsockname()[:2])
if self.authenticated: s.append('Logged in as: %s' % self.username)
else:
if not self.username: s.append("Waiting for username.")
else: s.append("Waiting for password.")
if self._current_type=='a': type='ASCII'
else: type='Binary'
s.append("TYPE: %s; STRUcture: File; MODE: Stream" % type)
if self._dtp_acceptor is not None: s.append('Passive data channel waiting for connection.')
elif self.data_channel is not None:
bytes_sent=self.data_channel.tot_bytes_sent
bytes_recv=self.data_channel.tot_bytes_received
elapsed_time=self.data_channel.get_elapsed_time()
s.append('Data connection open:')
s.append('Total bytes sent: %s' % bytes_sent)
s.append('Total bytes received: %s' % bytes_recv)
s.append('Transfer elapsed time: %s secs' % elapsed_time)
else: s.append('Data connection closed.')
self.push('211-FTP server status:\r\n')
self.push(''.join([' %s\r\n' % item for item in s]))
self.respond('211 End of status.')
# return directory LISTing over the command channel
else:
line=self.fs.fs2ftp(path)
try: iterator=self.run_as_current_user(self.fs.get_list_dir,path)
except (OSError, FilesystemError): err=sys.exc_info()[1]; why=_strerror(err); self.respond('550 %s.' %why)
else: self.push('213-Status of "%s":\r\n' % line); self.push_with_producer(BufferedIteratorProducer(iterator)); self.respond('213 End of status.'); return path
    def ftp_FEAT(self,line): #"""List all new features supported as defined in RFC-2389."""
features = set(['UTF8','TVFS'])
features.update([feat for feat in ('EPRT','EPSV','MDTM','SIZE') \
if feat in self.proto_cmds])
features.update(self._extra_feats)
if 'MLST' in self.proto_cmds or 'MLSD' in self.proto_cmds:
facts=''
for fact in self._available_facts:
if fact in self._current_facts: facts+=fact+'*;'
else: facts+=fact+';'
features.add('MLST '+facts)
if 'REST' in self.proto_cmds: features.add('REST STREAM')
features=sorted(features); self.push("211-Features supported:\r\n"); self.push("".join([" %s\r\n" % x for x in features])); self.respond('211 End FEAT.')
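    # Illustrative FEAT reply (the exact feature set and fact list depend
    # on which commands are enabled):
    #   211-Features supported:
    #    EPRT
    #    EPSV
    #    MDTM
    #    MLST modify*;perm*;size*;type*;
    #    REST STREAM
    #    SIZE
    #    TVFS
    #    UTF8
    #   211 End FEAT.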
def ftp_OPTS(self,line): #"""Specify options for FTP commands as specified in RFC-2389."""
try:
if line.count(' ') > 1:
raise ValueError('Invalid number of arguments')
if ' ' in line:
cmd,arg=line.split(' ')
if ';' not in arg: raise ValueError('Invalid argument')
else: cmd,arg=line,''
# actually the only command able to accept options is MLST
if cmd.upper() != 'MLST' or 'MLST' not in self.proto_cmds: raise ValueError('Unsupported command "%s"' % cmd)
except ValueError:
err=sys.exc_info()[1]
self.respond('501 %s.' % err)
else:
facts=[x.lower() for x in arg.split(';')]
self._current_facts=[x for x in facts if x in self._available_facts]
f=''.join([x + ';' for x in self._current_facts])
self.respond('200 MLST OPTS '+f)
def ftp_NOOP(self,line): self.respond("200 I successfully done nothin'.") #"""Do nothing."""
def ftp_SYST(self,line):
"""Return system type (always returns UNIX type: L8)."""
# This command is used to find out the type of operating system
# at the server. The reply shall have as its first word one of
# the system names listed in RFC-943.
# Since that we always return a "/bin/ls -lA"-like output on
# LIST we prefer to respond as if we would on Unix in any case.
self.respond("215 UNIX Type: L8")
def ftp_ALLO(self,line): #"""Allocate bytes for storage (noop)."""
# not necessary (always respond with 202)
self.respond("202 No storage allocation necessary.")
def ftp_HELP(self,line):
"""Return help text to the client."""
if line:
line=line.upper()
if line in self.proto_cmds: self.respond("214 %s" % self.proto_cmds[line]['help'])
else: self.respond("501 Unrecognized command.")
else:
# provide a compact list of recognized commands
def formatted_help():
cmds=[]; keys=[x for x in self.proto_cmds.keys() if not x.startswith('SITE ')]; keys.sort()
while keys: elems=tuple((keys[0:8])); cmds.append(' %-6s' * len(elems) % elems +'\r\n'); del keys[0:8]
return ''.join(cmds)
self.push("214-The following commands are recognized:\r\n"); self.push(formatted_help()); self.respond("214 Help command successful.")
# --- site commands
# The user willing to add support for a specific SITE command must
# update self.proto_cmds dictionary and define a new ftp_SITE_%CMD%
# method in the subclass.
def ftp_SITE_CHMOD(self, path, mode): #"""Change file mode. On success return a (file_path, mode) tuple."""
# Note: although most UNIX servers implement it, SITE CHMOD is not
# defined in any official RFC.
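        # Worked example (hypothetical request): "SITE CHMOD 755 foo"
        # gives mode '755' -> int('755', 8) == 493 (rwxr-xr-x).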
try:
assert len(mode) in (3,4)
for x in mode: assert 0 <= int(x) <= 7
mode=int(mode,8)
except (AssertionError,ValueError): self.respond("501 Invalid SITE CHMOD format.")
else:
try: self.run_as_current_user(self.fs.chmod,path,mode)
except (OSError, FilesystemError): err=sys.exc_info()[1]; why=_strerror(err); self.respond('550 %s.' % why)
else: self.respond('200 SITE CHMOD successful.'); return (path,mode)
def ftp_SITE_HELP(self,line): #"""Return help text to the client for a given SITE command."""
if line:
line=line.upper()
if line in self.proto_cmds: self.respond("214 %s" % self.proto_cmds[line]['help'])
else: self.respond("501 Unrecognized SITE command.")
else:
self.push("214-The following SITE commands are recognized:\r\n"); site_cmds=[]
for cmd in sorted(self.proto_cmds.keys()):
if cmd.startswith('SITE '): site_cmds.append(' %s\r\n' % cmd[5:])
self.push(''.join(site_cmds)); self.respond("214 Help SITE command successful.")
# --- support for deprecated cmds
# RFC-1123 requires that the server treat XCUP, XCWD, XMKD, XPWD
    # and XRMD commands as synonyms for CDUP, CWD, MKD, PWD and RMD.
# Such commands are obsoleted but some ftp clients (e.g. Windows
# ftp.exe) still use them.
def ftp_XCUP(self,line): return self.ftp_CDUP(line) #"""Change to the parent directory. Synonym for CDUP. Deprecated."""
def ftp_XCWD(self,line): return self.ftp_CWD(line) #"""Change the current working directory. Synonym for CWD. Deprecated."""
def ftp_XMKD(self,line): return self.ftp_MKD(line) #"""Create the specified directory. Synonym for MKD. Deprecated."""
def ftp_XPWD(self,line): return self.ftp_PWD(line) #"""Return the current working directory. Synonym for PWD. Deprecated."""
def ftp_XRMD(self,line): return self.ftp_RMD(line) #"""Remove the specified directory. Synonym for RMD. Deprecated."""
# ===================================================================
# --- FTP over SSL
# ===================================================================
# requires PyOpenSSL - http://pypi.python.org/pypi/pyOpenSSL
try: from OpenSSL import SSL
except ImportError: pass
else:
_ssl_proto_cmds=proto_cmds.copy()
_ssl_proto_cmds.update({
'AUTH': dict(perm=None, auth=False, arg=True,help='Syntax: AUTH <SP> TLS|SSL (set up secure control channel).'),
'PBSZ': dict(perm=None, auth=False, arg=True,help='Syntax: PBSZ <SP> 0 (negotiate TLS buffer).'),
'PROT': dict(perm=None, auth=False, arg=True,help='Syntax: PROT <SP> [C|P] (set up un/secure data channel).'),
})
class SSLConnection(_AsyncChatNewStyle): #"""An AsyncChat subclass supporting TLS/SSL."""
_ssl_accepting=False; _ssl_established=False; _ssl_closing=False
def __init__(self,*args,**kwargs): super(SSLConnection,self).__init__(*args,**kwargs); self._error=False
def secure_connection(self, ssl_context): #"""Secure the connection switching from plain-text to SSL/TLS."""
try: self.socket=SSL.Connection(ssl_context,self.socket)
except socket.error: self.close()
except ValueError:
if self.socket.fileno() == -1: return # may happen in case the client connects/disconnects very quickly
raise
else: self.socket.set_accept_state(); self._ssl_accepting=True
def _do_ssl_handshake(self):
self._ssl_accepting=True
try: self.socket.do_handshake()
except (SSL.WantReadError,SSL.WantWriteError): return
except SSL.SysCallError:
err=sys.exc_info()[1]; retval,desc=err.args
if (retval == -1 and desc=='Unexpected EOF') or retval > 0: return self.handle_close()
raise
except SSL.Error: return self.handle_failed_ssl_handshake()
else: self._ssl_accepting=False; self._ssl_established=True; self.handle_ssl_established()
def handle_ssl_established(self): pass #"""Called when SSL handshake has completed."""
def handle_ssl_shutdown(self): super(SSLConnection, self).close() #"""Called when SSL shutdown() has completed."""
def handle_failed_ssl_handshake(self): raise NotImplementedError("must be implemented in subclass")
def handle_read_event(self):
if self._ssl_accepting: self._do_ssl_handshake()
elif self._ssl_closing: self._do_ssl_shutdown()
else: super(SSLConnection,self).handle_read_event()
def handle_write_event(self):
if self._ssl_accepting: self._do_ssl_handshake()
elif self._ssl_closing: self._do_ssl_shutdown()
else: super(SSLConnection,self).handle_write_event()
def handle_error(self):
self._error=True
try: raise
            except (KeyboardInterrupt,SystemExit): sys.exit()
except: self.log_exception(self)
# when facing an unhandled exception in here it's better to rely on base class (FTPHandler or DTPHandler) close() method as it does not imply SSL shutdown logic
try: super(SSLConnection,self).close()
except Exception: logger.critical(traceback.format_exc())
def send(self,data):
try: return super(SSLConnection,self).send(data)
except (SSL.WantReadError,SSL.WantWriteError): return 0
except SSL.ZeroReturnError: super(SSLConnection,self).handle_close(); return 0
except SSL.SysCallError:
err=sys.exc_info()[1]; errnum,errstr=err.args
if errnum==errno.EWOULDBLOCK: return 0
elif errnum in _DISCONNECTED or errstr=='Unexpected EOF': super(SSLConnection,self).handle_close(); return 0
else: raise
def recv(self,buffer_size):
try: return super(SSLConnection,self).recv(buffer_size)
except (SSL.WantReadError, SSL.WantWriteError):
return b('')
except SSL.ZeroReturnError:
super(SSLConnection,self).handle_close()
return b('')
except SSL.SysCallError:
err=sys.exc_info()[1]; errnum,errstr=err.args
if errnum in _DISCONNECTED or errstr=='Unexpected EOF': super(SSLConnection,self).handle_close(); return b('')
else: raise
def _do_ssl_shutdown(self): #"""Executes a SSL_shutdown() call to revert the connection back to clear-text. twisted/internet/tcp.py code has been used as an example."""
self._ssl_closing=True
# since SSL_shutdown() doesn't report errors, an empty write call is done first, to try to detect if the connection has gone away
try: os.write(self.socket.fileno(),b(''))
except (OSError,socket.error):
err=sys.exc_info()[1]
if err.args[0] in (errno.EINTR,errno.EWOULDBLOCK,errno.ENOBUFS): return
elif err.args[0] in _DISCONNECTED: return super(SSLConnection,self).close()
else: raise
            # Ok, this is a mess, but the underlying OpenSSL API simply *SUCKS* and I really couldn't do any better.
#
# Here we just want to shutdown() the SSL layer and then close() the connection so we're not interested in a
# complete SSL shutdown() handshake, so let's pretend we already received a "RECEIVED" shutdown notification from the client.
# Once the client received our "SENT" shutdown notification then we close() the connection.
#
# Since it is not clear what errors to expect during the entire procedure we catch them all and assume the following:
# - WantReadError and WantWriteError means "retry"
# - ZeroReturnError, SysCallError[EOF], Error[] are all aliases for disconnection
try:
laststate=self.socket.get_shutdown()
self.socket.set_shutdown(laststate | SSL.RECEIVED_SHUTDOWN)
done=self.socket.shutdown()
if not (laststate & SSL.RECEIVED_SHUTDOWN):
self.socket.set_shutdown(SSL.SENT_SHUTDOWN)
except (SSL.WantReadError,SSL.WantWriteError): pass
except SSL.ZeroReturnError: super(SSLConnection,self).close()
except SSL.SysCallError:
err=sys.exc_info()[1]
errnum,errstr=err.args
if errnum in _DISCONNECTED or errstr=='Unexpected EOF': super(SSLConnection,self).close()
else: raise
except SSL.Error: # see: # http://code.google.com/p/pyftpdlib/issues/detail?id=171 # https://bugs.launchpad.net/pyopenssl/+bug/785985
err=sys.exc_info()[1]
if err.args and not err.args[0]: pass
else: raise
except socket.error:
err=sys.exc_info()[1]
if err.args[0] in _DISCONNECTED: super(SSLConnection,self).close()
else: raise
else:
if done: self._ssl_established=False; self._ssl_closing=False; self.handle_ssl_shutdown()
def close(self):
if self._ssl_established and not self._error: self._do_ssl_shutdown()
else: self._ssl_accepting=False; self._ssl_established=False; self._ssl_closing=False; super(SSLConnection,self).close()
class TLS_DTPHandler(SSLConnection, DTPHandler): #"""A DTPHandler subclass supporting TLS/SSL."""
def __init__(self,sock,cmd_channel):
super(TLS_DTPHandler,self).__init__(sock,cmd_channel)
if self.cmd_channel._prot: self.secure_connection(self.cmd_channel.ssl_context)
def _use_sendfile(self,producer): return False
def handle_failed_ssl_handshake(self):
            # TLS/SSL handshake failure, probably the client's fault (e.g. it used an SSL version different from the server's).
# RFC-4217, chapter 10.2 expects us to return 522 over the command channel.
self.cmd_channel.respond("522 SSL handshake failed."); self.cmd_channel.log_cmd("PROT","P",522,"SSL handshake failed."); self.close()
class TLS_FTPHandler(SSLConnection,FTPHandler):
"""A FTPHandler subclass supporting TLS/SSL.
Implements AUTH, PBSZ and PROT commands (RFC-2228 and RFC-4217).
Configurable attributes:
- (bool) tls_control_required:
When True requires SSL/TLS to be established on the control
channel, before logging in. This means the user will have
to issue AUTH before USER/PASS (default False).
- (bool) tls_data_required:
When True requires SSL/TLS to be established on the data
channel. This means the user will have to issue PROT
before PASV or PORT (default False).
SSL-specific options:
- (string) certfile:
the path to the file which contains a certificate to be
used to identify the local side of the connection.
This must always be specified, unless context is provided
instead.
- (string) keyfile:
the path to the file containing the private RSA key;
can be omitted if certfile already contains the private
key (defaults: None).
- (int) protocol:
specifies which version of the SSL protocol to use when
establishing SSL/TLS sessions; clients can then only
connect using the configured protocol (defaults to SSLv23,
allowing SSLv3 and TLSv1 protocols).
Possible values:
* SSL.SSLv2_METHOD - allow only SSLv2
* SSL.SSLv3_METHOD - allow only SSLv3
* SSL.SSLv23_METHOD - allow both SSLv3 and TLSv1
* SSL.TLSv1_METHOD - allow only TLSv1
- (instance) context:
a SSL Context object previously configured; if specified
all other parameters will be ignored.
(default None). """
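        # Illustrative setup sketch (attribute values hypothetical):
        #   handler = TLS_FTPHandler
        #   handler.certfile = 'keycert.pem'
        #   handler.tls_control_required = True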
# configurable attributes
tls_control_required=False; tls_data_required=False; certfile=None; keyfile=None; ssl_protocol=SSL.SSLv23_METHOD; ssl_context=None
# overridden attributes
proto_cmds=_ssl_proto_cmds; dtp_handler=TLS_DTPHandler
def __init__(self,conn,server,ioloop=None):
super(TLS_FTPHandler,self).__init__(conn,server,ioloop)
if not self.connected: return
self._extra_feats=['AUTH TLS','AUTH SSL','PBSZ','PROT']; self._pbsz=False; self._prot=False; self.ssl_context=self.get_ssl_context()
@classmethod
def get_ssl_context(cls):
if cls.ssl_context is None:
if cls.certfile is None: raise ValueError("at least certfile must be specified")
cls.ssl_context=SSL.Context(cls.ssl_protocol)
if cls.ssl_protocol != SSL.SSLv2_METHOD: cls.ssl_context.set_options(SSL.OP_NO_SSLv2)
else: warnings.warn("SSLv2 protocol is insecure",RuntimeWarning)
cls.ssl_context.use_certificate_file(cls.certfile)
if not cls.keyfile: cls.keyfile=cls.certfile
cls.ssl_context.use_privatekey_file(cls.keyfile)
return cls.ssl_context
# --- overridden methods
def flush_account(self): FTPHandler.flush_account(self); self._pbsz=False; self._prot=False
def process_command(self,cmd,*args,**kwargs):
if cmd in ('USER','PASS'):
if self.tls_control_required and not self._ssl_established: msg="SSL/TLS required on the control channel."; self.respond("550 "+msg); self.log_cmd(cmd,args[0],550,msg); return
elif cmd in ('PASV','EPSV','PORT','EPRT'):
if self.tls_data_required and not self._prot: msg="SSL/TLS required on the data channel."; self.respond("550 "+msg); self.log_cmd(cmd,args[0],550,msg); return
FTPHandler.process_command(self,cmd,*args,**kwargs)
# --- new methods
def handle_failed_ssl_handshake(self):
        # TLS/SSL handshake failure, probably the client's fault for using an SSL version different from the server's.
# We can't rely on the control connection anymore so we just disconnect the client without sending any response.
self.log("SSL handshake failed."); self.close()
def ftp_AUTH(self,line): #"""Set up secure control channel."""
arg=line.upper()
if isinstance(self.socket,SSL.Connection): self.respond("503 Already using TLS.")
elif arg in ('TLS','TLS-C','SSL','TLS-P'):
# From RFC-4217: "As the SSL/TLS protocols self-negotiate their levels,
# there is no need to distinguish between SSL and TLS in the application layer".
self.respond('234 AUTH %s successful.' %arg); self.secure_connection(self.ssl_context)
else: self.respond("502 Unrecognized encryption type (use TLS or SSL).")
def ftp_PBSZ(self,line): #"""Negotiate size of buffer for secure data transfer. For TLS/SSL the only valid value for the parameter is '0'. Any other value is accepted but ignored."""
if not isinstance(self.socket,SSL.Connection): self.respond("503 PBSZ not allowed on insecure control connection.")
else: self.respond('200 PBSZ=0 successful.'); self._pbsz=True
def ftp_PROT(self,line): #"""Setup un/secure data channel."""
arg=line.upper()
if not isinstance(self.socket,SSL.Connection): self.respond("503 PROT not allowed on insecure control connection.")
elif not self._pbsz: self.respond("503 You must issue the PBSZ command prior to PROT.")
elif arg=='C': self.respond('200 Protection set to Clear'); self._prot=False
elif arg=='P': self.respond('200 Protection set to Private'); self._prot=True
elif arg in ('S','E'): self.respond('521 PROT %s unsupported (use C or P).' %arg)
else: self.respond("502 Unrecognized PROT type (use C or P).")
| UTF-8 | Python | false | false | 119,844 | py | 20 | handlers.py | 11 | 0.619555 | 0.608316 | 0 | 2,221 | 52.959478 | 448 |
menpo/menpo | 3,590,592,686,525 | 04e6d64bd2b301c691c944fe9fddea1f521722b6 | 5bb8e08e2d2bff503b4425cffe2e60dc32499089 | /menpo/transform/base/__init__.py | 9b476fc88afccb765a5783cd8881dec297f6bba3 | [
"BSD-3-Clause"
]
| permissive | https://github.com/menpo/menpo | 2e731560bfd480bf3008317368af35612e55ddc2 | a61500656c4fc2eea82497684f13cc31a605550b | refs/heads/master | 2023-08-15T21:04:41.768318 | 2022-01-02T22:10:04 | 2022-01-02T22:10:04 | 9,731,325 | 332 | 89 | BSD-3-Clause | false | 2022-01-02T22:10:05 | 2013-04-28T14:52:38 | 2022-01-02T21:59:15 | 2022-01-02T22:10:04 | 38,781 | 304 | 79 | 45 | Python | false | false | import warnings
import numpy as np
from menpo.base import Copyable, MenpoDeprecationWarning
class Transform(Copyable):
r"""
Abstract representation of any spatial transform.
Provides a unified interface to apply the transform with
:meth:`apply_inplace` and :meth:`apply`.
All Transforms support basic composition to form a :map:`TransformChain`.
There are two useful forms of composition. Firstly, the mathematical
composition symbol `o` has the following definition::
Let a(x) and b(x) be two transforms on x.
(a o b)(x) == a(b(x))
This functionality is provided by the :meth:`compose_after` family of
methods: ::
(a.compose_after(b)).apply(x) == a.apply(b.apply(x))
    Equally useful is inverting the order of composition - so that over
time a large chain of transforms can be built to do a useful job, and
composing on this chain adds another transform to the end (after all other
preceding transforms have been performed).
For instance, let's say we want to rescale a :map:`PointCloud` ``p`` around
its mean, and then translate it some place else. It would be nice to be able
to do something like::
t = Translation(-p.centre) # translate to centre
s = Scale(2.0) # rescale
        move = Translation([10, 0, 0]) # budge along the x axis
t.compose(s).compose(-t).compose(move)
In Menpo, this functionality is provided by the :meth:`compose_before()`
family of methods::
(a.compose_before(b)).apply(x) == b.apply(a.apply(x))
For native composition, see the :map:`ComposableTransform` subclass and
the :map:`VComposable` mix-in.
For inversion, see the :map:`Invertible` and :map:`VInvertible` mix-ins.
For alignment, see the :map:`Alignment` mix-in.
"""
@property
def n_dims(self):
r"""
The dimensionality of the data the transform operates on.
``None`` if the transform is not dimension specific.
:type: `int` or ``None``
"""
return None
@property
def n_dims_output(self):
r"""
The output of the data from the transform.
``None`` if the output of the transform is not dimension specific.
:type: `int` or ``None``
"""
# most Transforms don't change the dimensionality of their input.
return self.n_dims
def _apply(self, x, **kwargs):
r"""
Applies the transform to the array ``x``, returning the result.
This method does the actual work of transforming the data, and is the
one that subclasses must implement. :meth:`apply` and
:meth:`apply_inplace` both call this method to do that actual work.
Parameters
----------
x : ``(n_points, n_dims)`` `ndarray`
The array to be transformed.
kwargs : `dict`
Subclasses may need these in their ``_apply`` methods.
Returns
-------
transformed : ``(n_points, n_dims_output)`` `ndarray`
The transformed array
"""
raise NotImplementedError()
def apply_inplace(self, *args, **kwargs):
r"""
Deprecated as public supported API, use the non-mutating `apply()`
instead.
For internal performance-specific uses, see `_apply_inplace()`.
"""
warnings.warn(
"the public API for inplace operations is deprecated "
"and will be removed in a future version of Menpo. "
"Use .apply() instead.",
MenpoDeprecationWarning,
)
return self._apply_inplace(*args, **kwargs)
def _apply_inplace(self, x, **kwargs):
r"""
Applies this transform to a :map:`Transformable` ``x`` destructively.
Any ``kwargs`` will be passed to the specific transform :meth:`_apply`
method.
Note that this is an inplace operation that should be used sparingly,
by internal API's where creating a copy of the transformed object is
expensive. It does not return anything, as the operation is inplace.
Parameters
----------
x : :map:`Transformable`
The :map:`Transformable` object to be transformed.
kwargs : `dict`
Passed through to :meth:`_apply`.
"""
def transform(x_):
"""
Local closure which calls the :meth:`_apply` method with the
`kwargs` attached.
"""
return self._apply(x_, **kwargs)
try:
x._transform_inplace(transform)
except AttributeError:
raise ValueError(
"apply_inplace can only be used on Transformable" " objects."
)
def apply(self, x, batch_size=None, **kwargs):
r"""
Applies this transform to ``x``.
If ``x`` is :map:`Transformable`, ``x`` will be handed this transform
object to transform itself non-destructively (a transformed copy of the
object will be returned).
If not, ``x`` is assumed to be an `ndarray`. The transformation will be
non-destructive, returning the transformed version.
Any ``kwargs`` will be passed to the specific transform :meth:`_apply`
method.
Parameters
----------
x : :map:`Transformable` or ``(n_points, n_dims)`` `ndarray`
The array or object to be transformed.
batch_size : `int`, optional
If not ``None``, this determines how many items from the numpy
array will be passed through the transform at a time. This is
useful for operations that require large intermediate matrices
to be computed.
kwargs : `dict`
Passed through to :meth:`_apply`.
Returns
-------
transformed : ``type(x)``
The transformed object or array
"""
def transform(x_):
"""
Local closure which calls the :meth:`_apply` method with the
`kwargs` attached.
"""
return self._apply_batched(x_, batch_size, **kwargs)
try:
return x._transform(transform)
except AttributeError:
return self._apply_batched(x, batch_size, **kwargs)
def _apply_batched(self, x, batch_size, **kwargs):
if batch_size is None:
return self._apply(x, **kwargs)
else:
outputs = []
n_points = x.shape[0]
for lo_ind in range(0, n_points, batch_size):
hi_ind = lo_ind + batch_size
outputs.append(self._apply(x[lo_ind:hi_ind], **kwargs))
return np.vstack(outputs)
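    # e.g. batch_size=2 over 5 points runs _apply on slices [0:2], [2:4], [4:6]
    # (the last slice is simply shorter) and re-stacks the chunks in order.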
def compose_before(self, transform):
r"""
Returns a :map:`TransformChain` that represents **this** transform
composed **before** the given transform::
c = a.compose_before(b)
c.apply(p) == b.apply(a.apply(p))
``a`` and ``b`` are left unchanged.
Parameters
----------
transform : :map:`Transform`
Transform to be applied **after** self
Returns
-------
transform : :map:`TransformChain`
The resulting transform chain.
"""
return TransformChain([self, transform])
def compose_after(self, transform):
r"""
Returns a :map:`TransformChain` that represents **this** transform
composed **after** the given transform::
c = a.compose_after(b)
c.apply(p) == a.apply(b.apply(p))
``a`` and ``b`` are left unchanged.
This corresponds to the usual mathematical formalism for the compose
operator, `o`.
Parameters
----------
transform : :map:`Transform`
Transform to be applied **before** self
Returns
-------
transform : :map:`TransformChain`
The resulting transform chain.
"""
return TransformChain([transform, self])
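# Illustrative sketch (not part of the menpo API): a minimal concrete Transform.
# `_ExampleShift` is a hypothetical name used only to show how `_apply` and the
# composition methods above fit together; it assumes only numpy, imported above.
class _ExampleShift(Transform):
    def __init__(self, offset):
        self.offset = np.asarray(offset)
    def _apply(self, x, **kwargs):
        # shift every (n_points, n_dims) array by a constant offset
        return x + self.offset
# e.g. _ExampleShift([1, 0]).compose_before(_ExampleShift([0, 2])).apply(points)
# builds a TransformChain and equals applying the two shifts in sequence.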
class Transformable(Copyable):
r"""
Interface for objects that know how to be transformed by the
:map:`Transform` interface.
When ``Transform.apply_inplace`` is called on an object, the
:meth:`_transform_inplace` method is called, passing in the transforms'
:meth:`_apply` function.
This allows for the object to define how it should transform itself.
"""
def _transform_inplace(self, transform):
r"""
Apply the given transform function to ``self`` inplace.
Parameters
----------
transform : `function`
Function that applies a transformation to the transformable object.
Returns
-------
transformed : ``type(self)``
The transformed object, having been transformed in place.
"""
raise NotImplementedError()
def _transform(self, transform):
r"""
Apply the :map:`Transform` given in a non destructive manner -
returning the transformed object and leaving this object as it was.
Parameters
----------
transform : `function`
Function that applies a transformation to the transformable object.
Returns
-------
transformed : ``type(self)``
A copy of the object, transformed.
"""
copy_of_self = self.copy()
# transform the copy destructively
copy_of_self._transform_inplace(transform)
return copy_of_self
from .alignment import Alignment
from .composable import TransformChain, ComposableTransform, VComposable
from .invertible import Invertible, VInvertible
| UTF-8 | Python | false | false | 9,702 | py | 354 | __init__.py | 163 | 0.589569 | 0.588745 | 0 | 302 | 31.125828 | 80 |
zhangyuo/cf-nlp-py | 10,591,389,354,858 | bcbf03b7df672cd1d2edbda6c43ac433fe1ea87a | f5730407d57aa487c07c7ef82652ee21b64f3404 | /cfnlp/tools/logger.py | 9ceaec49dbbfebc4641f05aa9f17a9d95832958c | [
"MIT"
]
| permissive | https://github.com/zhangyuo/cf-nlp-py | 8c50c0249dd79882c881e589de83a940b82a7b0e | 1c7eae5a87ddc6695e5fdee833ea10f87e9e4ca6 | refs/heads/master | 2021-07-05T00:20:42.048259 | 2020-12-25T02:19:36 | 2020-12-25T02:19:36 | 171,785,556 | 0 | 0 | MIT | true | 2019-03-14T08:32:03 | 2019-02-21T02:28:14 | 2019-02-21T02:28:22 | 2019-03-14T08:32:02 | 11 | 0 | 0 | 0 | Python | false | null |
import logging
import logging.config
from logging_conf import PINGAN_LOGGING_CONF
logging.config.dictConfig(PINGAN_LOGGING_CONF)
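# usage sketch: other modules import this shared instance and log through it,
# e.g. `from cfnlp.tools.logger import logger; logger.info("message")`
# (the import path is an assumption based on this file's location in the repo)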
logger = logging.getLogger("pingan") | UTF-8 | Python | false | false | 167 | py | 30 | logger.py | 26 | 0.826347 | 0.826347 | 0 | 6 | 26.833333 | 46 |
rafaelcsta/www | 14,783,277,463,687 | a72c7623ad95c67430e399ab884fe37a1d496305 | 1cf4bd373b37fb89a406ba389f54b4b7b13ec5e2 | /tools/merge_cals.py | 463cd800ae60eba27f27bb68126d5793088ae87c | []
| no_license | https://github.com/rafaelcsta/www | e0e680e5bd450c883472bf4411ba5ca7aabeb369 | 3a5097c94905c8e4e1dca9f401211d4a3e1d66db | refs/heads/master | 2022-04-19T00:57:33.289620 | 2018-07-23T19:03:56 | 2018-07-23T19:03:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
from glob import glob
import icalendar
cals=[]
merged=icalendar.Calendar()
merged.add('prodid', '-//CCCB Calendar Generator//berlin.ccc.de//')
merged.add('version', '2.0')
for icsfilestr in glob("public/*/**/*.ics", recursive=True):
with open(icsfilestr, 'r') as icsfile:
print("Importing", icsfilestr)
cals.append(icalendar.Calendar.from_ical(icsfile.read()))
for cal in cals:
for e in cal.subcomponents:
merged.add_component(e)
outfile="static/all.ics"
with open(outfile, 'wb') as f:
print(f"writing to {outfile}...")
f.write(merged.to_ical())
| UTF-8 | Python | false | false | 599 | py | 22 | merge_cals.py | 4 | 0.704508 | 0.699499 | 0 | 24 | 23.875 | 67 |
zielman/Codeforces-solutions | 11,974,368,870,687 | 4e5d680d0544658f71d8c9feecf5b121e5dbf27a | f47ad16c7e8247d481372a98d3e45847e0153948 | /A/A 702 Maximum Increase.py | bb46c531b1ac30e2d5971a19149f322c68971502 | [
"MIT"
]
| permissive | https://github.com/zielman/Codeforces-solutions | 2759e1af95c0e11e8d9267b3424e837d4b8c083e | 636f11a9eb10939d09d2e50ddc5ec53327d0b7ab | refs/heads/main | 2023-06-12T04:01:48.019764 | 2021-07-05T20:24:47 | 2021-07-05T20:24:47 | 354,343,392 | 0 | 0 | MIT | false | 2021-07-05T20:24:47 | 2021-04-03T16:43:36 | 2021-06-24T01:31:27 | 2021-07-05T20:24:47 | 315 | 0 | 0 | 0 | Python | false | false | # https://codeforces.com/problemset/problem/702/A
n, arr = int(input()), list(map(int, input().split()))
ans, temp = 1, 1
for i in range(n-1):
if arr[i] < arr[i+1]:
temp += 1
ans = max(ans,temp)
else:
temp = 1
print(ans) | UTF-8 | Python | false | false | 256 | py | 196 | A 702 Maximum Increase.py | 194 | 0.542969 | 0.507813 | 0 | 14 | 17.357143 | 54 |
claranet/cloud-deploy | 11,725,260,737,816 | b19084741c2f72d8e5916ac745bcac915f5a000d | 0ccda4de98602b7addafe595d1ffecc6bb6929fe | /models/apps.py | 74201ac8d57534bb04e98e1885bda35d510fdbe9 | [
"Apache-2.0"
]
| permissive | https://github.com/claranet/cloud-deploy | 4ad5f1d4294219709eaf2ae7f3da60dbb8ac7180 | a1277f5a1173efffbaeb298c9d22ec0aa39c62e7 | refs/heads/stable | 2021-05-11T05:11:40.262534 | 2020-08-13T16:23:31 | 2020-08-13T16:23:31 | 117,956,032 | 25 | 5 | Apache-2.0 | false | 2020-08-13T16:23:33 | 2018-01-18T08:41:03 | 2020-08-13T16:22:39 | 2020-08-13T16:23:32 | 1,293 | 20 | 3 | 0 | Python | false | false | import instance_role
from jobs import LOG_NOTIFICATION_JOB_STATUSES
import resources
import tags
import volumes
APPS_DEFAULT = {
'modules.source.mode': 'symlink',
'modules.source.protocol': 'git',
}
apps_schema = {
'name': {
'type': 'string',
'regex': '^[a-zA-Z0-9_.+-]*$',
'required': True
},
'env': {
'type': 'string',
'regex': '^[a-z0-9\-\_]*$',
'required': True
},
'role': {
'type': 'string',
'regex': '^[a-z0-9\-\_]*$',
'required': True
},
'description': {
'type': 'string',
'required': False
},
'assumed_account_id': {
'type': 'string',
'regex': '^[a-zA-Z0-9_.+-]*$',
'required': False
},
'assumed_role_name': {
'type': 'string',
'regex': '^[a-zA-Z0-9_.+-]*$',
'required': False
},
'assumed_region_name': {
'type': 'string',
'regex': '^[a-zA-Z0-9_.+-]*$',
'required': False
},
'region': {'type': 'string'},
'instance_type': {'type': 'string'},
'instance_monitoring' : {'type': 'boolean', 'required': False},
'lifecycle_hooks': {
'type': 'dict',
'schema': {
'pre_buildimage': {'type': 'string'},
'post_buildimage': {'type': 'string'},
'pre_bootstrap': {'type': 'string'},
'post_bootstrap': {'type': 'string'},
}
},
'blue_green': {
'type': 'dict',
'schema': {
'enable_blue_green': {'type': 'boolean', 'required': False},
'hooks': {
'type': 'dict',
'schema': {
'pre_swap': {'type': 'string'},
'post_swap': {'type': 'string'},
}
},
'color': {'type': 'string', 'allowed': ['blue', 'green'], 'required': False},
'is_online': {'type': 'boolean', 'required': False},
'alter_ego_id': {
'readonly': True,
'type': 'objectid',
'data_relation': {
'resource': 'apps',
'field': '_id',
'embeddable': False
}
}
}
},
'features': {
'type': 'list',
'schema': {
'type': 'dict',
'schema': {
'name': {
'type': 'string',
'regex': '^[a-zA-Z0-9\.\-\_]*$',
'required': True
},
'version': {
'type': 'string',
'regex': '^[a-zA-Z0-9\.\-\_\/:~\+=\,]*$',
'required': False
},
'provisioner': {
'type': 'string',
'regex': '^[a-zA-Z0-9]*$',
'required': False
},
'parameters': {
'type': 'dict',
'allow_unknown': True,
}
}
}
},
'env_vars': {
'type': 'list',
'schema': {
'type': 'dict',
'schema': {
'var_key': {
'type': 'string',
'regex': '^(?!GHOST|ghost)[a-zA-Z_]+[a-zA-Z0-9_]*$',
'required': False
},
'var_value': {
'type': 'string',
'required': False
}
}
}
},
'ami': {'type': 'string',
'regex': '^ami-[a-z0-9]*$',
'readonly': True},
'vpc_id': {
'type': 'string',
'regex': '^vpc-[a-z0-9]*$',
'required': True
},
'modules': {
'type': 'list',
'schema': {
'type': 'dict',
'schema': {
'initialized': {'type': 'boolean',
'readonly': True},
'name': {'type': 'string',
'regex': '^[a-zA-Z0-9\.\-\_]*$',
'required': True},
'source': {
'type': 'dict',
'schema': {
'protocol': {
'type': 'string',
'required': False,
'default': APPS_DEFAULT['modules.source.protocol'],
'allowed': ['git', 's3'],
},
'url': {
'type': 'string',
'required': False,
},
'mode': {
'type': 'string',
'required': False,
'default': APPS_DEFAULT['modules.source.mode'],
'allowed': ['symlink'],
},
},
},
'git_repo': {'type': 'string',
'required': False},
'scope': {
'type': 'string',
'required': True,
'allowed': ['system', 'code']
},
'uid': {'type': 'integer', 'min': 0},
'gid': {'type': 'integer', 'min': 0},
'build_pack': {'type': 'string'},
'pre_deploy': {'type': 'string'},
'post_deploy': {'type': 'string'},
'after_all_deploy': {'type': 'string'},
'path': {'type': 'string',
'regex': '^(/[a-zA-Z0-9\.\-\_]+)+$',
'required': True},
'last_deployment': {
'readonly': True,
'type': 'objectid',
'data_relation': {
'resource': 'deployments',
'field': '_id',
'embeddable': True
}
}
}
}
},
'log_notifications': {
'type': 'list',
'coerce': lambda l: [{'email': v, 'job_states': ['*']} if isinstance(v, basestring) else v for v in l],
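        # coercion example: a bare string 'ops@example.com' becomes
        # {'email': 'ops@example.com', 'job_states': ['*']} before validation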
'schema': {
'type': 'dict',
'schema': {
'email': {
'type': 'string',
'regex': '^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$'
},
'job_states': {
'type': 'list',
'schema': {
'type': 'string',
'allowed': LOG_NOTIFICATION_JOB_STATUSES + ['*'],
'default': '*'
}
}
}
}
},
'autoscale': {
'type': 'dict',
'schema': {
'min': {'type': 'integer', 'min': 0},
'max': {'type': 'integer', 'min': 0},
'enable_metrics': {'type': 'boolean', 'required': False},
'name': {'type': 'string'}
}
},
'safe-deployment': {
'type': 'dict',
'schema': {
'load_balancer_type' : {'type': 'string'},
'wait_after_deploy' : {'type': 'integer', 'min': 0},
'wait_before_deploy' : {'type': 'integer', 'min': 0},
'app_tag_value': {'type': 'string', 'required': False},
'ha_backend': {'type': 'string', 'required': False},
'api_port': {'type': 'integer', 'required': False}
}
},
'build_infos': {
'type': 'dict',
'schema': {
'ssh_username': {'type': 'string',
'regex': '^[a-z\_][a-z0-9\_\-]{0,30}$',
'required': True},
'source_ami': {'type': 'string',
'regex': '^ami-[a-z0-9]*$',
'required': True},
'ami_name': {'type': 'string',
'readonly': True},
'source_container_image': {'type': 'string',
'regex': '^(().)*$',
'required': False
},
'container_image': {'type': 'string',
'readonly': True
},
'subnet_id': {'type': 'string',
'regex': '^subnet-[a-z0-9]*$',
'required': True}
}
},
'resources': {'type': 'list',
'schema': resources.available},
'environment_infos': {'type': 'dict',
'schema': {
'security_groups': {'type': 'list',
'schema':
{'type': 'string',
'regex': '^sg-[a-z0-9]*$'}},
'subnet_ids': {'type': 'list',
'schema': {'type': 'string',
'regex':
'^subnet-[a-z0-9]*$'}},
'instance_profile':
{'type': 'string',
'regex': '^[a-zA-Z0-9\+\=\,\.\@\-\_]{1,128}$'},
'key_name': {'type': 'string',
'regex': '^[\x00-\x7F]{1,255}$'},
'public_ip_address': {'type': 'boolean', 'required': False, 'default':True},
'root_block_device':
{'type': 'dict',
'schema': {
'size': {'type': 'integer', 'min': 20},
'name': {'type': 'string',
'regex': '^$|^(/[a-z0-9]+/)?[a-z0-9]+$'}
}},
'instance_tags':
{'type': 'list',
'required': False,
'schema': tags.block
},
'optional_volumes': {'type': 'list',
'required': False,
'schema': volumes.block}
}},
'user': {'type': 'string'},
'pending_changes': {
'type': 'list',
'required': False,
'schema': {
'type': 'dict',
'schema': {
'field': {
'type': 'string',
'regex': '[a-zA-Z_]+[a-zA-Z0-9_]*$',
'required': False
},
'user': {
'type': 'string',
'required': False
},
'updated': {
'type': 'datetime',
'required': False
},
},
},
},
}
apps = {
'datasource': {
'source': 'apps'
},
'item_title': 'app',
'schema': apps_schema,
'mongo_indexes': {
'name-role-env-blue_green.color': [('name', 1), ('role', 1), ('env', 1), ('blue_green.color', 1)],
}
}
| UTF-8 | Python | false | false | 11,363 | py | 115 | apps.py | 83 | 0.309249 | 0.302297 | 0.000176 | 326 | 33.855828 | 111 |
IslamZakaria/Hotel-Reviews-Analysis | 5,093,831,238,954 | 04c6e1346808d6f670e1410dc4f2c4419419987e | db200fbcf394d63a8d0d736fa45da2f45048bba4 | /SVM.py | a5755f31a961c28775b4bd9cd8a11ff607046696 | []
| no_license | https://github.com/IslamZakaria/Hotel-Reviews-Analysis | b93a7265e4f83bc2565572f7f73bb75dd6a82d4d | 65055e0ba27f8032fbbc59144b361eedd2a1dbdf | refs/heads/master | 2022-07-15T23:04:15.623824 | 2020-05-14T07:09:14 | 2020-05-14T07:09:14 | 263,839,475 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from Label_Endcoder import *
import seaborn as sns
from sklearn import metrics
from sklearn.metrics import r2_score
from timeit import default_timer as timer
from sklearn.svm import SVC
from sklearn.multiclass import OneVsRestClassifier
Missing_Val =["NA",' ','']
data = pd.read_csv('Hotel_Review_Milestone_2_Test_Samples.csv',na_values=Missing_Val)
mean_lat = data['lat'].mean()
mean_lng = data['lng'].mean()
mean_Additinal_num_Score = data['Additional_Number_of_Scoring'].mean()
mean_Av_Score = data['Average_Score'].mean()
mean_Neg_Count = data['Review_Total_Negative_Word_Counts'].mean()
mean_Numb_Rev = data['Total_Number_of_Reviews'].mean()
mean_Numb_Reviwer_Revs = data['Total_Number_of_Reviews_Reviewer_Has_Given'].mean()
mean_Pos_Count = data['Review_Total_Positive_Word_Counts'].mean()
data['lat'].fillna(mean_lat, inplace=True)
data['lng'].fillna(mean_lng, inplace=True)
data['Additional_Number_of_Scoring'].fillna(mean_Additinal_num_Score, inplace=True)
data['Average_Score'].fillna(mean_Av_Score, inplace=True)
data['Review_Total_Negative_Word_Counts'].fillna(mean_Neg_Count, inplace=True)
data['Total_Number_of_Reviews'].fillna(mean_Numb_Rev, inplace=True)
data['Total_Number_of_Reviews_Reviewer_Has_Given'].fillna(mean_Numb_Reviwer_Revs, inplace=True)
data['Review_Total_Positive_Word_Counts'].fillna(mean_Pos_Count, inplace=True)
data['Negative_Review'].fillna('No Negative',inplace=True)
data['Positive_Review'].fillna('No Positive',inplace=True)
data['Hotel_Address'].fillna('No Hotel',inplace=True)
data['Review_Date'].fillna('No Date',inplace=True)
data['Hotel_Name'].fillna('No Name',inplace=True)
data['Reviewer_Nationality'].fillna('No Nationality',inplace=True)
data['Tags'].fillna('No Tags',inplace=True)
data['days_since_review'].fillna('No Days',inplace=True)
#print(data["Negative_Review"][286])
cols=('Hotel_Address','Review_Date','Hotel_Name','Reviewer_Nationality','Reviewer_Score','Tags')
data = Feature_Encoder(data, cols)
corr = data.corr()
plt.subplots(figsize=(12, 8))
sns.heatmap(corr, annot=True)
plt.show()
needed=['Reviewer_Score','Hotel_Address','Review_Date','Hotel_Name','Reviewer_Nationality','Tags','Review_Total_Positive_Word_Counts','Review_Total_Negative_Word_Counts']
def drop_columns(data, needed):
not_needed_columns = [c for c in data if c not in needed]
data.drop(not_needed_columns, axis=1, inplace=True)
return data
data = drop_columns(data, needed)
# select features/target by column name rather than by position, so the target
# column cannot leak into X whatever the CSV's column order is
X = np.array(data.drop('Reviewer_Score', axis=1))
Y = np.array(data["Reviewer_Score"])
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size = 0.20,shuffle=True)
scaler = StandardScaler()
scaler.fit(X_train)
X_train = scaler.transform(X_train)
X_test = scaler.transform(X_test)
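# note: the scaler statistics come from the training split only, so no
# information from the held-out test set leaks into training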
svm_model_linear_ovr = OneVsRestClassifier(SVC(kernel='linear', C=1))
start=timer()
svm_model_linear_ovr.fit(X_train, y_train)
end=timer()
train_time_ovr=end-start
#svm_predictions = svm_model_linear_ovr.predict(X_test)
start=timer()
ovr_prediction=svm_model_linear_ovr.predict(X_test)
end=timer()
test_time_ovr=end-start
# model accuracy for X_test
accuracy = svm_model_linear_ovr.score(X_test, y_test)
print('One VS Rest SVM accuracy: ' + str(accuracy))
print('Mean Square Error for OVR SVM', metrics.mean_squared_error(np.asarray(y_test), ovr_prediction))
print('r2 score OVR is: '+ str(r2_score(y_test, ovr_prediction)))
print('train time OVR is :'+ str(train_time_ovr))
print('test time OVR is :'+ str(test_time_ovr))
print('--------------------------------------------------------------------------------')
#
svm_model_linear_ovo = SVC(kernel='linear', C=1)
start=timer()
svm_model_linear_ovo.fit(X_train, y_train)
end=timer()
ovo_train_time=end-start
#svm_predictions = svm_model_linear_ovo.predict(X_test)
start=timer()
ovo_prediction = svm_model_linear_ovo.predict(X_test)
end=timer()
ovo_test_time=end-start
# model accuracy for X_test
accuracy = svm_model_linear_ovo.score(X_test, y_test)
print('One VS One SVM accuracy: ' + str(accuracy))
print('Mean Square Error for OVO SVM', metrics.mean_squared_error(np.asarray(y_test), ovo_prediction))
print('r2 score OVO is: '+ str(r2_score(y_test, ovo_prediction)))
print('train time OVO is :'+ str(ovo_train_time))
print('test time OVO is :'+ str(ovo_test_time))
| UTF-8 | Python | false | false | 4,416 | py | 7 | SVM.py | 6 | 0.726676 | 0.722373 | 0 | 108 | 39.888889 | 170 |
Abrasko/SppedVagon | 19,061,064,896,562 | 1f98915442ec1460a6dcae509ff4a6b709dd9355 | 94027d44590e9a589814209fd9989ef4bb094904 | /mychar/apps.py | 2acbdc504bfd2276db6aa0a0186a7e9b50591973 | []
| no_license | https://github.com/Abrasko/SppedVagon | 10b0503f73800aaca0e506a01bbbfd2fce6018a6 | eaa849776560b8803b6f3d22440c9e1eb8df5571 | refs/heads/master | 2021-04-29T22:38:28.772850 | 2018-07-28T16:02:10 | 2018-07-28T16:02:10 | 121,640,943 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.apps import AppConfig
class MycharConfig(AppConfig):
name = 'mychar'
| UTF-8 | Python | false | false | 87 | py | 32 | apps.py | 20 | 0.747126 | 0.747126 | 0 | 5 | 16.4 | 33 |
bartosz-socha/python_basics | 3,032,246,945,060 | 22ccb1e49eaee62321ae4c0702783190d18291ef | e32508c2ea2781420848017258fca2e256b4cc82 | /K_modules_pip.py | 9cebd3160f3deadf3aebe27b977b60e7ca6a7d95 | []
| no_license | https://github.com/bartosz-socha/python_basics | fa3448c1b9ebbe73f4241cd10a6bacedfe780ec4 | 8ea42bd6e237851550c8fb987e4d2c5e35f02607 | refs/heads/master | 2023-01-24T11:40:49.947052 | 2020-11-26T08:52:37 | 2020-11-26T08:52:37 | 314,291,711 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import H_functions as fun
import docx as doc
doc.Document()  # creates an empty Word document just to show python-docx imported; the object is discarded
fun.echo("I use import to print that text")
fun.echo(fun.my_power(3, 3))
| UTF-8 | Python | false | false | 137 | py | 16 | K_modules_pip.py | 14 | 0.715328 | 0.70073 | 0 | 8 | 16 | 43 |
jiangzhenfei/blog-back | 592,705,533,861 | e0ede080ee179165bbc5114e48ebb9cd1c027263 | a92d4e72cc084141b8f571ddf42c7ea7a8bcaf34 | /app/blog.py | 6d82e0ff215ae6723ab5744189f098e1a83659a8 | []
| no_license | https://github.com/jiangzhenfei/blog-back | d5c04e8ef146f111f114ef3661251ae9ef696bf9 | 268ffbee50f8703137574a9bad2085603105e9d5 | refs/heads/master | 2020-03-15T04:04:28.307284 | 2018-05-03T07:29:48 | 2018-05-03T07:29:48 | 131,956,961 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from flask import Flask,url_for, request, redirect, render_template,Blueprint,jsonify
import json
from app import db
from .model import Blog,User
# datetime is used below to stamp the user's last login
from datetime import datetime
blog = Blueprint('blog',__name__)
# common response envelope; builds the right payload for each case
def util_res(code=200,message='',success=True,response=None):
return {
'code': code,
'message': message,
'success': success,
'response': response
}
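# usage sketch: jsonify(util_res(code=400, message='bad input', success=False))
# yields {"code": 400, "message": "bad input", "success": false, "response": null}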
# endpoint: list all blog posts
@blog.route('/list',methods=['GET'])
def list():
res_blog = []
blogs = Blog.query.all()
    # shape the DB rows into the payload the frontend expects
for item in blogs:
if item == None:break
item_log = {
'title': item.title,
'content': item.content,
'summary': item.summary,
'id': item.id
}
res_blog.append(item_log)
resp = util_res(response = res_blog)
return jsonify(resp)
# endpoint: create a blog post
@blog.route('/add',methods = ['POST'])
def add():
data = request.data.decode('utf-8')
    # json.loads turns the raw request body into a dict
    j_data = json.loads(data)
    # check that the required fields are present
    if not j_data.get('title'):
        resp = util_res(code = 400,message='title is required',success=False)
return jsonify(resp)
if not j_data.get('content'):
        resp = util_res(code = 400,message='content is required',success=False)
        return jsonify(resp)
    if not j_data.get('summary'):
        resp = util_res(code = 400,message='summary is required',success=False)
return jsonify(resp)
blog_item = Blog(title = j_data['title'], content = j_data['content'],summary = j_data['summary'])
db.session.add(blog_item)
db.session.commit()
resp = util_res()
return jsonify(resp)
# endpoint: blog detail
@blog.route('/detail/<id>',methods=['GET'])
def detail(id):
    # fetch the first row matching the given id
    thisBlog = Blog.query.filter_by(id = id).first()
    if thisBlog is None:
        resp = util_res(code=400,message='blog not found',success=False)
        return jsonify(resp)
    # shape into the payload the frontend expects
res_detail = {
'title':thisBlog.title,
'content':thisBlog.content,
'summary':thisBlog.summary,
'id':thisBlog.id
}
resp = util_res(response = res_detail)
return jsonify(resp)
# endpoint: user login
@blog.route('/login',methods = ['POST'])
def login():
    # decode as utf-8 so non-ASCII names or passwords do not raise
    data = request.data.decode('utf-8')
    # json.loads turns the raw request body into a dict
    j_data = json.loads(data)
    # check that the required fields are present
    if not j_data.get('name'):
        resp = util_res(code = 400,message = 'name is required',success = False)
        return jsonify(resp)
    if not j_data.get('password'):
        resp = util_res(code = 400,message='password is required',success = False)
        return jsonify(resp)
    # fetch the matching user record for the password check
    user = User.query.filter_by(name = j_data['name']).first()
    # three cases: the user is missing, the password is wrong, or both are correct
    if user is None:
        resp = util_res(code = 400,message = 'user does not exist',success = False)
        return jsonify(resp)
    if j_data['password'] == user.password:
        # refresh the stored timestamp on every successful login
        user.time = datetime.now().timestamp()
        db.session.add(user)
        db.session.commit()
        resp = util_res(message = 'login successful',response = user.name)
        return jsonify(resp)
    else:
        resp = util_res(code = 400,message = 'wrong password',success = False)
return jsonify(resp)
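# Hypothetical wiring sketch (not shown in this file): the app factory would
# register this blueprint roughly as
#     from .blog import blog
#     app.register_blueprint(blog, url_prefix='/blog')
# so the endpoints above are served at /blog/list, /blog/add, /blog/detail/<id>, ...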
| UTF-8 | Python | false | false | 3,815 | py | 8 | blog.py | 6 | 0.606143 | 0.597712 | 0 | 111 | 28.828829 | 102 |
lianglunzhong/latte-erp | 515,396,120,902 | 084fbfa555499872f5e875343eb6bc5f3998b91a | a0a0932b6ab6ec47c2757d8929216790f5bc6535 | /shipping/models.py | f9e7c6702c97f381a0137e412c3010076a662bba | []
| no_license | https://github.com/lianglunzhong/latte-erp | b4e6e3b13c4bce17911ff166fecc36172e0bea5b | b58936c8d9917f3efdcb3585c54bfd3aba4723c2 | refs/heads/master | 2022-11-27T03:08:23.780124 | 2017-04-28T02:51:43 | 2017-04-28T02:51:43 | 89,660,834 | 0 | 0 | null | false | 2022-11-22T01:04:12 | 2017-04-28T02:48:50 | 2017-04-28T03:04:49 | 2022-11-22T01:04:08 | 6,316 | 0 | 0 | 4 | Python | false | false | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
import datetime
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.db.models import Sum
import lib
import order
import product
import depot
import supply
class Package(models.Model):
STATUS = (
(0, u'未处理'),
        (1, u'开始处理'),  # manual review
        (2, u'配货中'),  # picking has started, fulfilled entirely from stock
(3, u'打包中'),
(5, u'已发货'),
(6, u'妥投'),
(7, u'到达待取'),
(4, u'取消'),
)
PICK_STATUS = (
(0, u'未分拣'),
(1, u'分拣中'),
(2, u'分拣完成'),
(3, u'包装完成'),
(4, u'分拣异常'),
(5, u'包装异常'),
)
    order = models.ForeignKey('order.Order', verbose_name=u'订单')  # string reference avoids clashing with the imported order module
status = models.IntegerField(choices=STATUS, default=0, verbose_name=u'包裹发货状态')
shipping = models.ForeignKey(lib.models.Shipping, null=True, blank=True, verbose_name=u'物流方式')
note = models.TextField(default='', blank=True, null=True, verbose_name=u'备注')
tracking_no = models.CharField(max_length=250, default="", blank=True, db_index=True, verbose_name=u'运单号')
ship_time = models.DateTimeField(blank=True, null=True, verbose_name=u'执行发货时间')
    shipper = models.ForeignKey(User, null=True, blank=True, related_name="package_shipper", verbose_name=u'执行发货人') # user who performed the shipment
print_time = models.DateTimeField(blank=True, null=True, verbose_name=u'包裹打印时间')
    printer = models.ForeignKey(User, null=True, blank=True, related_name="package_printer", verbose_name=u'打印包裹/面单人') # user who printed the label
email = models.EmailField(default='', verbose_name=u'收件人邮箱')
shipping_firstname = models.CharField(u'收件人姓', max_length=100, default='', blank=True, null=True)
shipping_lastname = models.CharField(u'收件人名', max_length=100, default='', blank=True, null=True)
shipping_address = models.CharField(u'收件人地址', max_length=500, default='', blank=True, null=True)
shipping_address1 = models.CharField(u'收件人地址1', max_length=500, default='', blank=True, null=True)
shipping_city = models.CharField(u'收件人城市', max_length=250, default='', blank=True, null=True)
shipping_state = models.CharField(u'收件人州', max_length=250, default='', blank=True, null=True)
shipping_country = models.ForeignKey(lib.models.Country, null=True, blank=True)
shipping_zipcode = models.CharField(u'收件人邮编', max_length=100, default='', blank=True, null=True)
shipping_phone = models.CharField(u'收件人电话', max_length=100, default='', blank=True, null=True)
qty = models.IntegerField(default=0, blank=True, verbose_name=u"货品总数")
sku_qty = models.IntegerField(default=0, blank=True, verbose_name=u"SKU总数")
weight = models.FloatField(u'包裹重量', default=0.0)
cost = models.FloatField(default=0, blank=True, null=True, verbose_name=u"成本")
cost1 = models.FloatField(default=0, blank=True, null=True, verbose_name=u"成本1")
sf_numbers = models.CharField(u"顺丰单号", max_length=30, default="", blank=True)
skybill_code = models.CharField(u"顺丰渠道转单号", max_length=30, default="", blank=True)
pick = models.ForeignKey(depot.models.Pick, null=True, blank=True, verbose_name=u"拣货单")
pick_type = models.IntegerField(u'拣货单类型', choices=depot.models.Pick.PICK_TYPES, default=0)
pick_status = models.IntegerField(u'分拣状态', choices=PICK_STATUS, default=0, blank=True)
code = models.IntegerField(u'是否同一物理仓,一起拣货', choices=depot.models.Depot.CODE, default=0)
position_score = models.IntegerField(default=0, blank=True, verbose_name=u'库位得分')
pick_error_info = models.TextField(default='', blank=True, verbose_name=u'拣货单异常信息')
option_log = models.TextField(default='', blank=True, verbose_name=u'操作记录')
created = models.DateTimeField(auto_now_add=True, verbose_name=u"新建时间")
updated = models.DateTimeField(auto_now=True, verbose_name=u"修改时间")
_status = None
class Meta:
verbose_name = u'包裹单'
verbose_name_plural = u'包裹单'
def get_admin_url(self):
content_type = ContentType.objects.get_for_model(self.__class__)
return reverse("admin:%s_%s_change" % (content_type.app_label, content_type.model), args=(self.id,))
def __unicode__(self):
return "PACKAGE#%s" % self.id
def __init__(self, *args, **kwargs):
super(Package, self).__init__(*args, **kwargs)
self._status = self.status
self._shipping_id = self.shipping_id
self._tracking_no = self.tracking_no
def can_pick(self):
can_pick = False
packageitems = PackageItem.objects.filter(package=self).filter(deleted=False)
for packageitem in packageitems:
can_pick = True
qty_locked = ItemLocked.objects.filter(package_item=packageitem, deleted=False).aggregate(Sum('qty'))['qty__sum']
if not qty_locked:
qty_locked = 0
if packageitem.qty != qty_locked:
can_pick = False
break
return can_pick
def get_name(self):
return self.shipping_firstname + ' ' + self.shipping_lastname
def get_address(self):
return self.shipping_address + ' ' + self.shipping_address1
def get_weight(self):
weight = 0
for item_weight, qty in self.packageitem_set.filter(deleted=False).values_list('item__product__weight', 'qty'):
weight += item_weight * qty
return weight
def get_qty(self):
qty = self.packageitem_set.filter(deleted=False).aggregate(Sum('qty')).get('qty__sum') or 0
return qty
def get_sku_qty(self):
qty = self.packageitem_set.filter(deleted=False).values('item_id').count() or 0
return qty
def get_pick_type(self):
"""根据package的ItemLocked的数量来判断pick_type"""
item_count = ItemLocked.objects.filter(package_item__package_id=self.id).filter(deleted=False).count()
item_qtys = self.packageitem_set.aggregate(Sum('qty')).get('qty__sum') or 0
if item_count == 1 and item_qtys == 1:
pick_type = 1
elif item_count == 1 and item_qtys > 1:
pick_type = 2
elif item_count > 1:
pick_type = 3
else:
pick_type = 0
return pick_type
    def get_position_score(self):
        """Compute the warehouse-position score for this package."""
        positions = ItemLocked.objects.filter(package_item__package_id=self.id)\
            .values_list('depot_item__position', flat=True)
        # TODO: Nanjing and Guangzhou may sort positions differently and need per-depot rules
        positions = [i.split(',')[0] for i in positions if i]
        # each distinct position is counted once, hence the set comprehensions below
        if self.code == 1:  # Nanjing
            SCORE_STR = "ABCDEFGHIJKLMNOPGRSTUVWXYZ"
            diff_str = {i[1] for i in positions}
        elif self.code == 2:  # Guangzhou
            SCORE_STR = "EFBDCAGHIJKLMNOPGRSTUVWXYZ"
            diff_str = {i[2] for i in positions}
        else:
            # unknown warehouse code: no ordering is defined, score as 0
            return 0
        # sum the score of every distinct position letter
        return sum([2**SCORE_STR.index(i) for i in diff_str])
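    # worked example: with code == 1 (Nanjing) and distinct letters {'B', 'D'},
    # the score is 2**1 + 2**3 == 10, so packages that share aisles get similar
    # scores and sort together when picks are ordered by position_score.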
def get_custom_amount(self):
        '''Compute the customs declaration amount printed for this package; the rule differs per channel.'''
        # TODO: add a real custom_amount model field
if hasattr(self, 'custom_amount') and self.custom_amount:
return self.custom_amount
custom_amount = 0
        # channel.type is a choices field
shop_type = self.order.channel.type
if shop_type == 0:
items = self.packageitem_set.values_list('item__product__category__name', 'qty')
for category, qty in items:
                # TODO: confirm the exact 'shoes' category name
if category.lower() == 'shoes':
custom_amount += 15 * qty
                # TODO: confirm the jewelry category names
elif category.lower() in ['anklets', 'body chains', 'bracelets & bangles', 'earrings', 'necklaces', 'rings']:
custom_amount += 0.5 * qty
else:
custom_amount += 5 * qty
        # TODO: confirm the AliExpress (smt) and wholesale shop type codes
elif shop_type in (2, 8):
custom_amount = self.get_qty() * 2
if custom_amount >= 20 or custom_amount == 0:
return 20
else:
return custom_amount
def set_to_logistics(self):
        '''Stage package and packageitem data for the logistics API.
        Carrier APIs usually take a single packageitem, so pass the package's exact total quantity.'''
self.name = self.get_name()
self.qty = self.get_qty()
self.weight = self.weight or self.get_weight()
self.address = self.get_address()
self.custom_amount = self.get_custom_amount()
if self.qty:
self.price = round(float(self.custom_amount) / self.qty, 2)
else:
self.price = 0
packageitem = self.packageitem_set.first()
if packageitem:
packageitem.name, packageitem.cn_name = packageitem.get_item_name()
return packageitem
    def have_nails(self):
        '''Whether the package contains nail polish (such packages must be shipped manually).'''
        package_item_categorys = self.packageitem_set.values_list('item__product__category__name', flat=True)
        # fix: .lower() must be called, otherwise the membership test compares method objects
        if 'nails' in [i.lower() for i in package_item_categorys]:
            return True
        else:
            return False
    def get_carrier(self):
        '''Pick this package's carrier label from the rules below; return '' when nothing fits.'''
        carrier = ''
        amount_shipping = round(self.order.amount_shipping / self.order.rate, 2)
        shipping_country = self.shipping_country.code
        weight = self.get_weight()
        tongguan = True if not self.order.cannot_tongguan() else False  # whether the order can clear customs normally
        if self.note or self.order.note:
            pass
        # shipping rules for ky, i.e. the AliExpress (smt) channel
        elif self.order.channel.type == 2:
            # Hong Kong/Macao/Taiwan and mainland China
            if shipping_country in ['HK','MO','TW',"CN"]:
                carrier = 'SF'
            else:
                if amount_shipping >= 10 or self.order.shipping_type == 1:
                    # United Arab Emirates, India, Oman, Bangladesh
                    if shipping_country in ['AE', 'IN', 'OM', 'BD']:
                        carrier = 'ARAMEX'
                    else:
                        carrier = 'EMS'
                elif amount_shipping < 10 or self.order.shipping == 0:
                    # United States, U.S. Virgin Islands
                    if shipping_country in ['US']:
                        if tongguan:
                            carrier = 'EUB'
                        else:
                            carrier = 'DEUB'
                    elif shipping_country == 'CA':
                        # ky is smt-only now, so the old Amazon rule no longer applies; kept for reference:
                        # if package.order.shop_id in [13,2] and tongguan:
                        #     carrier = 'EUB'
                        # elif package.order.shop_id in [13,2] and (not tongguan):
                        #     carrier = 'DEUB'
                        if tongguan:
                            carrier = 'NXB'
                        else:
                            carrier = 'DNXB'
                    elif shipping_country == 'RU':
                        # decided by the amount on the package's order
                        order_amount = round(self.order.amount / self.order.rate, 2)
                        if order_amount < 10:
                            carrier = 'SFRU'
                        elif 10 <= order_amount < 100:
                            carrier = 'NXB'
                elif tongguan:
                    carrier = 'NXB'
                else:
                    carrier = 'DNXB'
        # everything below covers the non-smt channels, i.e. ws
        elif self.have_nails():  # fix: call the method; the bare attribute was always truthy
            self.note += u'含有指甲油, 需要手动发货'
            self.save()
else:
hkpt_countries = [
'IS','MC','VA','AL','MK','RS','KH','MM','CL','CR','PA','PY','PE','UY','KO','IC',
'AF','AG','GS','AO','BB','BD','BF','BI','BJ','BO','BS','BT','BW','BZ','CI','CK',
'CM','CO','CU','CV','CX','DJ','DZ','EC','EH','ER','ET','FJ','FO','GA','GD','GE',
'GF','GH','GI','GM','GN','GQ','GT','GU','GW','GY','HN','HT','IQ','IR','JM','KE',
'KM','KY','LI','LR','LS','MG','MH','ML','MQ','MR','MU','MV','MW','NF','KI','AD',
'NI','NP','NR','NU','PG','PR','RW','SB','SD','SL','SM','SN','SO','SR','SV','TD',
'TG','TL','TN','TO','TT','TV','UG','VE','VU','YE','ZM','ZW','IF','JI','EI','ZR',
'AI','AS','AW','BM','BN','CC','CD','CF','CG','DO','FK','FM','GL','GP','HM','IO',
'KN','LA','LC','LY','MP','MS','NC','PM','PN','PW','SJ','TC','TF','TK','TZ','UM',
'VC','VG','VI','WF','WS','ME',]
            # Hong Kong, Macao, Taiwan
            if shipping_country in ['HK', 'MO', 'TW']:
                carrier = 'SF'
            # mainland China
            elif shipping_country in ['CN']:
                carrier = 'SFZG'
            # send by express courier (more expensive)
            elif amount_shipping >= 10 or self.order.shipping == 1:
                # Australia, Papua New Guinea, Philippines, South Korea, Cambodia, Malaysia, Mongolia, Japan, Thailand, Singapore, New Zealand, Indonesia, Vietnam, North Korea
                if shipping_country in ['AU','PG','PH','KR','KH','MY','MN','JP','TH','SG','NZ','ID','VN','KP']:
                    carrier = 'EMS'
                # Russia, Bahrain, Kuwait, Qatar, Jordan, Lebanon, Saudi Arabia, Egypt, Iran, Cyprus, Turkey, Israel, Sri Lanka, Pakistan
                elif shipping_country in ['CY','RU','TR','IL','JO','QA','SA','BH','EG','IR','KW','LB','LK','PK']:
                    pass
                # United Arab Emirates, India, Oman, Bangladesh
                elif shipping_country in ['AE','IN','OM','BD']:
                    carrier = 'ARAMEX'
                # Ireland, Estonia, Austria, Poland, Denmark, Finland, Guernsey, Czechia, Latvia, Lithuania, Malta, Norway, Portugal, Sweden, Jersey, Switzerland, Cyprus, Slovakia, Slovenia, Greece, Hungary
                elif shipping_country in ['IE','EE','AT','PL','DK','FI','GG','CZ','LV','LT','MT','NO','PT','SE','JE','CH','CY','SK','SI','GR','HU']:
                    carrier = 'DHL'
                # United Kingdom, Germany, France, Belgium, Italy, Spain, Luxembourg, Netherlands
                elif shipping_country in ['GB','DE','FR','BE','IT','ES','LU','NL']:
                    carrier = 'DHL'
                # United States, Canada, Mexico
                elif shipping_country in ['US','CA','MX']:
                    carrier = 'DHL'
                else:
                    pass
elif amount_shipping < 10 or self.order.shipping == 0:
if shipping_country == 'US':
if self.order.shop_id != 7:
if not tongguan:
carrier = 'MU'
elif weight < 1:
carrier = 'SUB'
else:
carrier = 'MU'
else:
if weight < 1.85:
carrier = 'SUB'
else:
carrier = "DHL"
elif shipping_country == 'CA':
if weight < 1.85:
carrier = 'DGM'
else:
carrier = 'DHL'
            # Ukraine, Israel
            elif shipping_country in ['UA','IL']:
                carrier = 'KYD'
            # Brazil
            elif shipping_country in ['BR']:
                carrier = 'SEB'
            # United Kingdom, Luxembourg, Netherlands, Latvia
            elif shipping_country in ['GB','LU','NL','LV']:
                if weight > 0.6 and weight < 1.85:
                    carrier = 'MU'
                elif tongguan and weight <= 0.6:
                    carrier = 'KYD'
                elif not tongguan and weight <= 0.6:
                    carrier = "MU"
                else:
                    carrier = 'DHL'
            # Finland, Iceland, Portugal, Slovakia
            elif shipping_country in ['FI','IS','PT','SK']:
                carrier = 'NLR'
            # Czechia, Lithuania, Slovenia
            elif shipping_country in ['CZ','LT','SI']:
                if weight < 1:
                    carrier = 'NLR'
                elif weight >= 1 and weight <= 1.85:
                    carrier = 'MU'
                else:
                    carrier = 'DHL'
            # Romania
            elif shipping_country in ['RO',]:
                if weight < 1.5:
                    carrier = 'NLR'
                elif weight >= 1.5 and weight <= 1.85:
                    carrier = 'MU'
                else:
                    carrier = 'DHL'  # fix: the bare string 'DHL' was never assigned
            # Australia
            elif shipping_country == 'AU':
                if weight < 1:
                    carrier = 'KYD'
                else:
                    carrier = 'MU'
            # Hungary, Sweden, Estonia, Ireland
            elif shipping_country in ['HU','SE','EE','IE']:
                if weight > 1 and weight < 1.85:
                    carrier = 'MU'
                elif weight <= 1:
                    carrier = 'KYD'
                else:
                    carrier = 'DHL'
            # Indonesia, Brunei, New Zealand, Philippines
            elif shipping_country in ['ID','BN','NZ','PH']:
                if weight < 1.85:
                    carrier = 'KYD'
                else:
                    carrier = 'EMS'
            elif shipping_country == 'RU':
                if weight < 1.85:
                    carrier = 'XRA'
            # Bosnia and Herzegovina
            elif shipping_country in ['BA']:
                carrier = 'HKPT'
            # Austria, Poland
            elif shipping_country in ['AT','PL']:
                if weight < 1.85:
                    carrier = 'NLR'
                else:
                    carrier = 'DHL'  # fix: this branch mistakenly assigned 'DHL' to weight
            # Switzerland, Denmark
            elif shipping_country in ['CH','DK']:
                if weight < 1.85 and tongguan:
                    carrier = 'KYD'
                elif weight < 1.85 and not tongguan:
                    carrier = 'KYD'
                else:
                    carrier = 'DHL'
            # Germany
            elif shipping_country in ['DE']:
                if weight < 1.85:
                    carrier = 'DGM'
                else:
                    carrier = 'DHL'
            # Belgium, Spain
            elif shipping_country in ['BE', 'ES']:
                if weight < 1.85:
                    carrier = 'NLR'
                else:
                    carrier = 'DHL'
            # South Korea
            elif shipping_country in ['KR']:
                if weight < 1.85:
                    carrier = 'KYD'
                else:
                    carrier = 'EMS'
            # Singapore
            elif shipping_country in ['SG']:
                if weight < 1.85 and tongguan:
                    carrier = 'KYD'
                elif weight < 1.85 and not tongguan:
                    carrier = 'KYD'
                else:
                    carrier = 'EMS'
            # Saudi Arabia, Kuwait, South Africa, Vietnam, Morocco, Lebanon, Bahrain, Azerbaijan, Egypt, Jordan, Qatar, Nigeria, Moldova
            elif shipping_country in ['SA','KW','ZA','VN','MA','LB','BH','AZ','EG','JO','QA','NG','MD']:
                if weight < 1.85:
                    carrier = 'HKPT'
            elif shipping_country in hkpt_countries:
                carrier = 'HKPT'
            # Mexico
            elif shipping_country in ['MX']:
                if weight < 1.85:
                    carrier = 'HKPT'
                else:
                    carrier = 'DHL'
            elif shipping_country in ['IT']:
                if weight < 1.85 and tongguan:
                    carrier = 'KYD'
                elif weight < 1.85 and not tongguan:
                    carrier = 'KYD'
                else:
                    carrier = 'DHL'
            elif shipping_country in ['FR']:
                if weight < 1.85:
                    carrier = "DGM"
                else:
                    carrier = 'DHL'
            # United Arab Emirates
            elif shipping_country in ['AE',]:
                if weight < 1.85:
                    carrier = 'SGB'
                else:
                    carrier = 'ARAMEX'
            else:
                if tongguan:
                    carrier = 'KYD'
                else:
                    carrier = 'KYD'
return carrier
def _delete(self):
for packageitem in self.packageitem_set.all():
packageitem.deleted = True
packageitem.save()
def save(self, *args, **kw):
        # the package has just been switched to the cancelled state
PACKAGE_CANCEL = 4
if self._status != PACKAGE_CANCEL and self.status == PACKAGE_CANCEL:
self._delete()
super(Package, self).save(*args, **kw)
def delete(self, *args, **kwargs):
self._delete()
super(Package, self).delete(*args, **kwargs)
class PackageItem(models.Model):
package = models.ForeignKey(Package, verbose_name='包裹单号')
item = models.ForeignKey(product.models.Item, verbose_name='包裹货品')
qty = models.IntegerField(default=0, verbose_name='数量')
note = models.TextField(default='', blank=True, null=True, verbose_name='备注')
deleted = models.BooleanField(default=False, verbose_name='是否已删除')
created = models.DateTimeField(auto_now_add=True, verbose_name='新增时间')
updated = models.DateTimeField(auto_now=True, verbose_name='修改时间')
_qty = 0
class Meta:
verbose_name = u'包裹单货品'
verbose_name_plural = u'包裹单货品'
def get_admin_url(self):
content_type = ContentType.objects.get_for_model(self.__class__)
return reverse("admin:%s_%s_change" % (content_type.app_label, content_type.model), args=(self.id,))
def get_positions(self):
"""获得这个package_item的库位"""
positions = list(ItemLocked.objects.filter(package_item=self.id).values_list('depot_item__position', flat=True))
position_list = ','.join(positions).split(',')
no_repeat_position = []
for i in position_list:
no_repeat_position.append(i)
return ','.join(no_repeat_position)
def get_total(self):
"""这个包裹的成本使用出库记录中的成本"""
total = 0
try:
info = depot.models.DepotOutLog.objects.filter(item_id=self.item_id)\
.filter(content_object=self.package)\
.filter(deleted=False)\
.values_list('qty', 'cost')
for qty, cost in info:
total += qty * cost
except:
pass
return total
def __unicode__(self):
return "PackageItem:%s" % self.id
_item_id = 0
_qty = 0
_deleted = False
def __init__(self, *args, **kwargs):
super(PackageItem, self).__init__(*args, **kwargs)
self._item_id = self.item_id
self._qty = self.qty
self._deleted = self.deleted
def save(self, *args, **kw):
        # when a packageitem is flagged deleted, _delete clears its itemwanted and itemlocked rows
        if self.deleted and not self._deleted and self.id:
            self._delete()
        # save the packageitem first so the qty read inside package_get_items below is current
        super(PackageItem, self).save(*args, **kw)
        # when the packageitem qty changed, rebuild the package's stock allocations
if self.qty != self._qty:
from package_action import package_get_items
package_get_items(self.package)
if self._item_id != self.item_id or self._qty != self.qty:
self.package.qty = self.package.get_qty()
self.package.sku_qty = self.package.get_sku_qty()
self.package.weight = self.package.get_weight()
self.package.save()
def _delete(self):
# delete item wanted
self.deleted = True
for itemwanted in ItemWanted.objects.filter(package_item=self).filter(deleted=False):
itemwanted.deleted = True
itemwanted.save()
# delete item locked
for itemlocked in ItemLocked.objects.filter(package_item=self).filter(deleted=False):
itemlocked.deleted = True
itemlocked.save()
def delete(self, *args, **kwargs):
self._delete()
super(PackageItem, self).delete(*args, **kwargs)
def get_admin_url(self):
content_type = ContentType.objects.get_for_model(self.__class__)
return reverse("admin:%s_%s_change" % (content_type.app_label, content_type.model), args=(self.id,))
def get_item_name(self):
en_name = self.item.product.category.name
cn_name = self.item.product.category.cn_name
return en_name, cn_name
class PackagePickError(models.Model):
pick = models.ForeignKey(depot.models.Pick) # 记录当时的pick单
package = models.ForeignKey(Package)
error_type = models.IntegerField(choices=Package.PICK_STATUS, default=4, verbose_name=u'异常类型')
is_processed = models.BooleanField(default=False, verbose_name=u'是否处理')
processor = models.ForeignKey(User, null=True, blank=True, verbose_name=u'处理人')
process_time = models.DateTimeField(null=True, verbose_name=u'处理时间')
error_info = models.TextField(default='', blank=True, verbose_name=u'异常相关信息')
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
deleted = models.BooleanField(default=False)
def __unicode__(self):
return "Package(%s):%s(%s)" % (self.package_id, self.get_error_type_display(), self.id)
class ItemLocked(models.Model):
    package_item = models.ForeignKey(PackageItem)  # one package_item can have several ItemLocked rows, locking the item's stock in different depots
depot_item = models.ForeignKey(depot.models.DepotItem)
qty = models.PositiveIntegerField(default=1)
note = models.TextField(default='', blank=True, null=True)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
deleted = models.BooleanField(default=False)
_deleted = False
def get_admin_url(self):
content_type = ContentType.objects.get_for_model(self.__class__)
return reverse("admin:%s_%s_change" % (content_type.app_label, content_type.model), args=(self.id,))
def __unicode__(self):
return "ItemLocked:%s" % self.id
class Meta:
verbose_name = u'包裹占用库存'
verbose_name_plural = u'包裹占用库存'
def _delete(self):
self.depot_item.qty_locked = self.depot_item.qty_locked - self.qty if self.depot_item.qty_locked > self.qty else 0
self.depot_item.save()
def __init__(self, *args, **kwargs):
super(ItemLocked, self).__init__(*args, **kwargs)
self._deleted = self.deleted
def save(self, *args, **kw):
if not self.deleted:
if not self.id:
self.depot_item.qty_locked = self.depot_item.qty_locked + self.qty
self.depot_item.save()
else:
if not self._deleted and self.id:
self._delete()
super(ItemLocked, self).save(*args, **kw)
def delete(self, *args, **kwargs):
self._delete()
super(ItemLocked, self).delete(*args, **kwargs)
class ItemWanted(models.Model):
item = models.ForeignKey(product.models.Item, verbose_name=u"采购需求物品")
depot = models.ForeignKey(depot.models.Depot, verbose_name=u"采购入库仓库")
package_item = models.ForeignKey(PackageItem)
purchaseorderitem = models.ForeignKey('supply.PurchaseOrderItem', null=True, blank=True)
qty = models.PositiveIntegerField(default=1, verbose_name=u"采购需求数量")
create_time = models.DateTimeField(auto_now_add=True, verbose_name=u"订单时间")
created = models.DateTimeField(auto_now_add=True, verbose_name=u"新增时间")
updated = models.DateTimeField(auto_now=True, verbose_name=u"修改时间")
deleted = models.BooleanField(default=False, verbose_name=u"是否已删除")
class Meta:
verbose_name = u'采购需求'
verbose_name_plural = u'采购需求'
def get_admin_url(self):
content_type = ContentType.objects.get_for_model(self.__class__)
return reverse("admin:%s_%s_change" % (content_type.app_label, content_type.model), args=(self.id,))
def _delete(self):
self.deleted = True
def delete(self, *args, **kwargs):
self._delete()
super(ItemWanted, self).delete(*args, **kwargs)
class NxbCode(models.Model):
code = models.CharField(max_length=50, unique=True)
is_used = models.IntegerField(default=0, verbose_name=u'是否使用过')
used_time = models.DateTimeField(blank=True, null=True, verbose_name=u'使用时间')
package = models.ForeignKey(Package, null=True, blank=True)
note = models.CharField(max_length=255, default='', blank=True, verbose_name='备注')
created = models.DateTimeField(auto_now_add=True, verbose_name=u"新增时间")
updated = models.DateTimeField(auto_now=True, verbose_name=u"修改时间")
deleted = models.BooleanField(default=False, verbose_name=u"是否已删除")
def __unicode__(self):
return "%s: %s" % (self.id, self.code)
| UTF-8 | Python | false | false | 31,052 | py | 182 | models.py | 116 | 0.521572 | 0.513734 | 0 | 695 | 40.486331 | 148 |
tmhpilipala/shiyanlou-code | 3,856,880,658,760 | fbf79baf9348d45b56e8a1a55231be25dc56cda9 | 7ac243945fe78c795242c45df3eb919972feb05d | /jump7.py | 60edadf5888cbe9724b732df8a82c602da5bc3c1 | []
| no_license | https://github.com/tmhpilipala/shiyanlou-code | 8bd4bb7154593beba7468f16fb3ebe76d5a636b7 | e1045f7e0d7702b01e73bd76defd9b2646164100 | refs/heads/master | 2023-03-16T18:01:02.527462 | 2021-03-07T08:01:09 | 2021-03-07T08:01:09 | 345,288,153 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | a=0
for a in range(100):
a = a+1
if a%7==0:
continue
elif a%10==7:
continue
elif a//10==0:
continue
print(a)
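# expected start of the output: 1 2 3 4 5 6 8 9 10 11 12 13 15 16 18 19 20 ...
# (7, 14, 17, 21, 27, ... and all of 70-79 are skipped)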
| UTF-8 | Python | false | false | 120 | py | 1 | jump7.py | 1 | 0.583333 | 0.475 | 0 | 10 | 11 | 20 |
Asdprom/spotify_to_google | 10,222,022,166,339 | b70bdaca4c8fc647532a127dfd6017d3f9c4e3ab | a1bb5e9b9f3ca16b8bd717d3fd44aa5a8e2270f5 | /transfer_collection.py | c52e7defa4c158ea878907dd83d5cb156a94db07 | []
| no_license | https://github.com/Asdprom/spotify_to_google | 1e267be3a1cbbafaedf465097dfa6fffb4cbf137 | c54f1865f21451c349f61b9d0f85f8fdb094fc6b | refs/heads/master | 2020-12-06T20:32:18.270544 | 2020-01-10T05:34:30 | 2020-01-10T05:34:30 | 232,547,136 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from gmusicapi import Mobileclient
import spotipy
import spotipy.util as util
def add_search_string(tracks, search_strings):
for i, item in enumerate(tracks['items']):
track = item['track']
search_strings.append(track['artists'][0]['name'] + ' ' + track['name'])
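# e.g. a track named 'One More Time' by 'Daft Punk' becomes the
# search query 'Daft Punk One More Time'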
client_id = 'your client id here'
client_secret='your secret client id here'
redirect_uri='http://localhost:4002'
username = input('Enter your account name/email: ')
token = util.prompt_for_user_token(username, 'user-library-read playlist-read-private', client_id, client_secret, redirect_uri)
playlist_name = input('Enter destination playlist name: ')
search_strings = []
if token:
sp = spotipy.Spotify(auth=token)
playlists = sp.current_user_playlists()
for playlist in playlists['items']:
print(f'PLAYLIST ', playlist['name'])
print(' total tracks', playlist['tracks']['total'])
playlist_to_transfer_name = input('Input name of playlist you want to transfer (if you want export saved songs - type \'saved\'): ')
playlist_to_transfer = next((x for x in playlists['items'] if x['name'] == playlist_to_transfer_name), None)
page_length = 20
page = 0
if playlist_to_transfer_name == 'saved':
while True:
results = sp.current_user_saved_tracks(page_length, page * page_length)
add_search_string(results, search_strings)
if len(results['items']) != page_length:
break
else:
page += 1
elif playlist_to_transfer == None:
print("Can't find playlist ", playlist_to_transfer_name)
exit(0)
else:
print("Playlist ", playlist_to_transfer_name)
playlist = sp.user_playlist(username, playlist_to_transfer['id'], fields="tracks,next")
tracks = playlist['tracks']
add_search_string(tracks, search_strings)
while tracks['next']:
tracks = sp.next(tracks)
add_search_string(tracks, search_strings)
else:
    print("Can't get token for", username)
    exit(0)  # nothing to transfer without a Spotify token
print(f'Detected {len(search_strings)} songs to transfer.')
mm = Mobileclient()
mm.perform_oauth()
mm.oauth_login(Mobileclient.FROM_MAC_ADDRESS)
playlist_id = mm.create_playlist(playlist_name)
print(f'Playlist \'{playlist_name}\' created.')
found_songs = 0
for row in search_strings:
print(f'\t Searching \'{row}\'.')
search_result = mm.search(row)
songs = search_result.get('song_hits')
song_id = None
if len(songs) > 0:
song_id = songs[0].get('track').get('storeId')
found_songs += 1
else:
print('Song not found.')
continue
mm.add_songs_to_playlist(playlist_id, song_id)
print(f'Imported {found_songs} songs.')
| UTF-8 | Python | false | false | 2,734 | py | 2 | transfer_collection.py | 1 | 0.642648 | 0.637527 | 0 | 80 | 33.175 | 136 |
davpge/McMaster-Scraper | 13,838,384,666,210 | ea5f80cc98bb919ebf1ce19ca7ae9114de169031 | d5000e33762ae4a73e4e0ddb52af28148feedf66 | /mc_master.py | 00244b1c0bbb8791a9c3a444096e0b68a5835d48 | []
| no_license | https://github.com/davpge/McMaster-Scraper | d781644864100cdfeba325644a7fe3263de87bf1 | 9e887d17cb8d2e53c9614ef98a08c0a0c72f1ecc | refs/heads/master | 2020-04-26T01:11:28.810136 | 2019-02-28T22:28:51 | 2019-02-28T22:28:51 | 173,196,637 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 12 11:18:28 2019
@author: david.page
"""
# import libraries
import re
from bs4 import BeautifulSoup
import csv
from itertools import zip_longest
import tkinter as tk
from tkinter import filedialog
root = tk.Tk()
root.withdraw()
root.update()
file_list = []
file_count = 0
filename = []
print('Save HTML of the McMaster Cart, right click->save as on browser page')
print('Choose the html file you saved onto the desktop')
print('A CSV file will be created in the same folder the .exe was run from')
while True:
file_path = filedialog.askopenfilename() # Open window to pick file until cancel
if len(file_path) > 0:
break
file_list.append(file_path) # add file path to list
file_count += 1
# path of the saved cart page (a local file, not a live URL)
quote_page = file_path
# read the local html file into the variable 'page'
page = open(quote_page)
# parse the html using beautiful soup and store in variable `soup`
soup = BeautifulSoup(page.read(), 'html.parser')
#Getting the item description
item_description = soup.find_all('div', attrs={'class':"title-text"})
item_description_stripped = [item.get_text() for item in item_description]
# Drop the 'Length, ft.' header rows. A list comprehension avoids the original
# bug of popping from a list while iterating over it, which skips elements.
item_description_stripped = [i for i in item_description_stripped if i != 'Length, ft.']
item_description_string = ','.join(item_description_stripped)
print (item_description_string)
#Getting the item quantity##################################################################################
quantity = soup.find_all('div', attrs={"class":"line-section line-quantity"})
quantity_string = [str(section) for section in quantity]
# Keep only the first number found in each quantity section.
number = [re.findall(r'\d+', strings)[0] for strings in quantity_string]
print(",".join(number))
# join the quantity strings directly; flattening them joined individual characters
number_string = ",".join(number)
#Getting item price##################################################################################
price = soup.find_all('div', attrs={'class':"line-section line-unit-price"})
price_stripped = []
for items in price:
price_stripped.append(str(items))
price_ea = []
for strings in price_stripped:
    price_ea.append(re.findall(r'[0-9.]+', strings))
price_string = ",".join([item for sublist in price_ea for item in sublist])
for items in price_ea:
    print(*items, sep=",")
##################################################################################
vendor_part = soup.find_all(attrs={'class':"line-part-number-input"})
vendor_stripped = []
for items in vendor_part:
vendor_stripped.append(str(items))
vendor_text_store = ""
for items in vendor_stripped:
vendor_text_split = items.split("value=")[1]
vendor_text_split= vendor_text_split.split("/>")[0]
vendor_text_split= vendor_text_split.split('"')[1]
vendor_text_store += vendor_text_split + ","
print (vendor_text_store)
print (item_description_string)
print (price_string)
print (number_string)
# rstrip(',') drops the trailing comma so zip_longest doesn't emit a blank final row
data_list = zip_longest(vendor_text_store.rstrip(',').split(','),
                        item_description_string.split(','),
                        price_string.split(','),
                        number_string.split(','))
with open('numbers.csv', 'a',newline="") as csvFile:
writer = csv.writer(csvFile,delimiter=',')
writer.writerow(['Part#','Description','Price','Quantity'])
for values in data_list:
print(values)
writer.writerow([values[0],values[1],values[2],values[3]])
page.close()
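# numbers.csv then holds one row per cart line, e.g. (hypothetical values):
#   91251A052,Alloy Steel Socket Head Screw,8.04,2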
| UTF-8 | Python | false | false | 4,101 | py | 2 | mc_master.py | 1 | 0.629241 | 0.622163 | 0 | 154 | 25.577922 | 136 |
Evanyok/PyLab | 14,070,312,888,188 | d15d95a961d8ef6456194b6b4cdf98483958941c | 35e443131031886fd627f9967772ae76287e17b8 | /design-modes/decorator1.py | fc0e5c3072c729617901c18483b7d011114dcc66 | []
| no_license | https://github.com/Evanyok/PyLab | 4fdae03c2d4c5ad04fc7a4accf4f8e1cfce72edf | 11171f6e7d9a76ad98196e134002e1bf6381f24f | refs/heads/master | 2020-05-09T16:42:13.673481 | 2019-04-28T07:40:51 | 2019-04-28T07:40:51 | 181,280,657 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | def wrapper(tag):
def handler(fn):
def inner(*args, **kvargs):
print('wrapper: %s' % tag)
setattr(args[0], 'tag', tag)
            fn(*args, **kvargs)  # forward all arguments instead of assuming exactly two
return inner
return handler
class ts:
tag = 'tag'
@wrapper('p')
def foo1(self, arg):
if hasattr(self, 'tag'):
print('%s - %s' % (getattr(self, 'tag'), arg))
f = ts()
f.foo1('h')
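# Note: `inner` above replaces foo1, so foo1.__name__ and __doc__ are lost.
# A hedged variant (hypothetical, not used below) would preserve them:
#   import functools
#   def wrapper(tag):
#       def handler(fn):
#           @functools.wraps(fn)
#           def inner(*args, **kvargs):
#               ...
#           return inner
#       return handler
# The class below implements the same parametrised decorator via __call__.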
class wrap(object):
def __init__(self, tag):
self.tag = tag
def __call__(self, fn):
def wrapped(*args, **kvargs):
setattr(args[0], 'tag', self.tag)
            return fn(*args, **kvargs)  # propagate the wrapped function's return value
return wrapped
class tso:
@wrap(tag= 'hi')
def foo(self):
if hasattr(self, 'tag'):
print('%s' % getattr(self, 'tag'))
ff = tso()
ff.foo() | UTF-8 | Python | false | false | 825 | py | 34 | decorator1.py | 27 | 0.471515 | 0.464242 | 0 | 37 | 21.324324 | 58 |
AlesyaKovaleva/IT-Academy-tasks | 6,270,652,275,765 | d11d4859d414e58845acd0beefbc81c377cc9cfb | 30b1ea6af6251cc42fd02e6e9f2c2dbbeba3176f | /tasks_12/lift.py | 8cfe42b2ea13f48a1ace920f177a2cfced56db74 | []
| no_license | https://github.com/AlesyaKovaleva/IT-Academy-tasks | 96fc080d3db17e854e4a59b202bbd1b761daabc2 | 64249fe0b7a95ab5a82ffdbdcb02aaa8670b5c27 | refs/heads/master | 2022-03-03T09:18:12.673736 | 2019-10-02T08:00:51 | 2019-10-02T08:00:51 | 121,854,244 | 0 | 0 | null | false | 2019-10-02T08:00:53 | 2018-02-17T12:39:59 | 2019-10-02T07:54:14 | 2019-10-02T08:00:52 | 51 | 0 | 0 | 0 | Python | false | false | """
Task 0
Implement an Elevator class. The class must provide a lift method that
registers a call of the elevator. Adding/subtracting instances of the class
must return the result of the corresponding arithmetic operation. Subtracting
from an elevator that has not yet made any lifts must raise an
invalid-operation error. Provide a way to compare which of the elevators has
made more lifts.
The total number of lifts across all elevators must also be tracked.
String conversion must print detailed information about the elevator:
its name, its number of lifts and its share of the total lifts of all elevators.
"""
import sys
class IncorrectOperation(Exception):
pass
class Elevator:
total_call = 0
def __init__(self, name):
self.name = name
self.call = 0
def lift(self):
Elevator.total_call += 1
self.call += 1
def __add__(self, other):
result = self.call + other.call
return result
def __sub__(self, other):
if self.call < other.call:
raise IncorrectOperation
result = self.call - other.call
return result
def __gt__(self, other):
result = self.call > other.call
return result
def __lt__(self, other):
result = self.call < other.call
return result
def __eq__(self, other):
result = self.call == other.call
return result
def __str__(self):
if self.call == 0:
percent = 0
else:
percent = round(self.call / Elevator.total_call * 100, 2)
        return '\nElevator #{}\nNumber of lifts: {}\nShare of total lifts of all elevators: {}'\
            .format(self.name, self.call, percent)
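# Quick illustration of the overloaded operators (hypothetical session):
#   e1, e2 = Elevator(1), Elevator(2)
#   e1.lift(); e1.lift(); e2.lift()
#   e1 + e2   # -> 3
#   e1 - e2   # -> 1
#   e1 > e2   # -> True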
class LiftManager:
"""
    Elevator control commands:
    lift - call the given elevators
    add - add the call counts of two elevators
    sub - subtract the call counts of two elevators
    cmp - compare the call counts of two elevators
    all - total number of calls of all elevators
    print - detailed info about the given elevators
    help - list the commands
    exit - quit the program
    Example: lift 1 2 3
"""
def __init__(self, number_elevator):
self.list_elevators = [Elevator(lift) for lift in range(1, number_elevator + 1)]
def lift(self, arguments):
if len(arguments) < 1:
            print('Enter the number of at least one elevator to call.')
for element in arguments:
index = element - 1
self.list_elevators[index].lift()
def add(self, arguments):
if len(arguments) != 2:
            print('Choose exactly two elevators.')
return
index1, index2 = arguments[0] - 1, arguments[1] - 1
lift1, lift2 = self.list_elevators[index1], self.list_elevators[index2]
result = lift1 + lift2
        print('Elevator #{} + Elevator #{} = {}'.format(lift1.name, lift2.name, result))
def sub(self, arguments):
if len(arguments) != 2:
            print('Choose exactly two elevators.')
return
index1, index2 = arguments[0] - 1, arguments[1] - 1
lift1, lift2 = self.list_elevators[index1], self.list_elevators[index2]
try:
result = lift1 - lift2
            print('Elevator #{} - Elevator #{} = {}'.format(lift1.name, lift2.name, result))
except IncorrectOperation:
            print('Cannot perform the operation. Elevator #{} has fewer calls than the other one.'.format(lift1.name))
def cmp(self, arguments):
if len(arguments) != 2:
            print('Choose exactly two elevators.')
return
index1, index2 = arguments[0] - 1, arguments[1] - 1
lift1, lift2 = self.list_elevators[index1], self.list_elevators[index2]
if lift1 == lift2:
            print('Elevator #{} and elevator #{} have the same number of calls ({} time(s))'
                  .format(lift1.name, lift2.name, lift1.call))
        elif lift1 > lift2:
            print('Elevator #{} has more calls ({} time(s)) than elevator #{} ({} time(s))'
                  .format(lift1.name, lift1.call, lift2.name, lift2.call))
        elif lift1 < lift2:
            print('Elevator #{} has fewer calls ({} time(s)) than elevator #{} ({} time(s))'
                  .format(lift1.name, lift1.call, lift2.name, lift2.call))
def all_call(self, *args):
        print('Total number of calls of all elevators: {} time(s)'.format(Elevator.total_call))
def print_lift(self, arguments):
for element in arguments:
index = element - 1
print(self.list_elevators[index])
def help(self, *args):
print(LiftManager.__doc__)
commands = {
'lift': lift,
'add': add,
'sub': sub,
'cmp': cmp,
'all': all_call,
'print': print_lift,
'help': help,
}
def main():
while True:
try:
            number_elevator = int(input('Enter the number of elevators: '))
break
except ValueError:
print("Вы ввели неверное значение.")
manager = LiftManager(number_elevator)
name = [lift.name for lift in manager.list_elevators]
    print('Your elevators:\n', '[' + '] ['.join(map(str, name)) + ']')
manager.help()
while True:
        command_input = input('Enter a command >> ').lower().split()
if not command_input:
continue
command = command_input[0]
try:
arguments = list(map(int, command_input[1:]))
except ValueError:
            print('Invalid elevator number.')
continue
if command == 'exit':
sys.exit()
if command in LiftManager.commands:
func = LiftManager.commands[command]
try:
func(manager, arguments)
except IndexError:
                print('You selected an elevator that does not exist.')
else:
            print('No such command exists.')
if __name__ == '__main__':
main()
| UTF-8 | Python | false | false | 7,230 | py | 65 | lift.py | 58 | 0.579963 | 0.566538 | 0 | 198 | 29.09596 | 109 |
pradeeppanayal/pat | 489,626,284,613 | b0511bc3b8598ea6849ab2ab4cc1b3d9c0072eca | 10013c3dd136f931a0f4c72d0bdd433da6c2040a | /cgi/cgi/v2/Lib/vmactions/vminfo.py | 40a83e4e652511c0f66bdea715b4e4e18af5a035 | []
| no_license | https://github.com/pradeeppanayal/pat | fe4c79a7b6e5139d899415d5c7a7b97ba77fd8c7 | 98d5a9a955fad673cce2bcfe0e5166b0b7ff8bb5 | refs/heads/master | 2020-12-02T17:38:04.568770 | 2017-07-21T11:33:27 | 2017-07-21T11:33:27 | 96,403,078 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
"""
"""
import atexit
import sys
import re
sys.path.append('../')
from pyVim.connect import SmartConnect, Disconnect
from pyVmomi import vim, vmodl
def getDataStore(path):
    try:
        return re.match(r'.*\[(.*)\].*', path).group(1)
    except AttributeError:  # no '[datastore]' bracket in the path
        return ''
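# Example (assumed vSphere path format): getDataStore('[datastore1] vm/vm.vmx') -> 'datastore1'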
def getVMInfo(ip,un,pwd,dname):
si = SmartConnect(host=ip,
user=un,
pwd=pwd)
    assert si, "Cannot connect to specified host using specified username and password"
    atexit.register(Disconnect, si)  # close the session on exit; the unused imports suggest this was intended
    content = si.content
objView = content.viewManager.CreateContainerView(content.rootFolder,
[vim.VirtualMachine],
True)
vmList = objView.view
objView.Destroy()
vm = [vm for vm in vmList if vm.name == dname]
assert len(vm) ==1,"Device info not found in the server"
vm = vm[0]
moid = vm._moId
datastore = getDataStore( vm.summary.config.vmPathName)
return {'moid':moid,'datastore':datastore}
if __name__=='__main__':
    print(getVMInfo("10.10.100.201", 'root', 'Payoda#89', 'vEOS-65.91'))
| UTF-8 | Python | false | false | 1,140 | py | 90 | vminfo.py | 82 | 0.57807 | 0.561404 | 0 | 42 | 26.142857 | 86 |
hunsoo0823/python_algorithm | 11,793,980,208,609 | decfa9f1defd5dc93c4ffb8b77bf0f435b9cfdb9 | 78d47abbdc829e5fceb22ac350a2cf5b058e5316 | /dynamic/edit_distance_sol.py | 9376b34867213213eb1308258f5a430d9a9129c1 | []
| no_license | https://github.com/hunsoo0823/python_algorithm | c7fba216f7eb58ea9864916cbaa2040572f4f5ba | a9ebc0ead5b1be341860621946871f53f82722a2 | refs/heads/master | 2023-07-09T04:45:24.410323 | 2021-08-14T08:56:52 | 2021-08-14T08:56:52 | 295,997,179 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Dynamic programming for computing the minimum edit distance (Levenshtein distance)
def edit_dist(str1, str2):
n = len(str1)
m = len(str2)
    # Initialize the 2-D DP table
dp = [[0] * (m+1) for _ in range(n+1)]
    # Base cases: transforming to/from the empty prefix
for i in range(1, n+1):
dp[i][0] = i
for j in range(1, m+1):
dp[0][j] = j
for i in range(1, n+1):
for j in range(1, m+1):
            # If the characters match, copy the value from the upper-left cell
            if str1[i - 1] == str2[j - 1]:
                dp[i][j] = dp[i-1][j-1]
            # If they differ, take the minimum of the three cases
            else: # insertion (left), deletion (up), substitution (upper-left), plus cost 1
                dp[i][j] = 1 + min(dp[i][j-1], dp[i-1][j], dp[i-1][j-1])
return dp[n][m]
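# Sanity check with unit costs: edit_dist("sunday", "saturday") == 3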
# Read the two strings
str1 = input()
str2 = input()
# Print the minimum edit distance
print(edit_dist(str1, str2))
| UTF-8 | Python | false | false | 1,049 | py | 73 | edit_distance_sol.py | 73 | 0.460277 | 0.417402 | 0 | 32 | 23.40625 | 72 |
Bkairis/both_sides | 16,638,703,328,693 | f68be0f1b92e86de6e64288f69dfb0c5ef3fe401 | 2584fb1a623ccf0e553e1f9476209f048728b9d8 | /Analyze.py | 18f929844648c98e1bfe448b42f6d50639effe4d | []
| no_license | https://github.com/Bkairis/both_sides | 9a952c161a93c7cf1c0cc0524591a7328e1e6b05 | 906472640d1b9a58041c303b1f092fb4821d81d0 | refs/heads/master | 2020-07-05T23:23:04.363604 | 2016-12-14T14:45:02 | 2016-12-14T14:45:02 | 73,975,609 | 0 | 0 | null | false | 2016-11-17T01:17:12 | 2016-11-17T00:56:31 | 2016-11-17T01:05:17 | 2016-11-17T01:17:12 | 0 | 0 | 0 | 0 | Python | null | null | def compare_pronouns(text1, text2):#function calls the previously defined functions to identify the percentage of first person singular, first person plural, and third person plural pronouns in the texts
import re
def convert(textfile): #takes a textfile as an argument and opens it. Any time the function is called, it treats the text file as an object (which can then be named within the function depending on its purpose)
        with open(textfile) as source:  # close the file handle when done
            return source.read()
    def wordcount(text): #finds the total number of words in a text
text = convert(text)
words = re.findall(' [A-Za-z]+', text)
return len(words)
def first_person_singular(text): #finds the total number of first person singular pronouns
#I, me, my, mine
text = convert(text)
fps = re.findall(' I ', text) #I is never lowercase, so no need to include
fps1 = re.findall('"I ', text) #dialogue
        fps2 = re.findall(' me ', text)#me does not begin sentences, so no need to include uppercase or dialogue; the trailing space avoids matching words like 'men'.
fps3 = re.findall(' my ', text) #lowercase
fps4 = re.findall(' My ', text) #uppercase
fps5 = re.findall('"My ', text) #dialogue
fps6 = re.findall(' mine ', text) #lowercase, doesn't begine sentences so no need to include uppercase or dialogue.
return (len(fps)) + (len(fps1)) + (len(fps2)) + (len(fps3)) + (len(fps4)) + (len(fps5)) + (len(fps6))
def first_person_plural(text): #finds the total number of first person plural pronouns
#we, us, our, ours
text = convert(text)
fpp = re.findall(' we ', text) #lowercase
fpp1 = re.findall(' We ', text) #uppercase
fpp2 = re.findall('"We ', text) #dialogue
fpp3 = re.findall(' us ', text) #doesn't begin sentences, no uppercase or dialoguwe required
fpp4 = re.findall(' our ', text) #lowercase
fpp5 = re.findall(' Our ', text) #uppercase
fpp6 = re.findall('"Our ', text) #dialogue
fpp7 = re.findall(' ours ', text) #lowercase
fpp8 = re.findall(' Ours ', text) #uppercase
fpp9 = re.findall('"Ours ', text) #dialogue
return (len(fpp)) + (len(fpp1)) + (len(fpp2)) + (len(fpp3)) + (len(fpp4)) + (len(fpp5)) + (len(fpp6)) + (len(fpp7)) + (len(fpp8)) + (len(fpp9))
def third_person_plural(text): #finds the total number of third person plural pronouns
#they, them, their, theirs, they're
text = convert(text)
tpp = re.findall(' they ', text) #lowercase
tpp1 = re.findall( ' They ', text) #uppercase
        tpp2 = re.findall('"They ', text) #dialogue; without the quote this double-counted the uppercase matches
tpp3 = re.findall(' them ', text) #doesn't begin sentences, no uppercase or dialogue required
tpp4 = re.findall(' their ', text) #lowercase
tpp5 = re.findall(' Their ', text) #uppercase
tpp6 = re.findall('"Their ', text) #dialogue
tpp7 = re.findall(' theirs ', text) #lowercase
tpp8 = re.findall(' Theirs ', text) #uppercase
tpp9 = re.findall('"Theirs ', text) #dialogue
tpp10 = re.findall(" they're ", text) #lowercase
tpp11 = re.findall(" They're ", text) #uppercase
return (len(tpp)) + (len(tpp1)) + (len(tpp2)) + (len(tpp3)) + (len(tpp4)) + (len(tpp5)) + (len(tpp6)) + (len(tpp7)) + (len(tpp8)) + (len(tpp9)) + (len(tpp10)) + (len(tpp11))
text1_percent_fps = (first_person_singular(text1) / float(wordcount(text1))) * 100 #divides the number of first person singular pronouns by the number of words in the text and then multiplies by 100 to find the percentage
text2_percent_fps = (first_person_singular(text2) / float(wordcount(text2))) * 100
text1_percent_fpp = (first_person_plural(text1) / float(wordcount(text1))) * 100 #percentage of first person plural pronouns
text2_percent_fpp = (first_person_plural(text2) / float(wordcount(text2))) * 100
text1_percent_tpp = (third_person_plural(text1) / float(wordcount(text1))) * 100 #percentage of third person plural pronouns
text2_percent_tpp = (third_person_plural(text2) / float(wordcount(text2))) * 100
return 'Text 1 first person singular pronouns: ' + str(text1_percent_fps) + '%', 'Text 2 first person singular pronouns: ' + str(text2_percent_fps) + '%', 'Text 1 first person plural pronouns: ' + str(text1_percent_fpp) + '%', 'Text 2 first person plural pronouns: ' + str(text2_percent_fpp) + '%', 'Text 1 third person plural pronouns: ' + str(text1_percent_tpp) + '%', 'Text 2 third person plural pronouns: ' + str(text2_percent_tpp) + '%'
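# A more robust sketch (assumption: word-boundary matching is acceptable here)
# would count each pronoun case-insensitively with \b anchors, which also
# catches pronouns next to punctuation:
#   def count_pronoun(text, word):
#       return len(re.findall(r'\b' + word + r'\b', text, flags=re.IGNORECASE))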
| UTF-8 | Python | false | false | 4,682 | py | 4 | Analyze.py | 3 | 0.621316 | 0.598035 | 0 | 68 | 66.808824 | 445 |
antoh/memml | 15,814,069,595,535 | 92c775ee9bfc1889c1f353cbd7fb2a4a4394b5c8 | 630def1284eb4f3b03de58071503c00def616b83 | /AUC-ROC.py | bf71e7486f4b7d69fe0706b265698085136eebbb | []
| no_license | https://github.com/antoh/memml | 81b468e3bb20295ea0413800d057b3fe4199b563 | 4792f70e16ceb43bccfee75e78a6d8cac3e593d7 | refs/heads/master | 2022-09-26T01:15:23.819364 | 2020-05-29T16:17:31 | 2020-05-29T16:17:31 | 267,855,263 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Classification Area under curve
import warnings
import pandas
from sklearn import model_selection
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score, roc_curve
import matplotlib.pyplot as plt
warnings.filterwarnings('ignore')
url = "https://raw.githubusercontent.com/jbrownlee/Datasets/master/pima-indians-diabetes.data.csv"
dataframe = pandas.read_csv(url, header=None)  # the raw file has no header row
dat = dataframe.values
X = dat[:, :-1]
y = dat[:, -1]
test_size = 0.33
seed = 7
# split data
X_train, X_test, y_train, y_test = model_selection.train_test_split(X, y, test_size=test_size, random_state=seed)
model = LogisticRegression()
model.fit(X_train, y_train)
# predict probabilities
probs = model.predict_proba(X_test)
# keep probabilities for the positive outcome only
probs = probs[:, 1]
auc = roc_auc_score(y_test, probs)
print('AUC - Test Set: %.2f%%' % (auc * 100))
# calculate roc curve
fpr, tpr, thresholds = roc_curve(y_test, probs)
# plot no skill
plt.plot([0, 1], [0, 1], linestyle='--')
# plot the roc curve for the model
plt.plot(fpr, tpr, marker='.')
plt.xlabel('False positive rate')
plt.ylabel('Sensitivity/ Recall')
# show the plot
plt.show()
| UTF-8 | Python | false | false | 1,111 | py | 5 | AUC-ROC.py | 4 | 0.726373 | 0.712871 | 0 | 38 | 28.236842 | 113 |
samedii/rainbow | 7,112,465,862,200 | 5fbf759e46dbccf5b7ca29a3204e01f9e21706cf | 072227631e61ec26ab3d08321828e3342757c3c8 | /stock_env.py | 40ee79ad2ab5cb437aeae5a14943dfb4e2dad7e4 | [
"MIT"
]
| permissive | https://github.com/samedii/rainbow | 7f6ec8cbfde4dbecf4f26a413476b4cd204c1c2a | 0ac4b08b931c87562ac7fc2cc6f1dbb332df7be6 | refs/heads/master | 2020-03-27T09:10:32.669769 | 2018-08-27T15:45:45 | 2018-08-27T15:45:45 | 146,319,212 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from collections import deque
import torch
import cv2 # Note that importing cv2 before torch may cause segfaults?
import pickle
import numpy as np
import pandas as pd
class Env():
def __init__(self, args):
self.device = args.device
self.actions = {
'out': 0,
'in': 1
}
self.position = 0
self.training = True # Consistent with model training mode
self.threshold = 0.01 # courtage and spread
data = get_data()
self.Xs, self.ys, self.lengths = prepare_data(data)
self.current_stock = 0
self.current_day = 0
def _get_state(self):
return torch.Tensor(list(self.Xs[self.current_stock][self.current_day]) + [self.position]).to(self.device)
def reset(self):
    self.current_stock = (self.current_stock + 1) % len(self.Xs)  # wrap around instead of indexing past the last stock
self.current_day = 0
self.position = 0
return self._get_state()
  def step(self, action):
    reward = self.ys[self.current_stock][self.current_day] if action == 1 else 0
    if self.position == 0 and action == 1:
      reward += np.log(1 - 2*self.threshold)  # entry cost: courtage and spread
    self.position = action   # remember the new position
    self.current_day += 1    # advance time (the original never did, so episodes never ended)
    done = (self.current_day >= self.lengths[self.current_stock] - 1)
    # Return state, reward, done
    return self._get_state(), reward, done
def train(self):
self.training = True
def eval(self):
self.training = False
def action_space(self):
return len(self.actions)
def render(self):
pass
def close(self):
pass
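# Minimal rollout sketch (assumes an `args` object with a `device` attribute
# and that the pickled data file is present):
#   env = Env(args)
#   state, done = env.reset(), False
#   while not done:
#     state, reward, done = env.step(np.random.randint(2))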
def get_data():
database = 'SSE'
file_object = open('prepared_data_'+database,'rb')
data = pickle.load(file_object)
file_object.close()
return data
def prepare_data(data):
X = []
y = []
lengths = []
for stock in data:
last = stock['Last'].values
log_rdiff_last = np.log(last[1:]/last[:-1])
log_rdiff_last = np.expand_dims(log_rdiff_last, axis=1)
lag1 = log_rdiff_last[4:-1]
lag5 = log_rdiff_last[:-5]
X_stock = np.concatenate((lag1, lag5), axis=1) # TODO: do something more useful
y_stock = log_rdiff_last[5:]
is_nan = np.logical_or(np.isnan(X_stock).any(axis=1), np.isnan(y_stock).any(axis=1))
X_stock = X_stock[~is_nan]
y_stock = y_stock[~is_nan]
X.append(X_stock)
y.append(y_stock)
lengths.append(y_stock.shape[0])
return X, y, lengths | UTF-8 | Python | false | false | 2,251 | py | 1 | stock_env.py | 1 | 0.629054 | 0.613949 | 0 | 96 | 22.458333 | 110 |
numba/Meta | 7,327,214,210,139 | db4d515839d52c1f7888166a9030bab5906c3294 | 77845f48ef5819e8e641cd72c059fa5cff35d82e | /setup.py | 905779950b4db44c26722aa5154d6e164bc75e72 | [
"BSD-2-Clause"
]
| permissive | https://github.com/numba/Meta | 5fe2c2b7f6981ec073d6e3e10d9d0a0a697341cf | d11b4fa38fc51858f7f45b58d992652ddd39cb92 | refs/heads/develop | 2023-06-10T08:21:29.211754 | 2013-02-22T23:49:20 | 2013-02-22T23:49:20 | 5,077,375 | 2 | 2 | null | true | 2013-10-05T22:17:58 | 2012-07-17T04:43:22 | 2013-10-05T20:46:01 | 2013-02-23T01:28:12 | 180 | null | 4 | 1 | Python | null | null | # Copyright (c) 2008-2011 by Enthought, Inc.
# All rights reserved.
from setuptools import setup, find_packages
try:
long_description = open('README.rst').read()
except IOError as err:
long_description = str(err)
try:
version_str = open('version.txt').read()
except IOError as err:
version_str = '???'
setup(
name='meta',
version=version_str,
author='Sean Ross-Ross, Enthought Inc.',
author_email='srossross@enthought.com',
maintainer='Sean Ross-Ross',
maintainer_email='enthought-dev@enthought.com',
url='http://srossross.github.com/Meta',
classifiers=[c.strip() for c in """\
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: BSD License
Operating System :: MacOS
Operating System :: Microsoft :: Windows
Operating System :: OS Independent
Operating System :: POSIX
Operating System :: Unix
Programming Language :: Python
Topic :: Scientific/Engineering
Topic :: Software Development
Topic :: Software Development :: Libraries
""".splitlines() if len(c.strip()) > 0],
description='Byte-code and ast programming tools',
long_description=long_description,
include_package_data=True,
license='BSD',
packages=find_packages(),
platforms=["Windows", "Linux", "Mac OS-X", "Unix", "Solaris"],
entry_points={
'console_scripts': [
'depyc = meta.scripts.depyc:main',
],
}
)
| UTF-8 | Python | false | false | 1,629 | py | 26 | setup.py | 20 | 0.596071 | 0.589932 | 0 | 52 | 30.326923 | 74 |
lapis42/boj | 5,626,407,193,034 | ba950439dacd4b2ec43f98876abb1cf661ace648 | d74ec030bf38db5fdaa594280784ad1cab999690 | /boj11779_dijkstra_track.py | 8bf30d9ea9103f188459bf4b3a35ad36d8b75df4 | []
| no_license | https://github.com/lapis42/boj | 36c4964ebbd3f2f60282fa7942cad3fa386f3823 | 4f225f397b9ff919cfda26772036aff7d9637d07 | refs/heads/main | 2023-05-31T08:28:10.345232 | 2021-06-14T01:49:28 | 2021-06-14T01:49:28 | 376,677,591 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import sys
from heapq import heappush, heappop
input = sys.stdin.readline
# input
n_node, n_edge = int(input()), int(input())
edges = [[] for _ in range(n_node + 1)]
for _ in range(n_edge):
a, b, c = map(int, input().split())
edges[a].append((c, b))
start, end = map(int, input().split())
# variables
dist = [sys.maxsize] * (n_node + 1)
track = [0] * (n_node + 1)
# dijkstra
Q = [(0, start)] # dist / start
while Q:
dist_now, node_now = heappop(Q)
if dist_now > dist[node_now]:
continue
for dist_next, node_next in edges[node_now]:
dist_next += dist_now
if dist_next < dist[node_next]:
dist[node_next] = dist_next
track[node_next] = node_now
heappush(Q, (dist_next, node_next))
# backward
i = end
ans = [end]
while i != start:
i = track[i]
ans.append(i)
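# track[v] holds the predecessor of v on the shortest path, so following it
# from `end` back to `start` recovers the route in reverse order.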
# output
print(dist[end])
print(len(ans))
print(*ans[::-1])
| UTF-8 | Python | false | false | 910 | py | 127 | boj11779_dijkstra_track.py | 127 | 0.575824 | 0.569231 | 0 | 42 | 20.666667 | 48 |
chenhaipei/share_bicycle_system | 2,611,340,132,846 | 26d84d3e9f6c0a0e487b8093f16ea1fe9aa8ae3e | 93b5d846e0ba77a2bcb2ef21504b56a565182bfb | /bike/migrations/0002_auto_20211003_1157.py | c97cc8d536f07d3ee2248805d08e48b0f0268d5e | []
| no_license | https://github.com/chenhaipei/share_bicycle_system | a16633844ea59bc43ed53e11a327642f901a06f4 | 6e4c1826a1b9e4b02962304c12452f60ae9fea95 | refs/heads/main | 2023-08-23T16:21:21.913542 | 2021-10-21T04:24:15 | 2021-10-21T04:24:15 | 418,426,133 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 3.2.7 on 2021-10-03 03:57
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('bike', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='bike',
name='need_repair',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='bike',
name='available',
field=models.BooleanField(default=True),
),
migrations.AlterField(
model_name='bike',
name='unique',
field=models.CharField(default='0d8ddaaedea6', max_length=12, unique=True, verbose_name='ID'),
),
migrations.AlterField(
model_name='customer',
name='unique',
field=models.CharField(default='bb36c528', max_length=8, unique=True, verbose_name='ID'),
),
migrations.AlterField(
model_name='record',
name='unique',
field=models.CharField(default='79f2dca47ab54530b8d3e7434d3d12c3', max_length=16, unique=True, verbose_name='ID'),
),
migrations.AlterField(
model_name='transaction',
name='finish_time',
field=models.DateTimeField(auto_now=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AlterField(
model_name='transaction',
name='unique',
field=models.CharField(default='6fb520db67b94594b314a254bffa2341', max_length=16, unique=True, verbose_name='ID'),
),
]
| UTF-8 | Python | false | false | 1,675 | py | 14 | 0002_auto_20211003_1157.py | 6 | 0.580896 | 0.536119 | 0 | 50 | 32.5 | 126 |
bmyerz/iowa-computer-science-methods | 2,774,548,900,263 | 01dcc58eb667ee624d167a71d9c2f0c43ae58150 | d6b1a72f2726397f5fc7f8493362b2f0a5c20b5d | /jes-code/backwards.py | d86b80649e246fdb73f69f6e4aafa1beed6efba1 | []
| no_license | https://github.com/bmyerz/iowa-computer-science-methods | a9d8e42cff9323d0d8419a000278498e46ba8f3a | bd76a2f65c1cf291ca0255e7c03e06feba6231b3 | refs/heads/master | 2021-06-20T03:22:57.555524 | 2021-02-22T17:29:18 | 2021-02-22T17:29:18 | 189,730,334 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | s = makeSound("houstonproblem.wav")
backwards = duplicateSound(s)
for i in range(getLength(s)):
v = getSampleValueAt(s, i)
setSampleValueAt(backwards, getLength(s)-1-i, v)
blockingPlay(s)
blockingPlay(backwards)
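# Sample i of the source lands at index getLength(s)-1-i of the copy, e.g. for
# a 4-sample sound: 0->3, 1->2, 2->1, 3->0, so the copy plays in reverse.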
| UTF-8 | Python | false | false | 218 | py | 19 | backwards.py | 19 | 0.747706 | 0.743119 | 0 | 9 | 23.222222 | 50 |
JarvisLee0423/Pytorch_Practice | 13,915,694,082,923 | b5aea2913e72e4679bee246d5ff7965fc193fa15 | b01ac5e290cdb70044233ab8502f5e8d0f36f347 | /Simple_Neural_Network_with_Pytorch/Numpy_Implementation/NumpyNN.py | 82287f1d00eb310987a04f55bf0eceda3db74b1c | []
| no_license | https://github.com/JarvisLee0423/Pytorch_Practice | e681332c2274d7c9b43b78b6c352edf4ec95a6c6 | 984df607ecd8f369b40eda907a34368b6885b2da | refs/heads/master | 2023-02-11T20:23:40.108930 | 2020-12-30T16:39:54 | 2020-12-30T16:39:54 | 288,927,146 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #============================================================================================#
# Copyright: JarvisLee
# Date: 2020/08/20
# Project Name: NumpyNN.py
# Description: Using library numpy to build the 3-layers neural network.
# Model Description: Hypothesis 1 -> n_1 nodes
# tanh -> Activation Function
# Hypothesis 2 -> n_2 nodes
# tanh -> Activation Function
# Hypothesis 3 -> n_3 nodes
# sigmoid -> Classifier
#============================================================================================#
# Importing the necessary library.
import numpy as np
# Setting the hyperparameters.
# The number of the training instances.
m = 100
# The number of features of the training data.
n_0 = 1000
# The number of nodes of the first layer.
n_1 = 100
# The number of nodes of the second layer.
n_2 = 100
# The number of nodes of the third layer.
n_3 = 1
# The learning rate of the gradient descent.
learning_rate = 0.01
# The number of training epoches.
epoch = 1000
# Creating the model.
class NumpyModel3():
# Defining the training method.
@staticmethod
def trainer(m = m, n_0 = n_0, n_1 = n_1, n_2 = n_2, n_3 = n_3, lr = learning_rate, epoch = epoch):
# Preparing the training data.
# The initialization of the training data.
X = np.random.rand(n_0, m)
# The initialization of the truth label.
Y = np.random.randint(0, 2, (n_3, m))
# The weight of the first layer.
W_1 = np.random.rand(n_1, n_0)
# The weight of the second layer.
W_2 = np.random.rand(n_2, n_1)
# The weight of the third layer.
W_3 = np.random.rand(n_3, n_2)
# The bias of the first layer.
b_1 = np.random.rand(n_1, 1)
# The bias of the second layer.
b_2 = np.random.rand(n_2, 1)
# The bias of the third layer.
b_3 = np.random.rand(n_3, 1)
# Appling the normalization of the training data.
X = (X - np.mean(X)) / np.std(X)
# Applying the gradient descent.
for each in range(epoch):
# Forward propagation.
# The hypothesis computation of the first layer.
Z_1 = np.dot(W_1, X) + b_1
# The activation computation of the first layer.
A_1 = np.tanh(Z_1)
# The hypothesis computation of the second layer.
Z_2 = np.dot(W_2, A_1) + b_2
# The activation computation of the second layer.
A_2 = np.tanh(Z_2)
# The hypothesis computation of the third layer.
Z_3 = np.dot(W_3, A_2) + b_3
# The activation computation of the classifier.
A = 1 / (1 + np.exp(-Z_3))
# The binary cross-entropy cost function.
Cost = 1 / m * np.sum(-np.multiply(Y, np.log(A)) - np.multiply((1 - Y), np.log(1 - A)))
# Printing the value of the cost function.
print("The loss of the epoch " + str(each + 1) + " is: " + str(Cost))
# Backward propagation.
# The derivative of Z_3.
dZ_3 = A - Y
# The derivative of W_3.
dW_3 = 1 / m * np.dot(dZ_3, A_2.T)
# The derivative of b_3.
dB_3 = 1 / m * np.sum(dZ_3, axis = 1, keepdims = True)
# The derivative of Z_2.
dZ_2 = np.multiply(np.dot(W_3.T, dZ_3), (1 - np.square(np.tanh(Z_2))))
# The derivative of W_2.
dW_2 = 1 / m * np.dot(dZ_2, A_1.T)
# The derivative of b_2.
dB_2 = 1 / m * np.sum(dZ_2, axis = 1, keepdims = True)
# The derivative of Z_1.
dZ_1 = np.multiply(np.dot(W_2.T, dZ_2), (1 - np.square(np.tanh(Z_1))))
# The derivative of W_1.
dW_1 = 1 / m * np.dot(dZ_1, X.T)
# The derivative of b_1.
dB_1 = 1 / m * np.sum(dZ_1, axis = 1, keepdims = True)
# Updating the parameters.
# The weight updating of W_1.
W_1 = W_1 - lr * dW_1
# The weight updating of W_2.
W_2 = W_2 - lr * dW_2
# The weight updating of W_3.
W_3 = W_3 - lr * dW_3
# The weight updating of b_1.
b_1 = b_1 - lr * dB_1
# The weight updating of b_2.
b_2 = b_2 - lr * dB_2
# The weight updating of b_3.
b_3 = b_3 - lr * dB_3
# Getting the predicted lable.
A = A >= 0.5
# Printing the value of the training accuracy.
print("The accuracy is: " + str(np.sum(A == Y) / len(A)))
# Printing the value of the truth label.
print("The truth label is: " + str(Y))
# Printing the value of the predicted label.
print("The predicted label is: " + str(A))
# Training the model.
if __name__ == "__main__":
NumpyModel3.trainer() | UTF-8 | Python | false | false | 5,939 | py | 41 | NumpyNN.py | 38 | 0.420778 | 0.393332 | 0 | 122 | 47.688525 | 130 |
OlivierGaillard/fitness | 18,476,949,327,639 | 2ad8d796e19678c19c9704d2cf56c804ca11f225 | 092726655009a4dc8ff258eb1ebc386a64b7fef5 | /workouts/urls.py | 2f289d4e6e47e903ae0d8e6b7b7570958832f699 | []
| no_license | https://github.com/OlivierGaillard/fitness | e759e0f5c798dd6bf95251b093ecb7c02a9249af | ec63c6829aa1374140ca076fbc48ddbed516b962 | refs/heads/master | 2020-03-28T22:43:12.440422 | 2018-09-20T16:00:35 | 2018-09-20T16:00:35 | 149,253,138 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.urls import path
from workouts.views import IndexView
urlpatterns = [
path('', IndexView.as_view(), name='index'),
]
| UTF-8 | Python | false | false | 134 | py | 10 | urls.py | 8 | 0.708955 | 0.708955 | 0 | 6 | 21.333333 | 48 |
Aasthaengg/IBMdataset | 15,908,558,881,085 | f45f54c44840e2fa469c80de8b638856de85d14f | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p04043/s313374300.py | c5525a2f176267f4c5acdb9d4186ac0c94606893 | []
| no_license | https://github.com/Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | A, B, C = map(int, input().split())
S = sorted([A, B, C])
if S[0] == 5 and S[1] == 5 and S[2] == 7:
print('YES')
else:
print('NO') | UTF-8 | Python | false | false | 160 | py | 202,060 | s313374300.py | 202,055 | 0.45 | 0.38125 | 0 | 13 | 11.384615 | 36 |
rgoshen/flask_greet_calc | 10,651,518,900,450 | 68ff9fc16f8eb28e31cd47b7b9afe376a9008bb8 | 4830e7f3e3f9e41d127757a42e011a8b53876e9f | /greet/app.py | d523f605a3f41b418bfed68d317432f5e03f8244 | []
| no_license | https://github.com/rgoshen/flask_greet_calc | 963552cf089d4cf048838fe4788cd08fa7ad16ad | 53c8c9f8c33c64a5e9491b454ef36a288312a09f | refs/heads/master | 2023-01-23T18:08:56.771985 | 2020-12-10T00:25:23 | 2020-12-10T00:25:23 | 320,114,383 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from flask import Flask
app = Flask(__name__)
@app.route('/welcome')
def app_welcome():
"""Return simple string."""
return 'welcome'
@app.route('/welcome/home')
def app_welcome_home():
"""Return welcome home."""
return 'welcome home'
@app.route('/welcome/back')
def app_welcome_back():
"""Return welcome back."""
return 'welcome back'
| UTF-8 | Python | false | false | 362 | py | 3 | app.py | 2 | 0.638122 | 0.638122 | 0 | 17 | 20.294118 | 31 |
saulmoore1/PhD_Project | 8,323,646,627,159 | 3f5fce60e05bc42204f23efa80862d39ade4aeb4 | f269b417034e397139adf2802514165b0eb26f7c | /Python/image_processing/fourier_image.py | 1515454024083343c21450ff294422e87e0c503c | []
| no_license | https://github.com/saulmoore1/PhD_Project | 2d333f7fdbd8b2b1932007e7cc6e05b3108ed325 | a235bf8700e4b5a311fc1dfd79c474c5467e9c7a | refs/heads/master | 2023-08-03T17:51:17.262188 | 2023-07-26T12:35:37 | 2023-07-26T12:35:37 | 158,314,469 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 14 17:47:29 2020
@author: sm5911
"""
import cv2
import numpy as np
from matplotlib import pyplot as plt
#%%
img_path = '/Users/lferiani/Desktop/ffmpeg_playground/PG10_0mM_GFP_s121z2.tif'
img = cv2.imread(img_path, -1)
img_float = img.astype(np.float)
img_float = np.log10(img_float)
img_float -= img_float.min()
img_float /= img_float.max()
ff = np.fft.fft2(img_float)
keep_modes_frac = 1/64
cut_rows_min = int(keep_modes_frac/2*ff.shape[0])
print(cut_rows_min)
cut_rows_max = int(ff.shape[0] - cut_rows_min)
cut_cols_min = int(keep_modes_frac/2*ff.shape[1])
cut_cols_max = int(ff.shape[1] - cut_cols_min)  # use the column cut here, not the row cut
ff[cut_rows_min:cut_rows_max, cut_cols_min:cut_cols_max] *= 0
img_filtered = np.fft.ifft2(ff)
img_filtered = abs(img_filtered)
#%%
plt.close('all')
plt.figure()
plt.imshow(img_filtered)
plt.title('filtered')
plt.show()
plt.figure()
plt.imshow(img_float * img_filtered)
plt.title('times')
plt.show()
#%%
x = np.linspace(-10, 10, 1000)
y = np.sin(x) + np.random.randn(x.shape[0])
y_ff = np.fft.fft(y)
y_ff_filt = y_ff.copy()
y_ff_filt[5:-5] = 0
y_filt = np.fft.ifft(y_ff_filt).real  # the imaginary part is numerical noise for a real input
plt.figure()
plt.plot(x, y)
plt.plot(x, y_filt)
plt.figure()
plt.plot(np.abs(y_ff))       # plot magnitudes; plotting complex arrays drops the imaginary part with a warning
plt.plot(np.abs(y_ff_filt))
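# The 1-D cell mirrors the 2-D filter above: zeroing all but the lowest Fourier
# modes acts as a low-pass filter. With the same keep-fraction convention:
#   keep = int(keep_modes_frac / 2 * y_ff.shape[0])
#   y_ff_filt[keep:-keep] = 0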
| UTF-8 | Python | false | false | 1,250 | py | 160 | fourier_image.py | 138 | 0.6776 | 0.6344 | 0 | 52 | 23.038462 | 78 |
AK-1121/code_extraction | 17,446,157,188,925 | b9f751a5f35e73320ff0b3501909bf9a28cd90c5 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_19634.py | c2996064240528648fd1c7f2defc7d312077b0d0 | []
| no_license | https://github.com/AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Finding string in dictionary key where the string is not exactly the key in the dict
print([k for k in d if string in k])
| UTF-8 | Python | false | false | 123 | py | 29,367 | python_19634.py | 29,367 | 0.756098 | 0.756098 | 0 | 2 | 60.5 | 86 |
technogeek156/TUTAI-AI | 6,846,177,873,340 | 09e794ab0991245f7bb0e7645864291c7606e0df | 4c37015418ff7b45cf9d65dd67c7568460b69629 | /AI Prototype.py | c852a73d5970128dfa956a5157c41aa77720b240 | []
| no_license | https://github.com/technogeek156/TUTAI-AI | 69fb4cd230f4fe09a5b7988484c3552a85a4d514 | d0cf61033618bee836cde70d342fc8cd362c9290 | refs/heads/master | 2021-01-10T02:55:39.005356 | 2016-01-29T14:17:09 | 2016-01-29T14:17:09 | 50,608,267 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import sys,time,random, os.path
typing_speed = 150 #wpm
def slow_type(t):
for l in t:
sys.stdout.write(l)
sys.stdout.flush()
time.sleep(random.random()*10.0/typing_speed)
print ''
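# Each keystroke sleeps uniform(0, 10.0/typing_speed) seconds, so at 150 wpm
# the average inter-character delay is about 10/150/2 = 0.033 s.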
slow_type("Hello! My name is TUTAI, or Turing Test Artificial Intelligance")
slow_type("Currently I am in training, so my features aren't fully complete.")
slow_type("If you say something I don't understand yet, I will repeat it back to you in order for me to learn")
slow_type("and build a database of responces!")
loopDeLoop = True
inputOne = 1
justAsked = False
while loopDeLoop == True:
if justAsked == True:
nothing = 0
elif justAsked == False:
inputOne = raw_input()
    fileName = inputOne + ".txt"
    existance = os.path.isfile(fileName)  # check existence without leaking an open file handle
if existance == True:
        talkBack = open(fileName, "r")
        slow_type(talkBack.read())
        talkBack.close()
else:
slow_type("I'm sorry, I don't have a responce for that yet")
slow_type(inputOne)
talk = raw_input()
learningName = inputOne + ".txt"
learning = open(learningName, "w")
learning.write(talk)
learning.close()
loopDeLoop = True
| UTF-8 | Python | false | false | 1,278 | py | 2 | AI Prototype.py | 1 | 0.608764 | 0.602504 | 0 | 57 | 21.280702 | 111 |
Yoketwx/tensor | 9,045,201,146,369 | d41a275c87130cb73eeb860041d2f4f0dda7f214 | f2ff42d8ee9f221a2dae4b65cec5190b1a385f83 | /hupunews.py | cd8336035dd235274b9cb95f041124dff0b8a821 | []
| no_license | https://github.com/Yoketwx/tensor | 82f4a9f2d24cbba6b38b786ca53e5f358e5641ee | cdb8c407b26ca49ed0406407967b2a5ff1caecff | refs/heads/master | 2020-03-18T00:56:49.342066 | 2018-05-20T05:00:20 | 2018-05-20T05:10:36 | 134,120,482 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import requests
from lxml import etree
with open('hupunews.txt', 'w', encoding='utf-8') as f:
for a in range(10):
url = 'https://voice.hupu.com/nba/{}.html'.format(2270998+a)
data = requests.get(url).text
s = etree.HTML(data)
news = s.xpath('/html/body/div[4]/div[1]/div[2]/div/div[2]')
for div in news:
lenStory = div.xpath('./p/text()')
title = div.xpath('/html/body/div[4]/div[1]/div[1]/h1/text()')[0]
"""print(title)"""
f.write("{}\n\n\n".format(title))
            for story in lenStory:  # reuse the paragraphs already fetched above instead of re-querying xpath
                f.write("{}\n\n\n".format(story))
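# A polite-crawling sketch (assumption: throttling is acceptable here) would
# sleep between requests inside the outer loop:
#   import time
#   time.sleep(1)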
| UTF-8 | Python | false | false | 701 | py | 6 | hupunews.py | 3 | 0.512126 | 0.485021 | 0 | 18 | 37.666667 | 77 |
JzyJade/torchmtlr | 12,773,232,767,705 | 867e8d36a3c9be7145fb219593ed5d17eed63137 | 8fef330ade6ad415ca912b8a8da586460657f4e1 | /torchmtlr/__init__.py | aa248f66ac009698abf8f86916fd60c64ffbb214 | [
"MIT"
]
| permissive | https://github.com/JzyJade/torchmtlr | bb63748b132f175be09e456f2283f8b6ab813ab7 | 7234e96b8c7c62cc7e398f7cd2319eb971944dea | refs/heads/master | 2023-07-09T10:48:23.251622 | 2020-12-08T13:01:41 | 2020-12-08T13:01:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from typing import Union
import numpy as np
from scipy.interpolate import interp1d
import torch
import torch.nn as nn
class MTLR(nn.Module):
"""Multi-task logistic regression for individualised
survival prediction.
The MTLR time-logits are computed as:
`z = sum_k x^T w_k + b_k`,
where `w_k` and `b_k` are learnable weights and biases for each time
interval.
Note that a slightly more efficient reformulation is used here, first
proposed in [2]_.
References
----------
..[1] C.-N. Yu et al., ‘Learning patient-specific cancer survival
distributions as a sequence of dependent regressors’, in Advances in neural
information processing systems 24, 2011, pp. 1845–1853.
..[2] P. Jin, ‘Using Survival Prediction Techniques to Learn
Consumer-Specific Reservation Price Distributions’, Master's thesis,
University of Alberta, Edmonton, AB, 2015.
"""
def __init__(self, in_features: int, num_time_bins: int):
"""Initialises the module.
Parameters
----------
in_features
Number of input features.
num_time_bins
The number of bins to divide the time axis into.
"""
super().__init__()
if num_time_bins < 1:
raise ValueError("The number of time bins must be at least 1")
if in_features < 1:
raise ValueError("The number of input features must be at least 1")
self.in_features = in_features
self.num_time_bins = num_time_bins + 1 # + extra time bin [max_time, inf)
self.mtlr_weight = nn.Parameter(torch.Tensor(self.in_features,
self.num_time_bins - 1))
self.mtlr_bias = nn.Parameter(torch.Tensor(self.num_time_bins - 1))
# `G` is the coding matrix from [2]_ used for fast summation.
# When registered as buffer, it will be automatically
# moved to the correct device and stored in saved
# model state.
self.register_buffer(
"G",
torch.tril(
torch.ones(self.num_time_bins - 1,
self.num_time_bins,
requires_grad=True)))
self.reset_parameters()
def forward(self, x: torch.Tensor) -> torch.Tensor:
"""Performs a forward pass on a batch of examples.
Parameters
----------
x : torch.Tensor, shape (num_samples, num_features)
The input data.
Returns
-------
torch.Tensor, shape (num_samples, num_time_bins)
The predicted time logits.
"""
out = torch.matmul(x, self.mtlr_weight) + self.mtlr_bias
return torch.matmul(out, self.G)
def reset_parameters(self):
"""Resets the model parameters."""
nn.init.xavier_normal_(self.mtlr_weight)
nn.init.constant_(self.mtlr_bias, 0.)
def __repr__(self):
return (f"{self.__class__.__name__}(in_features={self.in_features},"
f" num_time_bins={self.num_time_bins})")
def masked_logsumexp(x: torch.Tensor,
mask: torch.Tensor,
dim: int = -1) -> torch.Tensor:
"""Computes logsumexp over elements of a tensor specified by a mask
in a numerically stable way.
Parameters
----------
x
The input tensor.
mask
A tensor with the same shape as `x` with 1s in positions that should
be used for logsumexp computation and 0s everywhere else.
dim
The dimension of `x` over which logsumexp is computed. Default -1 uses
the last dimension.
Returns
-------
torch.Tensor
Tensor containing the logsumexp of each row of `x` over `dim`.
"""
max_val, _ = (x * mask).max(dim=dim)
max_val = torch.clamp_min(max_val, 0)
return torch.log(
torch.sum(torch.exp(x - max_val.unsqueeze(dim)) * mask,
dim=dim)) + max_val
def mtlr_neg_log_likelihood(logits: torch.Tensor,
target: torch.Tensor,
model: torch.nn.Module,
C1: float,
average: bool = False) -> torch.Tensor:
"""Computes the negative log-likelihood of a batch of model predictions.
Parameters
----------
logits : torch.Tensor, shape (num_samples, num_time_bins)
Tensor with the time-logits (as returned by the MTLR module) for one
instance in each row.
target : torch.Tensor, shape (num_samples, num_time_bins)
Tensor with the encoded ground truth survival.
model
PyTorch Module with at least `MTLR` layer.
C1
The L2 regularization strength.
average
Whether to compute the average log likelihood instead of sum
(useful for minibatch training).
Returns
-------
torch.Tensor
The negative log likelihood.
"""
censored = target.sum(dim=1) > 1
nll_censored = masked_logsumexp(logits[censored], target[censored]).sum() if censored.any() else 0
nll_uncensored = (logits[~censored] * target[~censored]).sum() if (~censored).any() else 0
# the normalising constant
norm = torch.logsumexp(logits, dim=1).sum()
nll_total = -(nll_censored + nll_uncensored - norm)
if average:
nll_total = nll_total / target.size(0)
# L2 regularization
for k, v in model.named_parameters():
if "mtlr_weight" in k:
nll_total += C1/2 * torch.sum(v**2)
return nll_total
def mtlr_survival(logits: torch.Tensor) -> torch.Tensor:
"""Generates predicted survival curves from predicted logits.
Parameters
----------
logits
Tensor with the time-logits (as returned by the MTLR module) for one
instance in each row.
Returns
-------
torch.Tensor
The predicted survival curves for each row in `pred` at timepoints used
during training.
"""
# TODO: do not reallocate G in every call
G = torch.tril(torch.ones(logits.size(1),
logits.size(1))).to(logits.device)
density = torch.softmax(logits, dim=1)
return torch.matmul(density, G)
def mtlr_survival_at_times(logits: torch.Tensor,
train_times: Union[torch.Tensor, np.ndarray],
pred_times: np.ndarray) -> np.ndarray:
"""Generates predicted survival curves at arbitrary timepoints using linear
interpolation.
Notes
-----
This function uses scipy.interpolate internally and returns a Numpy array,
in contrast with `mtlr_survival`.
Parameters
----------
logits
Tensor with the time-logits (as returned by the MTLR module) for one
instance in each row.
train_times
Time bins used for model training. Must have the same length as the
first dimension of `pred`.
pred_times
Array of times used to compute the survival curve.
Returns
-------
np.ndarray
The survival curve for each row in `pred` at `pred_times`. The values
are linearly interpolated at timepoints not used for training.
"""
train_times = np.pad(train_times, (1, 0))
surv = mtlr_survival(logits).detach().cpu().numpy()
interpolator = interp1d(train_times, surv)
return interpolator(np.clip(pred_times, 0, train_times.max()))
def mtlr_hazard(logits: torch.Tensor) -> torch.Tensor:
"""Computes the hazard function from MTLR predictions.
The hazard function is the instantenous rate of failure, i.e. roughly
the risk of event at each time interval. It's computed using
`h(t) = f(t) / S(t)`,
where `f(t)` and `S(t)` are the density and survival functions at t,
respectively.
Parameters
----------
logits
The predicted logits as returned by the `MTLR` module.
Returns
-------
torch.Tensor
The hazard function at each time interval in `y_pred`.
"""
return torch.softmax(
logits, dim=1)[:, :-1] / (mtlr_survival(logits) + 1e-15)[:, 1:]
def mtlr_risk(logits: torch.Tensor) -> torch.Tensor:
"""Computes the overall risk of event from MTLR predictions.
The risk is computed as the time integral of the cumulative hazard,
as defined in [1]_.
Parameters
----------
logits
The predicted logits as returned by the `MTLR` module.
Returns
-------
torch.Tensor
The predicted overall risk.
"""
hazard = mtlr_hazard(logits)
return torch.sum(hazard.cumsum(1), dim=1)
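# Usage sketch (illustrative only; assumes 2 input features and 3 time bins):
#   model = MTLR(in_features=2, num_time_bins=3)
#   logits = model(torch.randn(4, 2))  # shape (4, 4): 3 bins + [max_time, inf)
#   surv = mtlr_survival(logits)       # per-sample survival curves
#   risk = mtlr_risk(logits)           # higher value = earlier predicted event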
| UTF-8 | Python | false | false | 8,592 | py | 7 | __init__.py | 4 | 0.603123 | 0.595432 | 0 | 262 | 31.755725 | 102 |
Lucas-Irvine/Particle-Cloud-Framework | 2,645,699,856,362 | 371cc0c589e691a3cb8c14a208a3ba32797e2e65 | b2e7c3c7c2d4fa89dc971a7a03799a42af35178a | /pcf/particle/aws/efs/efs_instance.py | b996e633176b910cb7a7f85abb89852448ba0eb3 | [
"Apache-2.0"
]
| permissive | https://github.com/Lucas-Irvine/Particle-Cloud-Framework | 9c990ba14225e60ab1e90ae9a52b833593d0e8df | 0f424126067f70c278b7b4b48f9d62adb7a735a4 | refs/heads/master | 2021-02-16T02:02:58.669138 | 2020-01-08T19:43:41 | 2020-01-08T19:43:41 | 244,955,816 | 0 | 0 | Apache-2.0 | true | 2020-03-04T16:58:54 | 2020-03-04T16:58:54 | 2020-01-08T19:43:44 | 2020-01-08T19:45:43 | 7,565 | 0 | 0 | 0 | null | false | false | # Copyright 2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pcf.core import State
from pcf.util import pcf_util
from pcf.core.aws_resource import AWSResource
from pcf.core.pcf_exceptions import *
import logging
logger = logging.getLogger(__name__)
class EFSInstance(AWSResource):
"""
This is the implementation of Amazon's Elastic File System.
"""
flavor = "efs_instance"
state_lookup = {
"creating": State.pending,
"deleting": State.pending,
"available": State.running,
"deleted": State.terminated
}
START_PARAM_FILTER = {
"CreationToken",
"PerformanceMode",
"Encrypted",
"KmsKeyId",
}
RETURN_PARAM_FILTER = {
"OwnerId",
"CreationToken",
"FileSystemId",
"Name",
"PerformanceMode",
"Encrypted",
"KmsKeyId"
}
UNIQUE_KEYS = ["aws_resource.custom_config.instance_name"]
def __init__(self, particle_definition, session=None):
super().__init__(particle_definition, "efs", session=session)
self.instance_name = self.desired_state_definition.get('custom_config').get('instance_name')
self._set_unique_keys()
def _set_unique_keys(self):
"""
Logic that sets keys from state definition that are used to uniquely identify the S3 Bucket
"""
self.unique_keys = EFSInstance.UNIQUE_KEYS
def get_status(self):
"""
Gets current status of the EFS instance
Returns:
status (dict)
"""
try:
current_definition = self._describe_file_systems()
except NoResourceException:
logger.info("File System {} was not Found. State is terminated".format(self.instance_name))
return {"status": "missing"}
except TooManyResourceException as e:
raise e
return current_definition
def sync_state(self):
"""
Elastic File System implementation of sync state. Calls get status and sets the current state
"""
status_def = self.get_status()
if status_def.get("status") == "missing":
self.state = State.terminated
return
self.current_state_definition = status_def
self.state = self.state_lookup[self.current_state_definition["LifeCycleState"]]
def _terminate(self):
"""
Calls boto3 delete_file_system
Returns:
response of boto3 delete_file_system
"""
return self.client.delete_file_system(FileSystemId=self.current_state_definition['FileSystemId'])
def _start(self):
"""
Calls boto3 create_file_system https://boto3.readthedocs.io/en/latest/reference/services/efs.html#EFS.Client.create_file_system
Returns:
boto3 create_file_system() response
"""
new_desired_state_def, diff_dict = pcf_util.update_dict(
self.current_state_definition,
self.get_desired_state_definition()
)
create_definition = pcf_util.param_filter(new_desired_state_def, EFSInstance.START_PARAM_FILTER)
res = self.client.create_file_system(**create_definition)
self.client.create_tags(FileSystemId=res['FileSystemId'], Tags=[{'Key': 'Name', 'Value': self.instance_name}])
return res
def _stop(self):
"""
Elastic File System does not have a stopped state so calls terminate
"""
return self._terminate()
def _update(self):
if self.is_state_definition_equivalent() is False:
raise InvalidUpdateParamException()
def is_state_definition_equivalent(self):
"""
Compares the desired state and current state definitions.
Returns:
bool
"""
self.get_state()
desired_definition = pcf_util.param_filter(self.desired_state_definition, EFSInstance.RETURN_PARAM_FILTER)
new_desired_state_def, diff_dict = pcf_util.update_dict(self.current_state_definition, desired_definition)
return diff_dict == {}
def _describe_file_systems(self):
"""
Uses instance_name as a filter for boto3 describe_file_systems()
Returns:
boto3 describe_file_systems() response
"""
counter = 0
dict = {}
for fileSystem in self.client.describe_file_systems().get("FileSystems", []):
if fileSystem["Name"] == self.instance_name:
dict = fileSystem
counter += 1
if counter == 0:
raise NoResourceException
elif counter > 1:
raise TooManyResourceException
else:
return dict
def create_tags(self, fs_tags):
"""
Creates or overwrites tags associated with file system
Args:
fs_tags (list): Array of Tag objects to add, each object is a key-value pair
"""
self.client.create_tags(FileSystemId=self.current_state_definition['FileSystemId'], Tags=fs_tags)
def describe_tags(self):
"""
Returns the tags associated with the file system
Returns:
boto3 describe_tags() response
"""
return self.client.describe_tags(FileSystemId=self.current_state_definition['FileSystemId'])['Tags']
def delete_tags(self, tag_keys):
"""
Deletes the specified tags from a file system
Args:
tag_keys(list): List of tag keys to delete
"""
self.client.delete_tags(FileSystemId=self.current_state_definition['FileSystemId'], TagKeys=tag_keys)
def create_mount_target(self, subnet_id, **kwargs):
"""
Creates a mount target for a file system
Args:
subnet_id (string): ID of the subnet to add the mount target in
**kwargs: options for boto3 create_mount target
Returns:
response of boto3 create_mount_target
"""
return self.client.create_mount_target(FileSystemId=self.current_state_definition['FileSystemId'], SubnetId=subnet_id, **kwargs)
def delete_mount_target(self, mount_target_id):
"""
Deletes the specified mount target
Args:
mount_target_id (string): ID of the mount target to delete
Returns:
response of boto3 delete_mount_target
"""
return self.client.delete_mount_target(MountTargetId=mount_target_id)
def describe_mount_targets(self):
"""
        Returns the descriptions of all the current mount targets for the file system
Returns:
response of boto3 describe_mount_targets
"""
return self.client.describe_mount_targets(FileSystemId=self.current_state_definition['FileSystemId'])['MountTargets']
def modify_mount_target_security_groups(self, mount_target_id, security_groups):
"""
Modifies the set of security groups in effect for a mount target
Args:
mount_target_id(string): ID of the mount target whose security groups you want to modify
security_groups(list): Array of VPC security group IDs
Returns:
            response of boto3 modify_mount_target_security_groups
"""
return self.client.modify_mount_target_security_groups(MountTargetId=mount_target_id, SecurityGroups=security_groups)
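
# --- Usage sketch (added for illustration; not part of the original module) ---
# Assumes the usual pcf pattern of driving a particle from a definition dict;
# the flavor key, the field names, and the set_desired_state()/apply() calls
# below are assumptions based on other pcf particles, not confirmed by this file.
_example_definition = {
    "pcf_name": "efs_example",  # hypothetical particle name
    "flavor": "efs_instance",
    "aws_resource": {
        "custom_config": {"instance_name": "my-efs"},
        "PerformanceMode": "generalPurpose",
    },
}

def _efs_demo():
    efs = EFSInstance(_example_definition)
    efs.set_desired_state(State.running)  # assumed base-class API
    efs.apply()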
| UTF-8 | Python | false | false | 8,100 | py | 326 | efs_instance.py | 152 | 0.630123 | 0.62679 | 0 | 233 | 33.763948 | 136 |
wdehaes/focj_story2 | 6,554,120,100,014 | bdf5bd750ed7d5d5f67ad9b51fce75f681b0c464 | 74c50369a3bbc3eb94b53eed3965c0470d88735f | /mercari/scrape_bs4.py | 6ef7547cd6038809b3214fd7a3cdf6a9143dd9f0 | []
| no_license | https://github.com/wdehaes/focj_story2 | ecb517b083bfdef085f2a65c24ac40aeea49dbde | dbace6d4b7a6fcd14a4eb5058717b12d2ee6ff5b | refs/heads/master | 2020-09-05T05:35:36.637121 | 2019-11-06T13:06:55 | 2019-11-06T13:06:55 | 219,893,952 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from bs4 import BeautifulSoup
import requests
url = 'https://www.mercari.com/search/?categoryIds=22&facets=2&itemConditions=2-3-4-5&length=30&minPrice=20000'
html = requests.get(url).text
soup = BeautifulSoup(html, "lxml")
print(soup.prettify())
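
# --- Illustrative extension (added; not part of the original script) ---
# Mercari's listing pages are largely JavaScript-rendered, so the CSS selector
# below is a hypothetical placeholder rather than a known working one.
for item in soup.select("div.items-box"):  # assumed selector
    print(item.get_text(" ", strip=True))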
| UTF-8 | Python | false | false | 248 | py | 37 | scrape_bs4.py | 10 | 0.766129 | 0.705645 | 0 | 8 | 30 | 111 |
HarshithGP/Movie_Recommender_System | 6,356,551,618,388 | 326ea472f2690d8c4588081e4ab1b479d946da0a | 98a70a74ddb2d3143944594f66b3a758037448c5 | /Python scripts/make_recommendations.py | 3b8158091cba6a71aa9e119baa2896e1c1660973 | []
| no_license | https://github.com/HarshithGP/Movie_Recommender_System | e473a5d361da46e709f7599dc673850ed083f169 | c6c38e85eb192085fb4d56925e58a7e14ef0cc1f | refs/heads/master | 2021-01-19T16:33:50.543053 | 2018-03-02T20:16:17 | 2018-03-02T20:16:17 | 101,005,324 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
import pandas as pd
import matrix_factorization_utilities
# Load user ratings
raw_dataset_df = pd.read_csv('movie_ratings_data_set.csv')
# Load movie titles
movies_df = pd.read_csv('movies.csv', index_col='movie_id')
# Convert the running list of user ratings into a matrix
ratings_df = pd.pivot_table(raw_dataset_df, index='user_id',
columns='movie_id',
aggfunc=np.max)
# Apply matrix factorization to find the latent features
U, M = matrix_factorization_utilities.low_rank_matrix_factorization(ratings_df.as_matrix(),
num_features=15,
regularization_amount=0.1)
# Find all predicted ratings by multiplying U and M matrices
predicted_ratings = np.matmul(U, M)
print("Enter a user_id to get recommendations (Between 1 and 100):")
user_id_to_search = int(input())
print("Movies previously reviewed by user_id {}:".format(user_id_to_search))
reviewed_movies_df = raw_dataset_df[raw_dataset_df['user_id'] == user_id_to_search]
reviewed_movies_df = reviewed_movies_df.join(movies_df, on='movie_id')
print(reviewed_movies_df[['title', 'genre', 'value']])
input("Press enter to continue.")
print("Movies we will recommend:")
user_ratings = predicted_ratings[user_id_to_search - 1]
movies_df['rating'] = user_ratings
already_reviewed = reviewed_movies_df['movie_id']
recommended_df = movies_df[movies_df.index.isin(already_reviewed) == False]
recommended_df = recommended_df.sort_values(by=['rating'], ascending=False)
print(recommended_df[['title', 'genre', 'rating']].head(5))
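# Note: the triple-quoted block below is an alternative variant kept disabled --
# it loads precomputed factors from pickle files instead of refitting the model.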
"""
import pickle
import pandas as pd
# Load prediction rules from data files
U = pickle.load(open("user_features.dat", "rb"))
M = pickle.load(open("product_features.dat", "rb"))
predicted_ratings = pickle.load(open("predicted_ratings.dat", "rb"))
# Load movie titles
movies_df = pd.read_csv('movies.csv', index_col='movie_id')
print("Enter a user_id to get recommendations (Between 1 and 100):")
user_id_to_search = int(input())
print("Movies we will recommend:")
user_ratings = predicted_ratings[user_id_to_search - 1]
movies_df['rating'] = user_ratings
movies_df = movies_df.sort_values(by=['rating'], ascending=False)
print(movies_df[['title', 'genre', 'rating']].head(5))
"""
| UTF-8 | Python | false | false | 2,456 | py | 11 | make_recommendations.py | 8 | 0.649837 | 0.643322 | 0 | 69 | 33.565217 | 94 |
Herschdorfer/python-mqtt-home | 17,901,423,709,311 | dbde9646484133c7e6b974c0afd697734c35996d | eb5e67f1b9d68972310d4647fc5c90c64bf2db42 | /mqtt-dht22.py | f896c7941afe1b23b233fc7c8705ba6ef6b5aae0 | []
| no_license | https://github.com/Herschdorfer/python-mqtt-home | c94fac644b3d25141b80c19faa986ac6558cc9b7 | a3401454059104d196b47110fef6ee014125f5d4 | refs/heads/master | 2023-05-13T14:23:53.467477 | 2021-05-22T13:42:04 | 2021-05-22T13:42:04 | 112,947,659 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import time
import sys
import Adafruit_DHT
import paho.mqtt.client as mqtt
import json
#sensor config
sensor = Adafruit_DHT.DHT22
pin = 22
sensor_data = {'temperature': 0, 'humidity': 0}
#openhab config
OPENHAB = 'localhost'
# Data capture and upload interval in seconds. A shorter interval will eventually hang the DHT22.
INTERVAL=60
next_reading = time.time()
client = mqtt.Client()
# Connect to the local MQTT broker (OpenHAB host) using the default MQTT port and a 60-second keepalive interval
client.connect(OPENHAB, 1883, 60)
client.loop_start()
try:
while True:
        humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
        if humidity is None or temperature is None:
            continue  # read_retry returns None on failure; skip this cycle
        humidity = round(humidity, 3)
        temperature = round(temperature, 3)
sensor_data['temperature'] = temperature
sensor_data['humidity'] = humidity
        # Send the humidity and temperature readings to the MQTT broker
client.publish('workingroom/sensors/temperature2', temperature , 1)
client.publish('workingroom/sensors/humidity', humidity , 1)
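        # Added for illustration: also publish the combined reading as a single
        # JSON payload (the original fills sensor_data but never sends it, and
        # imports json without using it); the topic name below is an assumption.
        client.publish('workingroom/sensors/dht22', json.dumps(sensor_data), 1)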
next_reading += INTERVAL
sleep_time = next_reading-time.time()
if sleep_time > 0:
time.sleep(sleep_time)
except KeyboardInterrupt:
pass
client.loop_stop()
client.disconnect()
| UTF-8 | Python | false | false | 1,147 | py | 6 | mqtt-dht22.py | 3 | 0.749782 | 0.728858 | 0 | 49 | 22.387755 | 92 |
xrl12/dbz | 12,386,685,709,616 | 83249cf1db801979231f04a0b6a28fc5345be6b2 | e0aae53312677fe4f304769dd3b53bfed7b997c3 | /blog/tests.py | c149ee687466bdc2905e1e987dc49d040a794671 | []
| no_license | https://github.com/xrl12/dbz | bf3ab8ccc4fb907bb479260384f6edccde10bb05 | 7e5bfd7096fa5b9b50b4574f8a624c2893914fc7 | refs/heads/master | 2022-08-27T13:06:09.266177 | 2022-08-20T18:42:36 | 2022-08-20T18:42:36 | 232,839,626 | 0 | 0 | null | false | 2022-01-13T02:15:09 | 2020-01-09T15:25:41 | 2020-02-19T02:26:33 | 2022-01-13T02:15:06 | 4,451 | 0 | 0 | 2 | JavaScript | false | false | import datetime
# from dateutil.parser import parse
from django.test import TestCase
# Create your tests here.
# print(datetime.datetime.now())
# create_time = parse('2019-12-29/00:00:00')
# now_time = datetime.datetime.now()
# (now_time-create_time).days
# import datetime
# start = datetime.datetime.strptime('2019-12-29 00:00:00', "%Y-%m-%d %H:%M:%S")
# end = (datetime.datetime.now())
# print((end - start).days)
import re
# def check_pwd(password):
# result = r'^[\d|a-z|A-Z][\d|a-z|A-Z|@]{5,19}$'
# abc = re.match(result,password)
# print(abc.group())
#
#
# check_pwd('234566fgasdfghqertgys')
def check_phone(phone):
    # Matches an 11-digit mainland-China mobile number: leading 1, second digit in 1/3-9
    result = r'^1[13456789]\d{9}$'
    phone = re.match(result, phone)
    print('123456')
    if phone:
        print('123')
        return phone
    else:
        print('456')
        print('Please enter a valid mobile number; we have no time to play around here')
check_phone('123456') | UTF-8 | Python | false | false | 963 | py | 13 | tests.py | 8 | 0.603917 | 0.533188 | 0 | 39 | 21.615385 | 80 |
harithlaxman/Load-Balancer | 18,322,330,509,352 | b80937181b60fccee64368f69224d40503eb2b8b | f6c628348b507751dc5f3588413043390c981659 | /request.py | e5299ccf23efd5cb87dd48f4fa50cc24704ec79c | []
| no_license | https://github.com/harithlaxman/Load-Balancer | bea38084f0041b2916880817e82664d2059733a5 | 45eaaf7b75691efbf9a45b008bf53453600ae034 | refs/heads/master | 2022-05-29T17:06:47.830093 | 2020-04-30T15:29:21 | 2020-04-30T15:29:21 | 260,243,474 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import requests
import random
name = ["cpu1", "cpu2", "cpu3", "cpu4"]
rname = name[random.randint(0,3)]
rcreq = random.randint(0,20)
rmreq = random.randint(0,100)
rtreq = random.randint(0,60)
param = {'name': rname, 'creq': rcreq, 'mreq':rmreq, 'treq': rtreq}
stuff = requests.post("http://localhost/hello/dbh.php", data=param)  # pass the payload explicitly as form data
| UTF-8 | Python | false | false | 332 | py | 3 | request.py | 2 | 0.665663 | 0.61747 | 0 | 9 | 34.888889 | 67 |
paulthomas2107/DjangoForBeginners | 19,318,762,906,252 | dde20b9cc9ed9afea1eb032162f87f009b93a88b | 1612bb7cdbde86a7358a443e6f0485bfa90f8fa7 | /TestProject/TestProject/NewsApp/urls.py | 60cb3e2cd446a47814456db1fd0f345ef806f71f | []
| no_license | https://github.com/paulthomas2107/DjangoForBeginners | fb86a9a153bf982912288f6d8d4d07b46319212d | b05c623e066a9b1f32f547a904ee34a858b57010 | refs/heads/main | 2023-03-21T17:45:52.875191 | 2021-03-19T19:09:09 | 2021-03-19T19:09:09 | 347,662,519 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.urls import path
from .views import Home, Contact, register, addUser, modelform, addModalForm, Home2
urlpatterns = [
# path('news/<int:year>', News, name='News'),
path('home/', Home, name='Home'),
path('home2/', Home2, name='Home2'),
path('contact/', Contact, name='Contact'),
path('signup/', register, name='register'),
path('addUser/', addUser, name='addUser'),
path('modalform/', modelform, name='form'),
path('addmodalform/', addModalForm, name='modalform'),
]
| UTF-8 | Python | false | false | 513 | py | 18 | urls.py | 13 | 0.645224 | 0.637427 | 0 | 16 | 31.0625 | 83 |
150801116/python_100 | 11,802,570,141,332 | 087713e4f3316efa0d3abcdfbb3d1cea7c27d163 | 66b714a90f71a03d2224b9309b4a8717696c02ce | /python_100/实例031:字母识词.py | e144536bdce9f84c69598530c2113e0b3155d1c2 | []
| no_license | https://github.com/150801116/python_100 | 20a9b8a42b168fb08675733541f5aaf1e540a3d3 | ff9aa130d946affc9b43ad702997bb0c7a3aa359 | refs/heads/master | 2023-06-11T04:59:07.786652 | 2021-07-07T13:27:28 | 2021-07-07T13:27:28 | 348,686,584 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Task: read the first letter of a weekday name to tell which day it is; if two days share the same first letter, go on to check the second letter.
# First look up the English names for Monday through Sunday: Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, Sunday
print("Enter the first letter of the weekday (case-sensitive): ", end="")
first = input()  # renamed from `str` to avoid shadowing the built-in, which the reference answer below relies on
if first == "M":
    print("Monday")
elif first == "W":
    print("Wednesday")
elif first == "F":
    print("Friday")
elif first == "T":
    print("Enter the second letter of the weekday (case-sensitive): ", end="")
    second = input()
    if second == "u":
        print("Tuesday")
    elif second == "h":
        print("Thursday")
elif first == "S":
    print("Enter the second letter of the weekday (case-sensitive): ", end="")
    second = input()
    if second == "a":
        print("Saturday")
    elif second == "u":
        print("Sunday")
# Reference answer -- the dictionary-based version is clearly more concise!
weekT = {'h': 'thursday',
         'u': 'tuesday'}
weekS = {'a': 'saturday',
         'u': 'sunday'}
week = {'t': weekT,
        's': weekS,
        'm': 'monday',
        'w': 'wednesday',  # fixed typo: was 'wensday'
        'f': 'friday'}
a = week[str(input('Enter the first letter: ')).lower()]
if a == weekT or a == weekS:
    print(a[str(input('Enter the second letter: ')).lower()])
else:
print(a) | UTF-8 | Python | false | false | 1,270 | py | 86 | 实例031:字母识词.py | 85 | 0.563674 | 0.556367 | 0 | 40 | 22.975 | 72 |
WangGodder/FutureLab_ImageClassify | 498,216,239,593 | a00852cbb5401647283ee02d20e5cadd2e0da356 | 9fb101a6c494c9b2cf8a119d6fe6ef92aef66c36 | /Classy/datasets/MyTestDataset.py | dcadc047e06a1ab0a6bad7ff3a9c02b8720d00d1 | []
| no_license | https://github.com/WangGodder/FutureLab_ImageClassify | 2e183fbb955798076d06b955bc28cfe57cc4b2e9 | 002300c97a080eab00b2930929021900c43664f2 | refs/heads/master | 2020-03-20T23:42:27.929619 | 2018-06-19T09:00:11 | 2018-06-19T09:00:11 | 137,859,261 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import torch.utils.data as data
from PIL import Image
import os
import os.path
import csv
def has_file_allowed_extension(filename, extensions):
"""Checks if a file is an allowed extension.
Args:
filename (string): path to a file
Returns:
bool: True if the filename ends with a known image extension
"""
filename_lower = filename.lower()
return any(filename_lower.endswith(ext) for ext in extensions)
def make_dataset(dir, extensions):
images = []
dir = os.path.expanduser(dir)
# for root, _, fnames in sorted(os.walk(dir)):
for root, _, fnames in os.walk(dir):
# for fname in sorted(fnames):
for fname in fnames:
if has_file_allowed_extension(fname, extensions):
path = os.path.join(root, fname)
item = path
images.append(item)
return images
def read_list(LIST_PATH):
if os.path.exists(LIST_PATH):
        file_ids = []  # renamed from `list` to avoid shadowing the built-in
        with open(LIST_PATH, 'r') as csvfile:
            rows = csv.reader(csvfile)
            for row in rows:
                if row[0] != 'FILE_ID':  # skip the header row
                    file_ids.append(row[0])
        return file_ids
else:
print('No {} file'.format(LIST_PATH))
IMG_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif']
def default_loader(path):
from torchvision import get_image_backend
if get_image_backend() == 'accimage':
return accimage_loader(path)
else:
return pil_loader(path)
def check_csv_image(list, data_root):
n = 0
error = 0
for name in list:
if any(os.path.exists(os.path.join(data_root, name + e)) for e in IMG_EXTENSIONS):
n = n +1
else:
error = error + 1
print('filename : {} is not exist'.format(name))
if len(list) == n:
print('Total check {} files, and all exist\n\r'.format(n))
return True
else:
print('Total check {} files, and error {} files\n\r'.format(n, error))
return False
class MyTestDataset(data.Dataset):
def __init__(self, data_root, csv_path, loader=default_loader, extensions=IMG_EXTENSIONS, transform=None, target_transform=None):
        file_list = read_list(csv_path)  # renamed from `list` to avoid shadowing the built-in
        samples = make_dataset(data_root, extensions)
        if check_csv_image(file_list, data_root) and len(file_list) == len(samples):
self.data_root = data_root
self.loader = loader
self.extensions = extensions
self.samples = samples
self.transform = transform
self.target_transform = target_transform
        else:
            raise RuntimeError('Cannot create dataset: the CSV list and the image files do not match!')
def __getitem__(self, index):
"""
Args:
index (int): Index
Returns:
            tuple: (sample, path) where path is the file path of the loaded image
"""
path = self.samples[index]
sample = self.loader(path)
if self.transform is not None:
sample = self.transform(sample)
return sample, path
def __len__(self):
return len(self.samples)
def __repr__(self):
fmt_str = 'Dataset ' + self.__class__.__name__ + '\n'
fmt_str += ' Number of datapoints: {}\n'.format(self.__len__())
        fmt_str += '    Root Location: {}\n'.format(self.data_root)  # fixed: the attribute is data_root, not root
tmp = ' Transforms (if any): '
fmt_str += '{0}{1}\n'.format(tmp, self.transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
tmp = ' Target Transforms (if any): '
fmt_str += '{0}{1}'.format(tmp, self.target_transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
return fmt_str
def pil_loader(path):
# open path as file to avoid ResourceWarning (https://github.com/python-pillow/Pillow/issues/835)
with open(path, 'rb') as f:
img = Image.open(f)
return img.convert('RGB')
def accimage_loader(path):
import accimage
try:
return accimage.Image(path)
except IOError:
# Potentially a decoding problem, fall back to PIL.Image
return pil_loader(path)
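
# --- Usage sketch (added for illustration; not part of the original module) ---
# The dataset root and CSV path below are hypothetical placeholders.
if __name__ == '__main__':
    from torchvision import transforms
    test_set = MyTestDataset('data/test_images', 'data/test_list.csv',
                             transform=transforms.Compose([
                                 transforms.Resize((224, 224)),
                                 transforms.ToTensor(),
                             ]))
    loader = data.DataLoader(test_set, batch_size=32, shuffle=False)
    for samples, paths in loader:
        print(samples.shape, len(paths))
        break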
| UTF-8 | Python | false | false | 4,207 | py | 15 | MyTestDataset.py | 13 | 0.557642 | 0.554552 | 0 | 121 | 32.752066 | 133 |
sanskrit/ambuda | 19,413,252,190,717 | 877bc242caf89c02abead8e95dd3fccfa4502e32 | 2ac808781ac59fcb5539d351e29f55e5ef967fc7 | /test/ambuda/views/test_site.py | 0008f061cfa8f80b5c2e164ef224dd3fd5214d4e | [
"MIT"
]
| permissive | https://github.com/sanskrit/ambuda | b2b105c8b92f33d0190582e70002ba011c3312aa | 1962e83b8e90a4ee0ecbc502e709c20d094932d6 | refs/heads/main | 2022-07-09T03:54:13.162529 | 2022-07-09T02:49:00 | 2022-07-09T02:49:01 | 11,392,229 | 23 | 8 | null | null | null | null | null | null | null | null | null | null | null | null | null | def test_index(client):
resp = client.get("/")
assert "complete archive" in resp.text
def test_about(client):
resp = client.get("/about/")
assert "<h1>About</h1>" in resp.text
def test_404(client):
resp = client.get("/unknown-page/")
assert "<h1>Not Found" in resp.text
assert resp.status_code == 404
| UTF-8 | Python | false | false | 333 | py | 122 | test_site.py | 70 | 0.636637 | 0.60961 | 0 | 14 | 22.785714 | 42 |
liran05/Algorithm | 19,069,654,801,292 | a055cbad3487a68d616baf8b60c20ca45634eb87 | d0f6211a1c771f9e4fac70f04a9c88dc8057f17a | /pattern_match/trie.py | 552328390147edda7654c01556af95e963c7389a | []
| no_license | https://github.com/liran05/Algorithm | f38d036dabf017280c70a33809c8da5a7648656f | cf859df0395b7edbe654ab1b6f360776ac02a5c7 | refs/heads/master | 2020-06-26T06:12:11.062384 | 2019-07-30T02:11:19 | 2019-07-30T02:11:19 | 199,556,774 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
#-*- coding: utf-8 -*-
import sys
import pdb
reload(sys)
sys.setdefaultencoding('utf-8')
class TrieNode(object):
"""Trie节点
Attributes:
_val: 本节点的值(非None即作为结束判断条件)
_next: 后继节点
"""
def __init__(self, value=None):
self._val = value
self._next = {}
def set_value(self, value=None):
"""为当前节点设置值
"""
self._val = value
def get_value(self):
"""获取当前节点的值
"""
return self._val
def set_next(self, key, value=None):
"""为当前节点添加一个后继节点
"""
if key not in self._next:
self._next[key] = TrieNode(value)
return self._next[key]
def get_next(self, key):
"""从当前节点获取指定的后继节点
"""
if key not in self._next:
return None
return self._next[key]
class Trie(object):
"""Trie树
Attribures:
_root: 根节点
"""
def __init__(self):
        # create the root node
self._root = TrieNode()
def insert(self, word):
"""将一个单词插入trie树
"""
curr = self._root
for char in word:
curr = curr.set_next(char)
curr.set_value(True)
def search(self, word):
"""检索一个单词是否trie树中存在
"""
curr = self._root
ret = False
for i, c in enumerate(word):
curr = curr.get_next(c)
if curr is None:
break
if i + 1 == len(word) and curr.get_value() is True:
ret = True
break
return ret
def startsWith(self, prefix):
"""检索trie树中是否有prefix开头的单词
"""
curr = self._root
ret = True
for c in prefix:
curr = curr.get_next(c)
if curr is None:
ret = False
break
return ret
def main():
trie = Trie()
trie.insert("app")
trie.insert("apple")
print trie.search("app")
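    # Added demonstrations (not in the original): exercise search/startsWith
    print trie.search("apple")    # True - "apple" was inserted
    print trie.search("ap")       # False - "ap" is only a prefix
    print trie.startsWith("ap")   # True - inserted words start with "ap"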
if __name__ == '__main__':
main()
| UTF-8 | Python | false | false | 2,203 | py | 4 | trie.py | 4 | 0.480663 | 0.479156 | 0 | 101 | 18.712871 | 63 |
Nacho-sh/Minesweeper-AI | 8,667,244,040,501 | f143ee1293751b062abe673a8dd51b5ba667af85 | d52da84a320058403ec8c9f82f5b218e19f61a1a | /click.py | 01f270ef3bbb3c7e8ee32326f93eeaebe24f3cae | []
| no_license | https://github.com/Nacho-sh/Minesweeper-AI | cab983c241ca79d02547b51b05eb628d8045ad0b | 4a7ae79cd9c1c37867a11dcea0d59b30b6250c0c | refs/heads/main | 2023-07-04T21:57:06.910885 | 2021-08-15T14:14:03 | 2021-08-15T14:14:03 | 396,373,895 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import pyautogui as gui
def click(position, board, length, button='left'):
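    # Translate a flat cell index into screen coordinates: assumes 20 px cells
    # with the click aimed 10 px into the cell from the board's top-left corner
    # (board[0], board[1]); length is the number of cells per row.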
gui.click(board[0] + 10 + position % length * 20, board[1] + position // length * 20 + 10, button=button)
| UTF-8 | Python | false | false | 192 | py | 4 | click.py | 4 | 0.651042 | 0.598958 | 0 | 5 | 36.4 | 109 |
kayartaya-vinod/2017_08_PHILIPS_PYTHON | 2,147,483,681,618 | 598f30422edb5efa30431dc3f24fecfcbe42ae6a | 95bab4037a2c56ead3750e4cdcd2f3a24b410e2e | /Examples/userexceptions.py | 7dd2d37665fab921a595f42cbcde7df6c9e711f5 | []
| no_license | https://github.com/kayartaya-vinod/2017_08_PHILIPS_PYTHON | d6769d7b6f2566e2d01a635656cd1ca686d058bc | ebddd588fcb9c30f1724f0a547f3e39e242e76b8 | refs/heads/master | 2020-03-18T12:29:19.393363 | 2019-02-06T08:04:46 | 2019-02-06T08:04:46 | 134,728,970 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # this and every file with .py is known as a module
# A module may comprise: variables, functions, classes, or some executable code
# variables, functions and classes from this module can be used in other modules
class InvalidAgeException(Exception):
def __init__(self, message = "Invalid age. Must be a number between 1 and 120"):
self.__message = message
def __str__(self):
return self.__message
class InvalidNameException(Exception):
def __init__(self, message = "Invalid name. Must be a string"):
self.__message = message
def __str__(self):
return self.__message
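
# --- Usage sketch (added for illustration; not part of the original module) ---
def set_age(age):
    if not isinstance(age, int) or not 1 <= age <= 120:
        raise InvalidAgeException()
    return age

if __name__ == '__main__':
    try:
        set_age(150)
    except InvalidAgeException as e:
        print(e)  # Invalid age. Must be a number between 1 and 120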
| UTF-8 | Python | false | false | 588 | py | 29 | userexceptions.py | 27 | 0.714286 | 0.707483 | 0 | 19 | 29.894737 | 81 |
edureka-demo-aj/edureka-aws-lambda | 12,455,405,161,509 | cfaa2a87c11923c6f354476cffd62b9802f94014 | e7cf0b5f5cd05792d662f5f919094e42b42c764b | /demo-1/web-app/upload_to_s3.py | 90f24d943d6f4a2e6580c7b9b975a285ba628487 | []
| no_license | https://github.com/edureka-demo-aj/edureka-aws-lambda | 0033e3e1cdc0b8ca82f33bcda4dd9b5eebc0f022 | 392d704e53376a805bfd72a1ccae381c5f52c0df | refs/heads/main | 2023-02-06T16:59:50.273863 | 2020-12-30T09:41:42 | 2020-12-30T09:41:42 | 322,752,803 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import logging
import boto3
from botocore.exceptions import ClientError
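# Sort key for S3 objects: the Unix timestamp of LastModified. Note that
# strftime('%s') is a platform-specific (glibc) extension -- it works on Linux
# but is not portable.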
get_last_modified = lambda obj: int(obj['LastModified'].strftime('%s'))
def upload_file(file_name, bucket, object_name=None):
"""Upload a file to an S3 bucket
:param file_name: File to upload
:param bucket: Bucket to upload to
:param object_name: S3 object name. If not specified then file_name is used
:return: True if file was uploaded, else False
"""
# If S3 object_name was not specified, use file_name
if object_name is None:
object_name = file_name
# Upload the file
s3_client = boto3.client('s3',
aws_access_key_id="YOUR ACCESS ID",
aws_secret_access_key="SECRET KEY",
)
try:
response = s3_client.upload_file(file_name, bucket, object_name)
        objs = s3_client.list_objects_v2(Bucket=bucket)['Contents']  # use the bucket argument rather than a hard-coded name
last_added = [obj['Key'] for obj in sorted(objs, key=get_last_modified)][-1]
print("LAST FILE : ", last_added)
except ClientError as e:
logging.error(e)
return False
return True
if __name__ == "__main__":
upload_file("image-2.png", "lambdademo-1")
| UTF-8 | Python | false | false | 1,239 | py | 8 | upload_to_s3.py | 2 | 0.612591 | 0.601291 | 0 | 34 | 35.411765 | 84 |
bopopescu/Aquila | 14,130,442,422,356 | d17d7ea4e7bc0962a42ff66f0cc17f5f50192001 | 495ec27dc747f3f2d6094a7792fbcbd4ec8d6007 | /cmdb/forms.py | 94c60ab85cd6170d8df82f0de1b3f4073d57c59e | []
| no_license | https://github.com/bopopescu/Aquila | 684ada4a039906d914f46c91e9e87222c10f2888 | 67a0ae68c9080b2952ef31d9570c3a0358d7deda | refs/heads/master | 2022-11-04T21:37:24.163391 | 2017-07-20T07:39:37 | 2017-07-20T07:39:37 | 281,633,709 | 0 | 0 | null | true | 2020-07-22T09:27:49 | 2020-07-22T09:27:48 | 2020-06-06T02:56:07 | 2017-07-20T07:39:37 | 1,738 | 0 | 0 | 0 | null | false | false | from django import forms
from django.forms import widgets
from django.forms import fields
from cmdb import models as cmdb_models
class HostAppend(forms.Form):
host_id = fields.CharField(widget=widgets.TextInput(attrs={'style': 'display: none;'}),
required=False)
    host_ip = fields.GenericIPAddressField(error_messages={'required': 'Host address cannot be empty', 'invalid': 'Invalid IP address'},
protocol='ipv4',
                                           label='Host address',
widget=widgets.TextInput(attrs={'class': 'form-control'}),
strip=True
)
    host_user = fields.CharField(label='Host user',
widget=widgets.TextInput(attrs={'class': 'form-control'}),
strip=True)
    host_pass = fields.CharField(label='Host password',
widget=widgets.TextInput(attrs={'class': 'form-control'}),
strip=True)
    host_port = fields.CharField(label='Host port',
initial=22,
widget=widgets.TextInput(attrs={'class': 'form-control'}),
                                 error_messages={'required': 'host_port: please enter a valid port number'},
strip=True)
app_type = fields.CharField(
widget=widgets.Select(choices=[]),
        label='Application type',
        error_messages={'required': 'Application type cannot be empty'},
strip=True
)
host_group = fields.CharField(
widget=widgets.Select(choices=[]),
        label='Host group',
        error_messages={'required': 'Host group cannot be empty'},
strip=True
)
app_user = fields.CharField(required=False,
                                label='Application user',
widget=widgets.TextInput(attrs={'class': 'form-control'}),
strip=True)
app_pass = fields.CharField(required=False,
                                label='Application password',
widget=widgets.TextInput(attrs={'class': 'form-control'}),
strip=True)
app_port = fields.CharField(required=False,
                                label='Application port',
widget=widgets.TextInput(attrs={'class': 'form-control'}),
                                error_messages={'required': 'app_port: please enter a valid port number'},
strip=True)
def __init__(self, *args, **kwargs):
super(HostAppend, self).__init__(*args, **kwargs)
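        # Refresh the <select> choices from the database on every form
        # instantiation so newly added host groups / app types appear
        # without restarting the server.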
self.fields['host_group'].widget.choices = cmdb_models.HostGroup.objects.values_list('id', 'host_group_jd')
self.fields['app_type'].widget.choices = cmdb_models.AppType.objects.values_list('id', 'app_name')
| UTF-8 | Python | false | false | 3,004 | py | 36 | forms.py | 22 | 0.484188 | 0.483134 | 0 | 59 | 47.237288 | 115 |
rishabhjaincoder/Python-Demos | 18,683,107,772,462 | 50ffe54043e047e80640e32226a21c9cbf196a0a | ec4c311f6b16558602caed62666c2000b3a56f79 | /19-Misc/tkinter-db-2.py | bca657f702a9a1f91977acd728a4a889c4b88e36 | []
| no_license | https://github.com/rishabhjaincoder/Python-Demos | 72fb3e042fb22c4fe83554d439357c88e613642d | c81054b55ab46b018daf22ac5c9240810ab5ff81 | refs/heads/master | 2020-03-24T19:19:25.337903 | 2019-07-13T17:40:26 | 2019-07-13T17:40:26 | 142,919,304 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from tkinter import *
import MySQLdb
#Read Data
con=MySQLdb.connect('localhost',user='root',password='')
con.select_db('b11')
cur = con.cursor()
cur.execute("SELECT * FROM student")
records=cur.fetchall()
con.close()
win=Tk()
win.geometry('250x200+100+200')
win.title('Records')
rollLabel = Label(win, text="Roll", width=10)
rollLabel.grid(row=0, column=0)
nameLabel = Label(win, text="Name", width=10)
nameLabel.grid(row=0, column=1)
courseLabel = Label(win, text="Course", width=10)
courseLabel.grid(row=0, column=2)
feesLabel = Label(win, text="Fees", width=10)
feesLabel.grid(row=0, column=3)
durationLabel = Label(win, text="Duration", width=10)
durationLabel.grid(row=0, column=4)
row=1
for record in records:
Label(win, text=record[0]).grid(row=row, column=0)
Label(win, text=record[1]).grid(row=row, column=1)
Label(win, text=record[2]).grid(row=row, column=2)
Label(win, text=record[3]).grid(row=row, column=3)
Label(win, text=record[4]).grid(row=row, column=4)
    row = row + 1

win.mainloop()  # added fix: without mainloop() the window closes immediately when run as a script
| UTF-8 | Python | false | false | 1,022 | py | 10 | tkinter-db-2.py | 5 | 0.694716 | 0.649706 | 0 | 39 | 25.102564 | 56 |