repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
| prefix
stringlengths 0
8.16k
| middle
stringlengths 3
512
| suffix
stringlengths 0
8.17k
|
---|---|---|---|---|---|---|---|---|
niorehkids/firmanal
|
analyze.py
|
Python
|
mit
| 6,828 | 0.006737 |
#!/usr/bin/env python
import argparse
import sys
import re
import os
import locale
import subprocess
from multiprocessing import Process
def dbquery(query):
import psycopg2
db = psycopg2.connect(dbname = "firmware", user = "firmadyne", password = "firmadyne", host = "127.0.0.1")
ret = None
try:
cur = db.cursor()
cur.execute(query)
except BaseException:
traceback.print_exc()
finally:
if cur:
ret = cur.fetchall()
cur.close()
return ret
def source(iid):
# source code analysis
script = os.getcwd() + '/analysis/source.sh'
p = subprocess.run([script, str(iid)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print(p.stdout.decode())
print(p.stderr.decode())
# calculate the score of security
resultdir = os.getcwd() + '/results/' + str(iid) + '/source'
firmware_score = 0
for (rootdir, dirs, files) in os.walk(resultdir):
for outfile in files:
if outfile.endswith('.dec.c.out'):
file_score = 0
# calculate the score of this file
for line in open(rootdir + '/' + outfile, "r"):
line = line.strip()
if re.search('Hits/KSLOC@level\+', line):
sp = line.split()
file_score += float(sp[3])
file_score += float(sp[5])
file_score += float(sp[7])
file_score += float(sp[9])
file_score += float(sp[11])
file_score += float(sp[13])
# file_score transition function
file_score = 10 - 600 / (file_score + 60)
# store the file_score information in the database
#print(rootdir + '/' + outfile + ": " + str(file_score))
firmware_score += file_score
# firmware_score transition function
firmware_score = 10 - 500 / (firmware_score + 50)
# store the firmware_score information in the database
#print(str(iid) + ": " + firmware_score)
def angr(iid):
print('warning: the Angr function is under development')
# TODO
def afl(iid):
sys.path.append('./analysis')
import afl
resultdir = os.getcwd() + '/results/' + iid + '/afl'
afl.process(iid, resultdir)
def netafl(iid, ip):
resultdir = os.getcwd() + '/results/' + iid + '/netafl'
script = os.getcwd() + '/analysis/netafl.py'
print('warning: the network AFL function is under development')
# TODO
def metasploit(iid, ip):
sys.path.append('./analysis/metasploit')
import runExploits
exploits = list (runExploits.METASPLOIT_EXPLOITS.keys()) + list (runExploits.SHELL_EXPLOITS.keys())
resultdir = os.getcwd() + '/results/' + iid + '/metasploit'
if not os.path.isdir(resultdir):
if os.path.exists(resultdir):
os.remove(resultdir)
os.makedirs(resultdir, 0o755)
outfile = resultdir + "/%(exploit)s.log"
runExploits.process(ip, exploits, outfile)
def extract(input_file):
sys.path.append('./scripts')
import extractor
e = extractor.Extractor(input_file, 'images', True, False, False, '127.0.0.1', None)
ocwd = os.getcwd()
(iid, repeated) = e.extract()
os.chdir(ocwd)
return (iid, repeated)
def importdb(iid):
sys.path.append('./db')
import importdb
image = './images/' + str(iid) + '.tar.gz'
importdb.getarch(image)
importdb.process(iid, image)
def makeimage(iid):
p = subprocess.run(['sudo', './qemu/scripts/makeImage.sh', str(iid)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print(p.stdout.decode())
print(p.stderr.decode())
def infernetwork(iid):
p = subprocess.run(['./qemu/scripts/inferNetwork.sh', str(iid)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print(p.stdout.decode())
print(p.stderr.decode())
def getIP(iid):
ip = None
import psycopg2
db = psycopg2.connect(d
|
bname = "firmware", user = "firmadyne", password = "firmadyne", host = "127.0.0.1")
try:
cur = db.cursor()
cur.execute("SELECT ip FROM image WHERE id=" + iid)
except BaseException:
traceback.print_exc(
|
)
finally:
if cur:
ip = cur.fetchone()[0]
cur.close()
return ip
def rootfs_extracted(iid):
query = 'select rootfs_extracted from image where id=' + iid + ';'
return dbquery(query)[0][0]
def main():
os.chdir(os.path.dirname(os.path.realpath(__file__)))
parser = argparse.ArgumentParser(description="Linux-based firmware analysis")
parser.add_argument("input_file", action="store", help="Input firmware image")
parser.add_argument("-i", dest="id", action="store",
default=None, help="firmware ID")
parser.add_argument("-s", dest="source", action="store_true",
default=False, help="Enable source code analysis")
parser.add_argument("-a", dest="angr", action="store_true",
default=False, help="Enable static analysis with Angr")
parser.add_argument("-f", dest="afl", action="store_true",
default=False, help="Fuzzing the firmware binaries with AFL")
parser.add_argument("-n", dest="netafl", action="store_true",
default=False, help="Fuzzing the network services with AFL")
parser.add_argument("-m", dest="metasploit", action="store_true",
default=False, help="Penetration test with metasploit exploits")
arg = parser.parse_args()
(iid, repeated) = extract(arg.input_file)
if arg.id != None and iid != arg.id:
print('error: frontend firmware ID and backend image ID conflict')
sys.exit(1)
if not rootfs_extracted(iid):
print('error: cannot find rootfs')
sys.exit(1)
# importdb
if not repeated:
importdb(iid)
if arg.source:
iid = arg.id
s = Process(target=source, args=(iid,))
s.start()
# makeImage, inferNetwork
if not repeated:
makeimage(iid)
infernetwork(iid)
ip = getIP(iid)
if not ip:
print('warning: no interface detected')
if arg.angr:
a = Process(target=angr, args=(iid,))
a.start()
if arg.afl:
f = Process(target=afl, args=(iid,))
f.start()
if arg.netafl and ip:
n = Process(target=netafl, args=(iid, ip))
n.start()
if arg.metasploit and ip:
m = Process(target=metasploit, args=(iid, ip))
m.start()
# join
if arg.source:
s.join()
if arg.angr:
a.join()
if arg.afl:
f.join()
if arg.netafl and ip:
n.join()
if arg.metasploit and ip:
m.join()
if __name__ == '__main__':
main ()
|
skulumani/asteroid_dumbbell
|
blender_sim.py
|
Python
|
gpl-3.0
| 26,548 | 0.007571 |
"""Simulation of controlled dumbbell around Itokawa with
simulated imagery using Blender
This will generate the imagery of Itokawa from a spacecraft following
a vertical descent onto the surface.
4 August 2017 - Shankar Kulumani
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from scipy import integrate
import numpy as np
import pdb
import h5py, cv2
import visualization.plotting as plotting
from visualization import blender_camera
from dynamics import asteroid, dumbbell, controller, eoms
from kinematics import attitude
from visualization import blender
import inertial_driver as idriver
import relative_driver as rdriver
import datetime
def eoms_controlled_blender(t, state, dum, ast):
"""Inertial dumbbell equations of motion about an asteroid
This method must be used with the scipy.integrate.ode class instead of the
more convienent scipy.integrate.odeint. In addition, we can control the
dumbbell given full state feedback. Blender is used to generate imagery
during the simulation.
Inputs:
t - Current simulation time step
state - (18,) array which defines the state of the vehicle
pos - (3,) position of the dumbbell with respect to the
asteroid center of mass and expressed in the inertial frame
vel - (3,) velocity of the dumbbell with respect to the
asteroid center of mass and expressed in the inertial frame
R - (9,) attitude of the dumbbell with defines the
transformation of a vector in the dumbbell frame to the
inertial frame ang_vel - (3,) angular velocity of the dumbbell
with respect to the inertial frame and represented in the
dumbbell frame
ast - Asteroid class object holding the asteroid gravitational
model and other useful parameters
"""
# unpack the state
pos = state[0:3] # location of the center of mass in the inertial frame
vel = state[3:6] # vel of com in inertial frame
R = np.reshape(state[6:15],(3,3)) # sc body frame to inertial frame
ang_vel = state[15:18] # angular velocity of sc wrt inertial frame defined in body frame
Ra = attitude.rot3(ast.omega*t, 'c') # asteroid body frame to inertial frame
# unpack parameters for the dumbbell
J = dum.J
rho1 = dum.zeta1
rho2 = dum.zeta2
# position of each mass in the asteroid frame
z1 = Ra.T.dot(pos + R.dot(rho1))
z2 = Ra.T.dot(pos + R.dot(rho2))
z = Ra.T.dot(pos) # position of COM in asteroid frame
# compute the potential at this state
(U1, U1_grad, U1_grad_mat, U1laplace) = ast.polyhedron_potential(z1)
(U2, U2_grad, U2_grad_mat, U2laplace) = ast.polyhedron_potential(z2)
F1 = dum.m1*Ra.dot(U1_grad)
F2 = dum.m2*Ra.dot(U2_grad)
M1 = dum.m1 * attitude.hat_map(rho1).dot(R.T.dot(Ra).dot(U1_grad))
M2 = dum.m2 * attitude.hat_map(rho2).dot(R.T.dot(Ra).dot(U2_grad))
# generate image at this current state only at a specifc time
# blender.driver(pos, R, ast.omega * t, [5, 0, 1], 'test' + str.zfill(str(t), 4))
# use the imagery to figure out motion and pass to the controller instead
# of the true state
# calculate the desired attitude and translational trajectory
des_att_tuple = controller.body_fixed_pointing_attitude(t, state)
des_tran_tuple = controller.traverse_then_land_vertically(t, ast, final_pos=[0.550, 0, 0],
initial_pos=[2.550, 0, 0],
descent_tf=3600)
# input trajectory and compute the control inputs
# compute the control input
u_m = controller.attitude_controller(t, state, M1+M2, dum, ast, des_att_tuple)
u_f = controller.translation_controller(t, state, F1+F2, dum, ast, des_tran_tuple)
pos_dot = vel
vel_dot = 1/(dum.m1+dum.m2) *(F1 + F2 + u_f)
R_dot = R.dot(attitude.hat_map(ang_vel)).reshape(9)
ang_vel_dot = np.linalg.inv(J).dot(-np.cross(ang_vel,J.dot(ang_vel)) + M1 + M2 + u_m)
statedot = np.hstack((pos_dot, vel_dot, R_dot, ang_vel_dot))
return statedot
def eoms_controlled_blender_traverse_then_land(t, state, dum, ast):
"""Inertial dumbbell equations of motion about an asteroid
This method must be used with the scipy.integrate.ode class instead of the
more convienent scipy.integrate.odeint. In addition, we can control the
dumbbell given full state feedback. Blender is used to generate imagery
during the simulation.
The spacecraft will move horizontally for the first 3600 sec to a positon
[2.550, 0, 0] in the asteroid (and inertial) frame, then descend vertically
in the asteroid frame.
Inputs:
t - Current simulation time step
state - (18,) array which defines the state of the vehicle
pos - (3,) position of the dumbbell with respect to the
asteroid center of mass and expressed in the inertial frame
vel - (3,) velocity of the dumbbell with respect to the
asteroid center of mass and expressed in the inertial frame
R - (9,) attitude of the dumbbell with defines the
transformation of a vector in the dumbbell frame to the
inertial frame ang_vel - (3,) angular velocity of the dumbbell
with respect to the inertial frame and represented in the
dumbbell frame
ast - Asteroid class object holding the asteroid gravitational
model and other useful parameters
"""
# unpack the state
pos = state[0:3] # location of the center of mass in the inertial frame
vel = state[3:6] # vel of com in inertial frame
R = np.reshape(state[6:15],(3,3)) # sc body frame to inertial frame
ang_vel = state[15:18] # angular velocity of sc wrt inertial frame defined in body frame
Ra = attitude.rot3(ast.omega*(t - 3600), 'c') # asteroid body frame to inertial frame
# unpack parameters for the dumbbell
J = dum.J
rho1 = dum.zeta1
rho2 = dum.zeta2
# position of each mass in the asteroid frame
z1 = Ra.T.dot(pos + R.dot(rho1))
z2 = Ra.T.dot(pos + R.dot(rho2))
z = Ra.T.dot(pos) # position of COM in asteroid frame
# compute the potential at this state
(U1, U1_grad, U1_grad_mat, U1laplace) = ast.polyhedron_potential(z1)
(U2, U2_grad, U2_grad_mat, U2laplace) = ast.polyhedron_potential(z2)
F1 = dum.m1*Ra.dot(U1_grad)
F2 = dum.m2*Ra.dot(U2_grad)
M1 = dum.m1 * attitude.hat_map(rho1).dot(R.T.dot(Ra).dot(U1_grad))
M2 = dum.m2 * attitude.hat_map(rho2).dot(R.T.dot(Ra).dot(U2_grad))
# generate image at this current state only at a specifc time
# blender.driver(pos, R, ast.omega * t, [5, 0, 1], 'test' + str.zfill(str(t), 4))
# use the imagery to figure out motion and pass to the controller instead
# of the true state
# compute the control input
u_m = controller.attitude_traverse_then_land_controller(t, state, M1+M2, dum, ast)
u_f = controller.translation_traverse_then_land_controller(t, state, F1+F2, dum, ast)
pos_dot = vel
vel_dot = 1/(dum.m1+dum.m2) *(F1 + F2 + u_f)
R_dot = R.dot(attitude.hat_map(ang_vel)).reshape(9)
ang_vel_dot = np.linalg.inv(J).dot(-np.cross(ang_vel,J.dot(ang_vel)) + M1 + M2 + u_m)
statedot = np.hstack((pos_dot, vel_dot, R_dot, ang_vel_dot))
return statedot
def blender_traverse_then_land_sim():
# simulation parameters
outp
|
ut_path = './visualization/blender'
asteroid_name = 'itokawa_low'
# create a HDF5 dataset
hdf5_path = './data/itokawa_landing/{}_controlled_vertical_landing.
|
hdf5'.format(
datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
dataset_name = 'landing'
render = 'BLENDER'
image_modulus = 400
RelTol = 1e-6
AbsTol = 1e-6
ast_name = 'itokawa'
num_faces = 64
t0 = 0
dt = 1
tf = 7200
num_steps = 7200
periodic_pos = np.array([1.495746722510590,0.000001002669660,0.006129720493607])
periodic_vel = np.array([0.000000302161724,-0.000899607989820,-0.000000013286327])
ast = astero
|
Moth-Tolias/LetterBoy
|
LetterBoy_backend.py
|
Python
|
gpl-3.0
| 788 | 0.01269 |
"""Functions for the backend of LetterBoy"""
def lb_standardcase():
"""Capitalise the first letter of each sentence, and set all others to lowercase."""
pass
def lb_uppercase():
"""Cap
|
italise each letter."""
pass
def lb_lowercase():
"""Set all letters to lowercase."""
pass
def lb_camelcase():
"""Capitalise the first letter of each word, and set all others to lowercase."""
pass
def lb_staggercase():
"""Alternate each character between upper- and lower-case."""
pass
def lb_jumbles_nontrobo():
"""Jumble up text between the first and last letters in each word."""
pass
def lb_zcorrupt():
|
"""Add glitch text to the plaintext."""
pass
def lb_zstrip():
"""Remove glitch text."""
pass
|
G4brym/GetCompany.info
|
Main/handlers/utilities.py
|
Python
|
mit
| 1,191 | 0.006717 |
import uuid
import datetime as dt
import json
import urllib.request
import urllib.parse
from Main.handlers.settings import RECAPTCHA_SECRET_KEY
def get_title(title=""):
if title == "":
return "GetCompany info"
else:
return title + " - GetCompany info"
def get_new_token():
return str(str(uuid.uuid4()) + str(uuid.uuid4())).replace("-", "")[:32]
def get_timestamp(datetime):
return int(dt.datetime.strptime(datetime, "%Y-%m-%d %H:%M:%S.%f").timestamp())
def remove_microseconds(datetime):
return dt.datetime.strptime(datetime, "%Y-%m-%d %H:%M:%S.%f")
def get_remote_IP(request):
ip = request.META.get('HTTP_CF_CONNECTING_IP')
if ip is None:
ip = request.META.get('REMOTE_ADDR')
return ip
def check_recaptcha(response, ip):
if response == "":
return False
data = urllib.parse.urlencode({"
|
secret": RECAPTCHA_SECRET_KEY, "response": response, "remoteip": ip})
binary_data = data.encode('utf-8')
u = urllib.request.urlopen("https://www.google.com/recaptcha/api/siteverify", binary_data)
r
|
esult = u.read()
recaptcha_result = json.loads(result.decode('utf-8'))
return recaptcha_result["success"]
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_09_01/models/effective_network_security_rule.py
|
Python
|
mit
| 5,618 | 0.002492 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class EffectiveNetworkSecurityRule(Model):
"""Effective network security rules.
:param name: The name of the security rule s
|
pecified by the user (if
created by the user).
:type name: str
:param protocol: The network protocol this rule applies to. Possible
values are: 'Tcp', 'Udp', and 'All'. Possible values include: 'Tcp',
'Udp', 'All'
:type protocol: str or
~azure.mgmt.network.v2017_09_01.
|
models.EffectiveSecurityRuleProtocol
:param source_port_range: The source port or range.
:type source_port_range: str
:param destination_port_range: The destination port or range.
:type destination_port_range: str
:param source_port_ranges: The source port ranges. Expected values include
a single integer between 0 and 65535, a range using '-' as seperator (e.g.
100-400), or an asterix (*)
:type source_port_ranges: list[str]
:param destination_port_ranges: The destination port ranges. Expected
values include a single integer between 0 and 65535, a range using '-' as
seperator (e.g. 100-400), or an asterix (*)
:type destination_port_ranges: list[str]
:param source_address_prefix: The source address prefix.
:type source_address_prefix: str
:param destination_address_prefix: The destination address prefix.
:type destination_address_prefix: str
:param source_address_prefixes: The source address prefixes. Expected
values include CIDR IP ranges, Default Tags (VirtualNetwork,
AureLoadBalancer, Internet), System Tags, and the asterix (*).
:type source_address_prefixes: list[str]
:param destination_address_prefixes: The destination address prefixes.
Expected values include CIDR IP ranges, Default Tags (VirtualNetwork,
AureLoadBalancer, Internet), System Tags, and the asterix (*).
:type destination_address_prefixes: list[str]
:param expanded_source_address_prefix: The expanded source address prefix.
:type expanded_source_address_prefix: list[str]
:param expanded_destination_address_prefix: Expanded destination address
prefix.
:type expanded_destination_address_prefix: list[str]
:param access: Whether network traffic is allowed or denied. Possible
values are: 'Allow' and 'Deny'. Possible values include: 'Allow', 'Deny'
:type access: str or
~azure.mgmt.network.v2017_09_01.models.SecurityRuleAccess
:param priority: The priority of the rule.
:type priority: int
:param direction: The direction of the rule. Possible values are: 'Inbound
and Outbound'. Possible values include: 'Inbound', 'Outbound'
:type direction: str or
~azure.mgmt.network.v2017_09_01.models.SecurityRuleDirection
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'protocol': {'key': 'protocol', 'type': 'str'},
'source_port_range': {'key': 'sourcePortRange', 'type': 'str'},
'destination_port_range': {'key': 'destinationPortRange', 'type': 'str'},
'source_port_ranges': {'key': 'sourcePortRanges', 'type': '[str]'},
'destination_port_ranges': {'key': 'destinationPortRanges', 'type': '[str]'},
'source_address_prefix': {'key': 'sourceAddressPrefix', 'type': 'str'},
'destination_address_prefix': {'key': 'destinationAddressPrefix', 'type': 'str'},
'source_address_prefixes': {'key': 'sourceAddressPrefixes', 'type': '[str]'},
'destination_address_prefixes': {'key': 'destinationAddressPrefixes', 'type': '[str]'},
'expanded_source_address_prefix': {'key': 'expandedSourceAddressPrefix', 'type': '[str]'},
'expanded_destination_address_prefix': {'key': 'expandedDestinationAddressPrefix', 'type': '[str]'},
'access': {'key': 'access', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'direction': {'key': 'direction', 'type': 'str'},
}
def __init__(self, **kwargs):
super(EffectiveNetworkSecurityRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.protocol = kwargs.get('protocol', None)
self.source_port_range = kwargs.get('source_port_range', None)
self.destination_port_range = kwargs.get('destination_port_range', None)
self.source_port_ranges = kwargs.get('source_port_ranges', None)
self.destination_port_ranges = kwargs.get('destination_port_ranges', None)
self.source_address_prefix = kwargs.get('source_address_prefix', None)
self.destination_address_prefix = kwargs.get('destination_address_prefix', None)
self.source_address_prefixes = kwargs.get('source_address_prefixes', None)
self.destination_address_prefixes = kwargs.get('destination_address_prefixes', None)
self.expanded_source_address_prefix = kwargs.get('expanded_source_address_prefix', None)
self.expanded_destination_address_prefix = kwargs.get('expanded_destination_address_prefix', None)
self.access = kwargs.get('access', None)
self.priority = kwargs.get('priority', None)
self.direction = kwargs.get('direction', None)
|
FireballDWF/cloud-custodian
|
tools/c7n_azure/c7n_azure/resources/storage.py
|
Python
|
apache-2.0
| 14,922 | 0.002211 |
# Copyright 2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import jsonpickle
from azure.cosmosdb.table import TableService
from azure.mgmt.storage.models import IPRule, \
NetworkRuleSet, StorageAccountUpdateParameters, VirtualNetworkRule
from azure.storage.blob import BlockBlobService
from azure.storage.common.models import RetentionPolicy, Logging
from azure.storage.file import FileService
from azure.storage.queue import QueueService
from c7n_azure.actions.base import AzureBaseAction
from c7n_azure.constants import BLOB_TYPE, FILE_TYPE, QUEUE_TYPE, TABLE_TYPE
from c7n_azure.filters import FirewallRulesFilter, ValueFilter
from c7n_azure.provider import resources
from c7n_azure.resources.arm import ArmResourceManager
from c7n_azure.storage_utils import StorageUtilities
from c7n_azure.utils import ThreadHelper
from netaddr import IPSet
from c7n.exceptions import PolicyValidationError
from c7n.filters.core import type_schema
from c7n.utils import local_session, get_annotation_prefix
@resources.register('storage')
class Storage(ArmResourceManager):
"""Storage Account Resource
:example:
Finds all Storage Accounts in the subscription.
.. code-block:: yaml
policies:
- name: find-all-storage-accounts
resource: azure.storage
"""
class resource_type(ArmResourceManager.resource_type):
doc_groups = ['Storage']
service = 'azure.mgmt.storage'
client = 'StorageManagementClient'
enum_spec = ('storage_accounts', 'list', None)
diagnostic_settings_enabled = False
resource_type = 'Microsoft.Storage/storageAccounts'
@Storage.action_registry.register('set-network-rules')
class StorageSetNetworkRulesAction(AzureBaseAction):
""" Set Network Rules Action
Updates Azure Storage Firewalls and Virtual Networks settings.
:example:
Find storage accounts without any firewall rules.
Configure default-action to ``Deny`` and then allow:
- Azure Logging and Metrics services
- Two specific IPs
- Two subnets
.. code-block:: yaml
policies:
- name: add-storage-firewall
resource: azure.storage
filters:
- type: value
key: properties.networkAcls.ipRules
value_type: size
op: eq
value
|
: 0
actions:
- type: set-network-rules
default-action: Deny
bypass: [Logging, Metrics]
ip-rules:
- ip-address-or-range: 11.12.13.14
- ip-address-or-range: 21.22.23.24
virtual-network-rules:
- virtual-network-
|
resource-id: <subnet_resource_id>
- virtual-network-resource-id: <subnet_resource_id>
"""
schema = type_schema(
'set-network-rules',
required=['default-action'],
**{
'default-action': {'enum': ['Allow', 'Deny']},
'bypass': {'type': 'array', 'items': {'enum': ['AzureServices', 'Logging', 'Metrics']}},
'ip-rules': {
'type': 'array',
'items': {'ip-address-or-range': {'type': 'string'}}
},
'virtual-network-rules': {
'type': 'array',
'items': {'virtual-network-resource-id': {'type': 'string'}}
}
}
)
def _prepare_processing(self,):
self.client = self.manager.get_client()
def _process_resource(self, resource):
rule_set = NetworkRuleSet(default_action=self.data['default-action'])
if 'ip-rules' in self.data:
rule_set.ip_rules = [
IPRule(
ip_address_or_range=r['ip-address-or-range'],
action='Allow') # 'Allow' is the only allowed action
for r in self.data['ip-rules']]
if 'virtual-network-rules' in self.data:
rule_set.virtual_network_rules = [
VirtualNetworkRule(
virtual_network_resource_id=r['virtual-network-resource-id'],
action='Allow') # 'Allow' is the only allowed action
for r in self.data['virtual-network-rules']]
if len(self.data.get('bypass', [])) > 0:
rule_set.bypass = ','.join(self.data['bypass'])
else:
rule_set.bypass = 'None'
self.client.storage_accounts.update(
resource['resourceGroup'],
resource['name'],
StorageAccountUpdateParameters(network_rule_set=rule_set))
@Storage.filter_registry.register('firewall-rules')
class StorageFirewallRulesFilter(FirewallRulesFilter):
def __init__(self, data, manager=None):
super(StorageFirewallRulesFilter, self).__init__(data, manager)
self._log = logging.getLogger('custodian.azure.storage')
@property
def log(self):
return self._log
def _query_rules(self, resource):
ip_rules = resource['properties']['networkAcls']['ipRules']
resource_rules = IPSet([r['value'] for r in ip_rules])
return resource_rules
@Storage.filter_registry.register('storage-diagnostic-settings')
class StorageDiagnosticSettingsFilter(ValueFilter):
"""Filters storage accounts based on its diagnostic settings. The filter requires
specifying the storage type (blob, queue, table, file) and will filter based on
the settings for that specific type.
:example:
Find all storage accounts that have a 'delete' logging setting disabled.
.. code-block:: yaml
policies:
- name: find-accounts-with-delete-logging-disabled
resource: azure.storage
filters:
- or:
- type: storage-diagnostic-settings
storage-type: blob
key: logging.delete
op: eq
value: False
- type: storage-diagnostic-settings
storage-type: queue
key: logging.delete
op: eq
value: False
- type: storage-diagnostic-settings
storage-type: table
key: logging.delete
op: eq
value: False
"""
schema = type_schema('storage-diagnostic-settings',
rinherit=ValueFilter.schema,
required=['storage-type'],
**{'storage-type': {
'type': 'string',
'enum': [BLOB_TYPE, QUEUE_TYPE, TABLE_TYPE, FILE_TYPE]}}
)
def __init__(self, data, manager=None):
super(StorageDiagnosticSettingsFilter, self).__init__(data, manager)
self.storage_type = data.get('storage-type')
self.log = logging.getLogger('custodian.azure.storage')
def process(self, resources, event=None):
session = local_session(self.manager.session_factory)
token = StorageUtilities.get_storage_token(session)
result, errors = ThreadHelper.execute_in_parallel(
resources=resources,
event=event,
execution_method=self.process_resource_set,
executor_factory=self.executor_factory,
log=self.log,
session=session,
token=token
)
return result
def process_resource_set(self, resources, event=None, session=None, token=
|
incuna/incuna-bookmarks
|
bookmarks/templatetags/bookmark_tags.py
|
Python
|
mit
| 421 | 0.007126 |
from django import template
from bookmarks.models import BookmarkInstance
from tagging.models import Tag
register = template.Library()
@registe
|
r.inclusion_tag('bookmarks/tags.html')
def show_bookmarks_tags():
""" Show a box with tags for all articles that belong to current site.
"""
return {'bookmark_tags': Tag.objects.usage_for_queryset
|
(queryset=BookmarkInstance.on_site.all(), counts=True, min_count=1)}
|
meerkat-code/meerkat_auth
|
translate.py
|
Python
|
mit
| 1,780 | 0.008989 |
"""
Helper file to manage translations for the Meerkat Authentication module.
We have two types of translations, general and implementation specific
The general translations are extracted from the python, jijna2 and js files.
"""
from csv import DictReader
import a
|
rgparse
import os
import shutil
import datetime
from babel.messages.pofile import read_po, write_po
from babel.messages.catalog import Catalog, Message
from babel._compat import BytesIO
parser = argparse.ArgumentParser()
parser.add_
|
argument("action",
choices=["update-po", "initialise", "compile" ],
help="Choose action" )
parser.add_argument("-l", type=str,
help="Two letter langauge code")
if __name__ == "__main__":
args = parser.parse_args()
lang_dir = "meerkat_auth"
if args.action == "update-po":
os.system("pybabel extract -F babel.cfg -o {}/messages.pot .".format(lang_dir) )
os.system("pybabel update -i {}/messages.pot -d {}/translations".format(lang_dir, lang_dir) )
os.system("rm {}/messages.pot".format(lang_dir))
elif args.action == "initialise":
if args.l and len(args.l) == 2:
os.system("pybabel extract -F babel.cfg -o {}/messages.pot .".format(lang_dir) )
os.system("pybabel init -i {}/messages.pot -d {}/translations -l {}".format(
lang_dir, lang_dir,args.l
))
os.system("pybabel update -i {}/messages.pot -d {}/translations".format(lang_dir, lang_dir) )
os.system("rm {}/messages.pot".format(lang_dir))
else:
print("Need to specify a two letter language code")
elif args.action == "compile":
os.system("pybabel compile -d {}/translations".format(lang_dir))
|
Diego-debian/Free-infrarossi
|
free_infrarossi/bin/Atenuacion.py
|
Python
|
gpl-3.0
| 2,477 | 0.031617 |
#/usr/bin/python
#!*-* coding:utf-8 *-*
# Este script es sofware libre. Puede redistribuirlo y/o modificarlo bajo
# los terminos de la licencia pública general de GNU, según es publicada
# por la free software fundation bien la versión 3 de la misma licencia
# o de cualquier versión posterior. (según su elección ).
# Si usted hace alguna modificación en esta aplicación, deberá siempre
# mencionar el autor original de la misma.
# Autor:
# Universidad Distrital Francisco Jose
# Grupo de fisica e informatica
# Diego Alberto Parra Garzón
# Dr Julian Andres Salamanca Bernal
# Colombia, Bogota D.C.
import serial
import os
import subprocess
import math
import time
import Gnuplot
from Tkinter import *
import tkMessageBox
import Tkinter
import shutil
class Gramo():
def Atenua(self):
bicho = Tk()
bicho.geometry("280x170+200+90")
bicho.config(bg="white")
bicho.title("Infrarossi")
bicho.resizable(width=0, height=0)
def Verifica():
print "ola"
def Salir():
tkMessageBox.showinfo("Infrarossi", message= "Saliendo .... ")
arduino = serial.Serial("/dev/rfcomm0", 9600)
arduino.write('aa')
ex
|
it()
exit()
def Grafica():
os.system("python g_p_Ate.py &")
def Comenzar1():
tkMe
|
ssageBox.showinfo("Infrarossi", message= "Se procede a capturar datos, para detener el proceso cierre la ventana de captura de datos 'de color azul'")
os.system("xterm -T Infrarossi -geom 50x8+185+100 +cm -bg blue -e python bin/c_p_Ate.py &")
# os.system("python bin/c_p_Ate.py")
# --------------------------------CONFIGURACION DE VENTANA ------------------------------------------------------------------------------
X=8
Y=10
lblTitulo = Label(bicho, text="ATENUACION", fg = ("blue"), bg = ("white"), font = ("Century Schoolbook L",23)).place(x=30, y=20)
btnConectar1 = Button(bicho, text= " INICIAR ", width=5, height=1, command= Comenzar1).place(x=20+X, y=100+Y)
btnSalir = Button(bicho, text= " SALIR ", width=5, height=1, command= Salir).place(x=170+X, y=100+Y)
btnGrafica = Button(bicho, text= " GRAFICA ", width=5, height=1, command= Grafica).place(x=95+X, y=100+Y)
Verifica()
bicho.mainloop()
def __init__(self):
self.Atenua()
self.__del__()
def __del__(self):
print ("PROGRAMA TERMINADO")
modulo = Gramo()
|
lasote/conan
|
conans/client/conf/config_installer.py
|
Python
|
mit
| 4,484 | 0.001338 |
import os
from conans.tools import unzip
import shutil
from conans.util.files import rmdir, mkdir
from conans.client.remote_registry import RemoteRegistry
from conans import tools
from conans.errors import ConanException
def _handle_remotes(registry_path, remote_file, output):
registry = RemoteRegistry(registry_path, output)
new_registry = RemoteRegistry(remote_file, output)
registry.define_remotes(new_registry.remotes)
def _handle_profiles(source_folder, target_folder, output):
mkdir(target_folder)
for root, _, files in os.walk(source_folder):
relative_path = os.path.relpath(root, source_folder)
if relative_path == ".":
relative_path = ""
for f in files:
profile = os.path.join(relative_path, f)
output.info(" Installing profile %s" % profile)
s
|
hutil.copy(os.path.join(root, f), os.path.join(target_folder, profile))
def _process_git_repo(repo_url, client_cache, output, runner, tmp_folder):
output.info("Trying to clone repo %s" % repo_url)
wit
|
h tools.chdir(tmp_folder):
runner('git clone "%s" config' % repo_url, output=output)
tmp_folder = os.path.join(tmp_folder, "config")
_process_folder(tmp_folder, client_cache, output)
def _process_zip_file(zippath, client_cache, output, tmp_folder, remove=False):
unzip(zippath, tmp_folder)
if remove:
os.unlink(zippath)
_process_folder(tmp_folder, client_cache, output)
def _handle_conan_conf(current_conan_conf, new_conan_conf_path):
current_conan_conf.read(new_conan_conf_path)
with open(current_conan_conf.filename, "w") as f:
current_conan_conf.write(f)
def _process_folder(folder, client_cache, output):
for root, dirs, files in os.walk(folder):
for f in files:
if f == "settings.yml":
output.info("Installing settings.yml")
settings_path = client_cache.settings_path
shutil.copy(os.path.join(root, f), settings_path)
elif f == "conan.conf":
output.info("Processing conan.conf")
conan_conf = client_cache.conan_config
_handle_conan_conf(conan_conf, os.path.join(root, f))
elif f == "remotes.txt":
output.info("Defining remotes")
registry_path = client_cache.registry
_handle_remotes(registry_path, os.path.join(root, f), output)
else:
output.info("Copying file %s to %s" % (f, client_cache.conan_folder))
shutil.copy(os.path.join(root, f), client_cache.conan_folder)
for d in dirs:
if d == "profiles":
output.info("Installing profiles")
profiles_path = client_cache.profiles_path
_handle_profiles(os.path.join(root, d), profiles_path, output)
break
dirs[:] = [d for d in dirs if d not in ("profiles", ".git")]
def _process_download(item, client_cache, output, tmp_folder):
output.info("Trying to download %s" % item)
zippath = os.path.join(tmp_folder, "config.zip")
tools.download(item, zippath, out=output)
_process_zip_file(zippath, client_cache, output, tmp_folder, remove=True)
def configuration_install(item, client_cache, output, runner):
tmp_folder = os.path.join(client_cache.conan_folder, "tmp_config_install")
# necessary for Mac OSX, where the temp folders in /var/ are symlinks to /private/var/
tmp_folder = os.path.realpath(tmp_folder)
mkdir(tmp_folder)
try:
if item is None:
try:
item = client_cache.conan_config.get_item("general.config_install")
except ConanException:
raise ConanException("Called config install without arguments and "
"'general.config_install' not defined in conan.conf")
if item.endswith(".git"):
_process_git_repo(item, client_cache, output, runner, tmp_folder)
elif os.path.exists(item):
# is a local file
_process_zip_file(item, client_cache, output, tmp_folder)
elif item.startswith("http"):
_process_download(item, client_cache, output, tmp_folder)
else:
raise ConanException("I don't know how to process %s" % item)
finally:
if item:
client_cache.conan_config.set_item("general.config_install", item)
rmdir(tmp_folder)
|
yangalex/Otomata-python
|
Otomata.py
|
Python
|
mit
| 7,139 | 0.001541 |
"""
RUN FROM THIS FILE
Alexandre Yang
ITP 115
Final Project
05/08/2014
Description:
Refer to readme.txt
"""
import pygame
from Oto import Oto
from Button import Button
from Label import Label
# Input: pygame.Surface, tuple, int, int, int, int
# Output: none
# Side-effect: Draws the grid on the screen
def drawBoard(surface, color, w, h, tileWidth, tileHeight):
# Draw lines
for x in range(tileWidth, w+1, tileWidth):
pygame.draw.line(surface, color, (x, 0), (x, h))
for y in range(tileHeight, h+1, tileHeight):
pygame.draw.line(surface, color, (0, y), (w, y))
# Input: int, int
# Output: pygame.sprite.Sprite
# Side-effect: none
# Description: Creates a sprite to represent the position of the mouse-click
def createMouseClick(mouseX, mouseY):
mouseClick = pygame.sprite.Sprite()
mouseClick.image = pygame.Surface((1, 1))
mouseClick.rect = mouseClick.image.get_rect()
mouseClick.rect.x = mouseX
mouseClick.rect.y = mouseY
return mouseClick
def main():
# Set general variables
screenW = 850
screenH = 775
boardW = 675
boardH = 675
tileWidth = 75
tileHeight = 75
running = True
screen = pygame.display.set_mode((screenW, screenH)) # Create pygame Surface
clock = pygame.time.Clock() # Create pygame Clock
BPM = 4
active = False
bgColor = 0, 0, 0
lineColor = 255, 255, 255
# Create sprite groups (necessary to call draw() method)
otoList = pygame.sprite.Group()
buttonList = pygame.sprite.Group()
labelList = pygame.sprite.Group()
# Create Menu Buttons and add them to buttonList sprite group
playButton = Button(screen, 100, boardH+40, 50, 50, "Play")
buttonList.add(playButton)
pauseButton = Button(screen, 200, boardH+40, 75, 50, "Pause")
buttonList.add(pauseButton)
clearButton = Button(screen, 320, boardH+40, 70, 50, "Clear")
buttonList.add(clearButton)
plusBPMButton = Button(screen, 430, boardH+40, 65, 50, "BPM+")
buttonList.add(plusBPMButton)
minusBPMButton = Button(screen, 530, boardH+40, 65, 50, "BPM-")
buttonList.add(minusBPMButton)
originalButton = Button(screen, 700, 30, 140, 50
|
, "Original")
buttonList.add(o
|
riginalButton)
clarinetButton = Button(screen, 700, 130, 140, 50, "Clarinet")
buttonList.add(clarinetButton)
guitarButton = Button(screen, 700, 220, 140, 50, "Guitar")
buttonList.add(guitarButton)
synthButton = Button(screen, 700, 320, 140, 50, "Synth")
buttonList.add(synthButton)
pianoButton = Button(screen, 700, 420, 140, 50, "Piano")
buttonList.add(pianoButton)
piano2Button = Button(screen, 700, 520, 140, 50, "Piano2")
buttonList.add(piano2Button)
trumpetButton = Button(screen, 700, 620, 140, 50, "Trumpet")
buttonList.add(trumpetButton)
# main Pygame loop
while running:
# Resets the screen
screen.fill(bgColor)
# Draws the grid
drawBoard(screen, lineColor, boardW, boardH, tileWidth, tileHeight)
# Draw menu
buttonList.draw(screen)
# Listen for events
for event in pygame.event.get():
# If user closes window
if event.type == pygame.QUIT:
running = False
# If user clicks mouse
elif event.type == pygame.MOUSEBUTTONDOWN:
mouseX, mouseY = pygame.mouse.get_pos()
# Rounds mouse positions down to nearest hundred (Used to position the cells and for simplicity)
otoPosX = (mouseX // tileWidth) * tileWidth
otoPosY = (mouseY//tileHeight) * tileHeight
# Create a tiny sprite where the mouse was clicked to use in collision detection
mouseClick = createMouseClick(mouseX, mouseY)
# If left button was clicked
if event.button == 1:
# Check to see if mouseClick collided with any sprite in the otoList
clickedBlock = pygame.sprite.spritecollide(mouseClick, otoList, False)
# Check to see if mouseClick collided with any menu button
clickedMenu = pygame.sprite.spritecollide(mouseClick, buttonList, False)
# If a cell was clicked, then delete it
if clickedBlock:
otoList.remove(clickedBlock[0])
# Handle the menu button click events
elif clickedMenu:
if clickedMenu[0] == playButton:
active = True
elif clickedMenu[0] == pauseButton:
active = False
elif clickedMenu[0] == clearButton:
otoList.empty()
elif clickedMenu[0] == plusBPMButton:
BPM += 1
elif clickedMenu[0] == minusBPMButton and BPM != 1:
BPM -= 1
elif clickedMenu[0] == originalButton:
Oto.changeInstrument("")
elif clickedMenu[0] == clarinetButton:
Oto.changeInstrument("clarinet")
elif clickedMenu[0] == guitarButton:
Oto.changeInstrument("Guitar")
elif clickedMenu[0] == synthButton:
Oto.changeInstrument("Synth")
elif clickedMenu[0] == pianoButton:
Oto.changeInstrument("Piano")
elif clickedMenu[0] == piano2Button:
Oto.changeInstrument("Piano2")
elif clickedMenu[0] == trumpetButton:
Oto.changeInstrument("trumpet")
# If the grid was clicked then create a new cell at the position (an 'Oto' object)
else:
if mouseY < boardH and mouseX < boardW:
oto = Oto(screen, tileWidth, tileHeight, boardW, boardH)
oto.rect.x = otoPosX
oto.rect.y = otoPosY
otoList.add(oto)
# if right button was clicked
elif event.button == 3:
clickedBlock = pygame.sprite.spritecollide(mouseClick, otoList, False)
# Rotate cell clockwise
if clickedBlock:
clickedBlock[0].changeState()
# Draw every cell to the screen
otoList.draw(screen)
# Move the cells
if active:
otoList.update()
# Check to see if any cells collided
for oto in otoList:
oto.checkCollision(otoList)
# Draw and update BPM label
BPMLabel = Label(screen, 620, boardH+40, 50, 50, str(BPM))
labelList.empty()
labelList.add(BPMLabel)
labelList.draw(screen)
# Update the screen
pygame.display.flip()
# Set the Frames Per Second
clock.tick(BPM)
main()
|
diego04/cmput410-project
|
Distributed_Social_Networking/SocialNetworkModels/migrations/0007_comments_comment_author.py
|
Python
|
apache-2.0
| 483 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
fr
|
om django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('SocialNetworkModels', '0006_remove_commen
|
ts_post_author'),
]
operations = [
migrations.AddField(
model_name='comments',
name='comment_author',
field=models.CharField(default='aaa', max_length=200),
preserve_default=False,
),
]
|
Cytrill/tools
|
led_tools/set_led.py
|
Python
|
gpl-3.0
| 407 | 0.014742 |
#!/usr/bin/env python
import sys
import socket
import colorsys
import time
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
except:
|
print('Failed to create socket')
sys.exit(1)
host = sys.argv[1];
port = 1337;
r = int(sys.argv[3])
g = int(sys.argv[4])
b = int(sys.argv[5])
msg = bytes([ 0x20 + int(sys.argv[2]), r, g, b, 0x1F, 0x20 + int(sys.a
|
rgv[2]) ])
s.sendto(msg, (host, port))
|
noelevans/sandpit
|
kaggle/titanic/categorical_and_scaler_prediction.py
|
Python
|
mit
| 773 | 0 |
from __future__ import print_function
import pandas
from sklearn.naive_bayes import MultinomialNB
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import LabelEncoder
def main():
train_all = pandas.DataFrame.from_csv('train.csv')
|
train = train_all[['Survived', 'Sex', 'Fare']][:200]
gender_label = LabelEncoder()
train.Sex = gender_label.fit_transform(train.Sex)
X = tr
|
ain[['Sex', 'Fare']]
y = train['Survived']
X_train, X_test, y_train, y_test = train_test_split(
X, y, test_size=0.33, random_state=42)
clf = MultinomialNB()
clf.fit(X_train, y_train)
print('Accuracy: ', end='')
print(sum(clf.predict(X_test) == y_test) / float(len(y_test)))
if __name__ == '__main__':
main()
|
mattseymour/django
|
django/db/backends/utils.py
|
Python
|
bsd-3-clause
| 7,044 | 0.000568 |
import datetime
import decimal
import hashlib
import logging
from time import time
from django.conf import settings
from django.utils.encoding import force_bytes
from django.utils.timezone import utc
logger = logging.getLogger('django.db.backends')
class CursorWrapper:
def __init__(self, cursor, db):
self.cursor = cursor
self.db = db
WRAP_ERROR_ATTRS = frozenset(['fetchone', 'fetchmany', 'fetchall', 'nextset'])
def __getattr__(self, attr):
cursor_attr = getattr(self.cursor, attr)
if attr in CursorWrapper.WRAP_ERROR_ATTRS:
return self.db.wrap_database_errors(cursor_attr)
else:
return cursor_attr
def __iter__(self):
with self.db.wrap_database_errors:
for item in self.cursor:
yield item
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
# Close instead of passing through to avoid ba
|
ckend-specific behavior
# (#17671). Catch errors liberally because errors in cleanup code
# aren't useful.
try:
self.close()
except self.db.Database.Error:
pass
# The following methods cannot be implemented in __getattr__, because the
# code must run when the method is invoked, not just when it is accessed.
def callproc(self, procname, params=None):
self.db.validate_no_broken_t
|
ransaction()
with self.db.wrap_database_errors:
if params is None:
return self.cursor.callproc(procname)
else:
return self.cursor.callproc(procname, params)
def execute(self, sql, params=None):
self.db.validate_no_broken_transaction()
with self.db.wrap_database_errors:
if params is None:
return self.cursor.execute(sql)
else:
return self.cursor.execute(sql, params)
def executemany(self, sql, param_list):
self.db.validate_no_broken_transaction()
with self.db.wrap_database_errors:
return self.cursor.executemany(sql, param_list)
class CursorDebugWrapper(CursorWrapper):
# XXX callproc isn't instrumented at this time.
def execute(self, sql, params=None):
start = time()
try:
return super(CursorDebugWrapper, self).execute(sql, params)
finally:
stop = time()
duration = stop - start
sql = self.db.ops.last_executed_query(self.cursor, sql, params)
self.db.queries_log.append({
'sql': sql,
'time': "%.3f" % duration,
})
logger.debug(
'(%.3f) %s; args=%s', duration, sql, params,
extra={'duration': duration, 'sql': sql, 'params': params}
)
def executemany(self, sql, param_list):
start = time()
try:
return super(CursorDebugWrapper, self).executemany(sql, param_list)
finally:
stop = time()
duration = stop - start
try:
times = len(param_list)
except TypeError: # param_list could be an iterator
times = '?'
self.db.queries_log.append({
'sql': '%s times: %s' % (times, sql),
'time': "%.3f" % duration,
})
logger.debug(
'(%.3f) %s; args=%s', duration, sql, param_list,
extra={'duration': duration, 'sql': sql, 'params': param_list}
)
###############################################
# Converters from database (string) to Python #
###############################################
def typecast_date(s):
return datetime.date(*map(int, s.split('-'))) if s else None # returns None if s is null
def typecast_time(s): # does NOT store time zone information
if not s:
return None
hour, minutes, seconds = s.split(':')
if '.' in seconds: # check whether seconds have a fractional part
seconds, microseconds = seconds.split('.')
else:
microseconds = '0'
return datetime.time(int(hour), int(minutes), int(seconds), int((microseconds + '000000')[:6]))
def typecast_timestamp(s): # does NOT store time zone information
# "2005-07-29 15:48:00.590358-05"
# "2005-07-29 09:56:00-05"
if not s:
return None
if ' ' not in s:
return typecast_date(s)
d, t = s.split()
# Extract timezone information, if it exists. Currently we just throw
# it away, but in the future we may make use of it.
if '-' in t:
t, tz = t.split('-', 1)
tz = '-' + tz
elif '+' in t:
t, tz = t.split('+', 1)
tz = '+' + tz
else:
tz = ''
dates = d.split('-')
times = t.split(':')
seconds = times[2]
if '.' in seconds: # check whether seconds have a fractional part
seconds, microseconds = seconds.split('.')
else:
microseconds = '0'
tzinfo = utc if settings.USE_TZ else None
return datetime.datetime(
int(dates[0]), int(dates[1]), int(dates[2]),
int(times[0]), int(times[1]), int(seconds),
int((microseconds + '000000')[:6]), tzinfo
)
def typecast_decimal(s):
if s is None or s == '':
return None
return decimal.Decimal(s)
###############################################
# Converters from Python to database (string) #
###############################################
def rev_typecast_decimal(d):
if d is None:
return None
return str(d)
def truncate_name(name, length=None, hash_len=4):
"""Shortens a string to a repeatable mangled version with the given length.
"""
if length is None or len(name) <= length:
return name
hsh = hashlib.md5(force_bytes(name)).hexdigest()[:hash_len]
return '%s%s' % (name[:length - hash_len], hsh)
def format_number(value, max_digits, decimal_places):
"""
Formats a number into a string with the requisite number of digits and
decimal places.
"""
if value is None:
return None
if isinstance(value, decimal.Decimal):
context = decimal.getcontext().copy()
if max_digits is not None:
context.prec = max_digits
if decimal_places is not None:
value = value.quantize(decimal.Decimal(".1") ** decimal_places, context=context)
else:
context.traps[decimal.Rounded] = 1
value = context.create_decimal(value)
return "{:f}".format(value)
if decimal_places is not None:
return "%.*f" % (decimal_places, value)
return "{:f}".format(value)
def strip_quotes(table_name):
"""
Strip quotes off of quoted table names to make them safe for use in index
names, sequence names, etc. For example '"USER"."TABLE"' (an Oracle naming
scheme) becomes 'USER"."TABLE'.
"""
has_quotes = table_name.startswith('"') and table_name.endswith('"')
return table_name[1:-1] if has_quotes else table_name
|
lluxury/codewars
|
Simple Pig Latin.py
|
Python
|
mit
| 564 | 0.006198 |
def pig_it(text):
return ' '.j
|
oin([x[1:]+x[0]+'ay' if x.isalpha() else x for x in text.split()])
# 其实就是2个字符串过滤拼接,比移动方便多了,思路巧妙
# a if xx else b, 单行判断处理异常字符,xx为判断,标准套路
for x in text.split()
if x.isalpha()
x[1:]+x[0]+'ay'
else x
return ' '.join([
|
])
|
ghchinoy/tensorflow
|
tensorflow/python/grappler/layout_optimizer_test.py
|
Python
|
apache-2.0
| 60,128 | 0.015018 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Grappler LayoutOptimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import device_properties_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.client import session
from tensorflow.python.compat import compat
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import test_util
from tensorflow.python.grappler import cluster as gcluster
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.layers import convolutional as conv_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import saver as saver_lib
def _weight(shape):
"""Generates a weight of a given shape."""
return random_ops.truncated_normal(shape, seed=0, stddev=0.1)
def _bias(shape):
"""Generates a bias of a given shape."""
return constant_op.constant(0.1, shape=shape)
def _conv2d(x, w):
"""Returns a 2d convolution layer with full stride."""
return nn
|
.conv2d(x, w, strides=[1, 1, 1, 1], padding='SAME')
def _max_pool_2x2(x):
"""Downsamples a feature map by 2X."""
return nn.max_pool(
x, ksize=[1, 2, 2
|
, 1], strides=[1, 2, 2, 1], padding='SAME')
# Taken from tensorflow/examples/tutorials/mnist/mnist_deep.py
def _two_layer_model(x):
x_image = array_ops.reshape(x, [-1, 28, 28, 1])
w_conv1 = _weight([5, 5, 1, 32])
b_conv1 = _bias([32])
h_conv1 = nn.relu(_conv2d(x_image, w_conv1) + b_conv1)
h_pool1 = _max_pool_2x2(h_conv1)
w_conv2 = _weight([5, 5, 32, 64])
b_conv2 = _bias([64])
h_conv2 = nn.relu(_conv2d(h_pool1, w_conv2) + b_conv2)
h_pool2 = _max_pool_2x2(h_conv2)
return h_pool2
def _model_with_second_port():
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([2, 5, 5, 4], seed=0)
scale = constant_op.constant(0.1, shape=[4])
offset = constant_op.constant(0.3, shape=[4])
y, mean, _ = nn.fused_batch_norm(x, scale, offset)
mul = math_ops.add(y, mean)
output = array_ops.identity(mul)
return output
def _model_with_branch(x):
x_image = array_ops.reshape(x, [-1, 28, 28, 1])
w_conv1 = _weight([5, 5, 1, 32])
w_conv2 = _weight([5, 5, 1, 32])
c_conv1 = _conv2d(x_image, w_conv1)
c_conv2 = _conv2d(x_image, w_conv2)
add = math_ops.add(c_conv1, c_conv2)
return add
def _model_with_vec_and_4d(x):
x_image = array_ops.reshape(x, [-1, 28, 28, 1])
w_conv1 = _weight([5, 5, 1, 32])
c_conv1 = _conv2d(x_image, w_conv1)
vector = constant_op.constant(6.4, shape=[32])
add = math_ops.add(c_conv1, vector)
return add
def _loop():
random_seed.set_random_seed(0)
x1 = random_ops.truncated_normal([1, 784], seed=0)
x2 = random_ops.truncated_normal([1, 784], seed=0)
x3 = random_ops.truncated_normal([1, 784], seed=0)
x4 = random_ops.truncated_normal([1, 784], seed=0)
elems = (x1, x2, x3, x4)
outputs = map_fn.map_fn(_two_layer_model, elems, dtype=dtypes.float32)
return outputs
def _loop_with_branch():
random_seed.set_random_seed(0)
x1 = random_ops.truncated_normal([1, 784], seed=0)
x2 = random_ops.truncated_normal([1, 784], seed=0)
x3 = random_ops.truncated_normal([1, 784], seed=0)
x4 = random_ops.truncated_normal([1, 784], seed=0)
elems = (x1, x2, x3, x4)
outputs = map_fn.map_fn(_model_with_branch, elems, dtype=dtypes.float32)
return outputs
def _loop_with_vec_and_4d():
random_seed.set_random_seed(0)
x1 = random_ops.truncated_normal([1, 784], seed=0)
x2 = random_ops.truncated_normal([1, 784], seed=0)
x3 = random_ops.truncated_normal([1, 784], seed=0)
x4 = random_ops.truncated_normal([1, 784], seed=0)
elems = (x1, x2, x3, x4)
outputs = map_fn.map_fn(_model_with_vec_and_4d, elems, dtype=dtypes.float32)
return outputs
def _get_config(layout_optimizer=True):
if layout_optimizer:
rewrite_options = rewriter_config_pb2.RewriterConfig(
layout_optimizer=rewriter_config_pb2.RewriterConfig.ON,
# do not remove duplicated nodes
arithmetic_optimization=rewriter_config_pb2.RewriterConfig.OFF)
else:
rewrite_options = rewriter_config_pb2.RewriterConfig(
layout_optimizer=rewriter_config_pb2.RewriterConfig.OFF,
# do not remove duplicated nodes
arithmetic_optimization=rewriter_config_pb2.RewriterConfig.OFF)
rewrite_options.min_graph_nodes = -1
graph_options = config_pb2.GraphOptions(
rewrite_options=rewrite_options, build_cost_model=1)
config = config_pb2.ConfigProto(graph_options=graph_options)
config.graph_options.optimizer_options.opt_level = -1
return config
def _simple_metagraph(depthwise=False):
random_seed.set_random_seed(0)
x = variables.Variable(random_ops.truncated_normal([1, 200, 200, 3], seed=0))
conv = conv_layers.separable_conv2d if depthwise else conv_layers.conv2d
y = conv(x, 32, [3, 3])
z = conv(y, 32, [3, 3])
optimizer = gradient_descent.GradientDescentOptimizer(1e-4)
loss = math_ops.reduce_mean(z)
train_op = optimizer.minimize(loss)
graph = ops.get_default_graph()
graph.add_to_collection('train_op', train_op)
meta_graph = saver_lib.export_meta_graph(graph_def=graph.as_graph_def())
return meta_graph
def _get_cluster():
named_device = device_properties_pb2.NamedDevice()
named_device.name = '/GPU:0'
named_device.properties.type = 'GPU'
named_device.properties.num_cores = 24
named_device.properties.frequency = 1000
named_device.properties.environment['architecture'] = '4'
cluster = gcluster.Cluster(devices=[named_device])
return cluster
def _is_transpose(node):
return node.endswith('TransposeNHWCToNCHW-LayoutOptimizer') or node.endswith(
'TransposeNCHWToNHWC-LayoutOptimizer')
def _is_permute(node):
return node.endswith('VecPermuteNHWCToNCHW-LayoutOptimizer') or node.endswith(
'VecPermuteNCHWToNHWC-LayoutOptimizer')
@test_util.for_all_test_methods(test_util.no_xla_auto_jit,
'Test does not apply in XLA setting')
class LayoutOptimizerTest(test.TestCase):
"""Tests the Grappler layout optimizer."""
def _assert_trans_nchw_to_nhwc(self, name, nodes):
self.assertIn(name + '-TransposeNCHWToNHWC-LayoutOptimizer', nodes)
def _assert_trans_nhwc_to_nchw(self, name, nodes):
self.assertIn(name + '-TransposeNHWCToNCHW-LayoutOptimizer', nodes)
def _assert_map_nhwc_to_nchw(self, name, nodes):
self.assertIn(name + '-DimMapNHWCToNCHW-LayoutOptimizer', nodes)
def _assert_vec_nchw_to_nhwc(self, name, nodes):
self.assertIn(name + '-VecPermuteNCHWToNHWC-LayoutOptimizer', nodes)
def _assert_vec_nhwc_to_nchw(self, name, nodes):
self.assertIn(name + '-VecPermuteNHWCToNCHW-LayoutOptimizer', nodes)
def _train(self, checkpoint_path,
|
cgomezfandino/Project_PTX
|
API_Connection_Oanda/PTX_oandaInfo.py
|
Python
|
mit
| 972 | 0.009259 |
from configparser import ConfigParser
import v20
# Create an object config
config = ConfigParser()
# Read the config
config.read("../API_Connection_Oanda/pyalgo.cfg")
ctx = v20.Context(
'api-fxpractice.oanda.com',
443,
True,
application = 'sample_code',
token = config['oanda_v20']['access_token'],
datetime_format = 'RFC3339')
# class oanda_info():
def get_Id_Account():
response = ctx.account.list()
# Ask for the Oanda ID Account
accounts = response.get('accounts')
# Show the ID
for account in accounts:
# account(
|
'Account: %s' %account)
print account
def get_instruments():
response = ctx.account.instruments(
config['oanda_v20']['account_id'])
instruments = response.get('instruments')
# instruments[0].dict()
for instrument in instruments:
ins = instrument.dict()
print('%20s | %10s' % (ins['displayName'],
|
ins['name']))
|
cloudzfy/euler
|
src/18.py
|
Python
|
mit
| 1,684 | 0.006532 |
# By starting at the top of the triangle below and moving to adjacent numbers on the
# row below, the maximum total from top to bottom is 23.
# 3
# 7 4
# 2 4 6
# 8 5 9 3
# That is, 3 + 7 + 4 + 9 = 23.
# Find the maximum total from top to bottom of the triangle below:
# 75
# 95 64
# 17 47 82
# 18 35 87 10
# 20 04 82 47 65
# 19 01 23 75 03 34
# 88 02 77 73 07 63 67
# 99 65 04 28 06 16 70 92
# 41 41 26 56 83 40 80 70 33
# 41 48 72 33 47 32 37 16 94 29
# 53 71 44 65 25 43 91 52 97 51 14
# 70 11 33 28 77 73 17 78 39 68 17 57
# 91 71 52 38 17 14 91 43 58 50 27 29 48
# 63 66 04 68 89 53 67 30 73 16 69 87 40 31
# 04 62 98 27 23 09 70 98 73 93 38 53 60 04 23
# NOTE: As there are only 16384 routes, it is possible to solve this problem by trying
# every route. However, Problem 67, is the same challenge with a triangle containing
# one-hundred rows; it cannot be
|
solved by brute force, and requires a clever method! ;o)
text = '75\n\
95 64\n\
17 47 82\n\
18 35 87 10\n\
20 04 82 47 65\n\
19 01 23 75 03 34\n\
88 02 77 73 07 63 67\n\
99 65 04 28 06 16 70 92\n\
41 41 26 56 83 40 80 70 33\n\
41 48 72 33 47 32 37 16 94 29\n\
53 71 44 65 25 43 91 52 97 51 14\n\
70 11
|
33 28 77 73 17 78 39 68 17 57\n\
91 71 52 38 17 14 91 43 58 50 27 29 48\n\
63 66 04 68 89 53 67 30 73 16 69 87 40 31\n\
04 62 98 27 23 09 70 98 73 93 38 53 60 04 23'
digits = [[int (y) for y in x.split(' ')] for x in text.split('\n')]
for i in range(1, len(digits)):
digits[i][0] += digits[i - 1][0]
digits[i][len(digits[i]) - 1] += digits[i - 1][len(digits[i - 1]) - 1]
for j in range(1, len(digits[i]) - 1):
digits[i][j] += max(digits[i - 1][j - 1], digits[i - 1][j])
print max(digits[len(digits) - 1])
|
vsquare95/JiyuuBot
|
modules/permissions.py
|
Python
|
gpl-3.0
| 1,361 | 0.005878 |
def get_perm_argparser(self, args):
args = args.split(" ")
if args[0] == "nick":
self.conman.gen_send("Permission level for %s: %s" % (args[1], self.permsman.get_nick_perms(args[1])))
elif args[0] == "cmd":
if args[1].startswith("."):
args[1] = args[1][1:]
self.conman.gen_send("Permission level for %s: %s" % (args[1], self.permsman.get_cmd_perms(args[1])))
elif args[0] == "msg":
self.conman.gen_send("Message permissions for %s: %s" % (args[1], self.permsman.get_msg_perms(args[1])))
def set_perm_argparser(self, args):
args = args.split(" ")
if args[0] == "nick":
self.conman.gen_send("Setting permission level for %s: %s" % (args[1], args[2]))
self.permsman.set_nick_perms(args[1], args[2])
elif args[0] == "cmd":
if args[1].starts
|
with("."):
args[1] = args[1][1:]
self.conman.gen_send("Setting permission level for %s: %s" % (args[1], args[2]))
self.permsman.set_cmd_perms(args[1], args[2])
elif args[0] == "msg":
args[2] = args[2].lower() == "true" or args[2] == "1"
self.conman.gen_send("Setting message permissions for %s: %s" % (args[1], args[2]))
self.permsman.set_msg_p
|
erms(args[1], args[2])
self._map("command", "getperm", get_perm_argparser)
self._map("command", "setperm", set_perm_argparser)
|
wm3ndez/realestate
|
testproject/manage.py
|
Python
|
bsd-2-clause
| 464 | 0.002155 |
#!/usr/bin/env python
import os
import sys
PROJECT_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(PROJECT_DIR)
sys.path.append(os.path.abspath(PROJECT_DIR + '/../'))
sys.path.append(os.path.abspath(PROJECT_DIR + '/../realestate/'))
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETT
|
INGS_MODULE", "testproject.settings")
from django.core.management import execute_from_command_line
execute_from_command_
|
line(sys.argv)
|
nagaozen/my-os-customizations
|
home/nagaozen/.gnome2/gedit/plugins/codecompletion/utils.py
|
Python
|
gpl-3.0
| 1,420 | 0.008451 |
# -*- coding: utf-8 -*-
# gedit CodeCompletion plugin
# Copyright (C) 2011 Fabio Zendhi Nagao
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def get_word(piter):
a = piter.copy()
b = piter.copy()
while True:
if a.starts_line():
break
a.backward_char()
ch = a.get_cha
|
r()
#if not (ch.isalnum
|
() or ch in ['_', ':', '.', '-', '>']):
if not (ch.isalnum() or ch in "_:.->"):
a.forward_char()
break
word = a.get_visible_text(b)
return a, word
def get_document(piter):
a = piter.copy()
b = piter.copy()
while True:
if not a.backward_char():
break
while True:
if not b.forward_char():
break
return a.get_visible_text(b)
# ex:ts=4:et:
|
jambonsw/django-improved-user
|
src/improved_user/admin.py
|
Python
|
bsd-2-clause
| 1,256 | 0 |
"""Admin Configuration for Improved User"""
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.utils.translation import gettext_lazy as _
from .forms import UserChangeForm, UserCreationForm
class UserAdmin(B
|
aseUserAdmin):
"""Admin panel for Improved User, mimics Django's default"""
fieldsets = (
(None, {"fields": ("email", "password")}),
(_("Personal info"), {"fields": ("full_name", "short_name")}),
(
_("Permissions"),
{
"fields": (
"is_active",
"is_staff",
"is_superuser",
|
"groups",
"user_permissions",
),
},
),
(_("Important dates"), {"fields": ("last_login", "date_joined")}),
)
add_fieldsets = (
(
None,
{
"classes": ("wide",),
"fields": ("email", "short_name", "password1", "password2"),
},
),
)
form = UserChangeForm
add_form = UserCreationForm
list_display = ("email", "full_name", "short_name", "is_staff")
search_fields = ("email", "full_name", "short_name")
ordering = ("email",)
|
Bloodoff/raidinfo
|
lib/raid_megaraid.py
|
Python
|
gpl-3.0
| 8,614 | 0.003018 |
import os
import re
import struct
from . import helpers
from .raid import RaidCont
|
roller, RaidLD, RaidPD, DeviceCapacity
from .mixins import TextAttributeParser
from .smart import SMARTinfo
if os.name == 'nt':
raidUtil = 'C:\\Program Files (x86)\\MegaRAID Storage Manager\\StorCLI64.exe'
elif 'VMkern
|
el' in os.uname():
raidUtil = '/opt/lsi/storcli/storcli'
else:
raidUtil = '/opt/MegaRAID/storcli/storcli64'
class RaidControllerLSI(TextAttributeParser, RaidController):
_attributes = [
(r'(?i)^Model\s=\s(.*)$', 'Model', None, False, None),
(r'(?i)^Serial\sNumber\s=\s(.*)$', 'Serial', None, False, None),
(r'(?i)^Controller\sStatus\s=\s(.*)$', 'Status', None, False, None),
(r'(?i)^Bios\sVersion\s=\s(.*)$', 'BIOS', None, False, None),
(r'(?i)^Firmware\sVersion\s=\s(.*)$', 'Firmware', None, False, None),
(r'(?i)^On\sBoard\sMemory\sSize\s=\s(.*)$', 'CacheSize', None, False, None),
(r'(?i)^BBU\s=\s(.*)$', 'Battery', None, False, lambda match: {'Absent': False}.get(match.group(1), True)),
(r'(?i)^BBU\sStatus\s=\s(.*)$', 'BatteryStatus', None, False, lambda match: {'32': 'Degraded'}.get(match.group(1), match.group(1)))
]
def __init__(self, name):
super(self.__class__, self).__init__(name)
self.Type = 'LSIMegaRAID'
self.Serial = '-'
self.__fill_data()
self.__enumerate_ld()
@staticmethod
def probe():
if not os.path.isfile(raidUtil):
return []
output = helpers.getOutput('{} show nolog'.format(raidUtil))
controllers = []
for line in output:
match = re.search(r'^(\d+)\s\S+\s+\d+', line)
if match:
controllers.append(match.group(1))
return controllers
def __enumerate_ld(self):
ld_section = False
for line in helpers.getOutput('{} /c{} show all nolog'.format(raidUtil, self.Name)):
if re.match(r'(?i)^VD\sLIST\s:', line):
ld_section = True
continue
if not ld_section:
continue
if re.match(r'(?i)Physical\sDrives.*', line):
break
match = re.search(r'(?i)(\d+/\d+)\s+', line)
if match:
self.LDs.append(RaidLDvendorLSI(match.group(1), self))
def printSpecificInfo(self):
print('Model: {}, s/n {}, {}'.format(self.Model, self.Serial, self.Status))
print('Cache: {}'.format(self.CacheSize))
if self.Battery:
print('BBU status: {}'.format(self.BatteryStatus))
print('BIOS version: {}'.format(self.BIOS))
print('FW version : {}'.format(self.Firmware))
def __fill_data(self):
for line in helpers.getOutput('{} /c{} show all nolog'.format(raidUtil, self.Name)):
if re.match(r'(?i)^TOPOLOGY\s:', line):
break
if self._process_attributes_line(line):
continue
class RaidLDvendorLSI(RaidLD):
def __init__(self, name, controller):
(self.DG, self.VD) = name.split('/')
super(self.__class__, self).__init__(name, controller)
self.Device = self.Name
self.Level = ''
self.State = ''
self.Size = ''
self.__fill_data()
self.__find_devicename()
self.__enumerate_pd()
self.DriveCount = len(self.PDs)
self.DriveActiveCount = self.DriveCount
def __enumerate_pd(self):
pd_section = False
for line in helpers.getOutput('{} /c{}/v{} show all nolog'.format(raidUtil, self.Controller.Name, self.VD)):
if re.match(r'(?i)PDs\sfor\sVD', line):
pd_section = True
continue
if not pd_section:
continue
match = re.search(r'(?i)^(\d+):(\d+)\s+(\d+)\s+\S+', line)
if match:
self.PDs.append(RaidPDvendorLSI(match.group(1), match.group(2), match.group(3), self))
def __fill_data(self):
for line in helpers.getOutput('{} /c{}/v{} show all nolog'.format(raidUtil, self.Controller.Name, self.VD)):
match = re.search(r'(?i)SCSI\sNAA\sId\s=\s(.*)$', line)
if match:
self.NAA = match.group(1)
match = re.search(r'(?i)^(\d+)\/(\d+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)', line)
if match:
self.Level = match.group(3)
self.State = {'Optl': 'Optimal',
'Rec': 'Recovery',
'OfLn': 'OffLine',
'Pdgd': 'Partially Degraded',
'Dgrd': 'Degraded'}.get(match.group(4), match.group(4))
self.Size = DeviceCapacity(int(float(match.group(10)) * 1024), {'TB': 'GiB', 'GB': 'MiB', 'MB': 'KiB'}.get(match.group(11), None))
def __find_devicename(self):
try:
for filename in [f for f in os.listdir('/dev/disk/by-id')]:
match = re.search(r'^scsi-\d+' + self.NAA, filename)
if match:
self.Device = '/dev/disk/by-id/' + filename
except:
pass
class RaidPDvendorLSI(TextAttributeParser, RaidPD):
_attributes = [
(r'(?i)^SN\s+=\s+(.*)$', 'Serial', None, False, None),
(r'(?i)^Manufacturer\sId\s=\s+(.*)$', 'Vendor', None, False, None),
(r'(?i)^Drive\sTemperature\s=\s+(\d+)C', 'Temperature', None, False, None),
(r'(?i)^Model\sNumber\s=\s+(.*)$', 'Model', None, False, None),
(r'(?i)^Media\sError\sCount\s=\s+(\d+)', 'ErrorCount', None, True, lambda match: int(match.group(1))),
(r'(?i)^Predictive\sFailure\sCount\s=\s+(\d+)', 'ErrorCount', None, True, lambda match: int(match.group(1)))
]
def __init__(self, enclosure, slot, did, ld):
super(self.__class__, self).__init__('{}:{}'.format(enclosure, slot), ld)
self.Enclosure = enclosure
self.Slot = slot
self.Device = did
self.PHYCount = 0
self.__fill_basic_info()
if hasattr(self, 'Vendor'):
self.Model = self.Vendor + ' ' + self.Model
if 'VMkernel' in os.uname():
self.__fill_LSI_smart_info()
else:
self.__fill_smart_info()
def __fill_basic_info(self):
for line in helpers.getOutput('{} /c{}/e{}/s{} show all nolog'.format(raidUtil, self.LD.Controller.Name, self.Enclosure, self.Slot)):
match = re.search(r'^(\d+):(\d+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)', line)
if match:
self.Capacity = DeviceCapacity(int(float(match.group(6)) * 1024), {'TB': 'GiB', 'GB': 'MiB', 'MB': 'KiB'}.get(match.group(7), None))
self.Technology = match.group(8)
self.State = {
'DHS': 'Dedicated Hot Spare',
'UGood': 'Unconfigured Good',
'GHS': 'Global Hotspare',
'UBad': 'Unconfigured Bad',
'Onln': 'Optimal',
'Rbld': 'Rebuild',
'Offln': 'Offline'
}.get(match.group(4), match.group(4))
if self._process_attributes_line(line):
continue
def __fill_smart_info(self):
smart = SMARTinfo('-d megaraid,{}'.format(int(self.Device)), self.LD.Device)
if not smart.SMART:
return
for prop in ['Model', 'Serial', 'Firmware', 'Capacity', 'SectorSizes', 'FormFactor', 'PHYCount', 'PHYSpeed', 'RPM', 'PowerOnHours', 'ErrorCount', 'Temperature', 'SCT']:
if hasattr(smart, prop):
setattr(self, prop, getattr(smart, prop))
def __fill_LSI_smart_info(self):
data_dump = []
for line in helpers.getOutput('{} /c{}/e{}/s{} show smart nolog'.format(raidUtil, self.LD.Controller.Name, self.Enclosure, self.Slot)):
match = re.search(r'^(\S\S\s){15}\S\S$', line)
if match:
for c in line.split(' '):
data_dump.append(int(c, 16))
data_dump = data_dump[2:]
smart
|
mscuthbert/abjad
|
abjad/demos/desordre/test/test_demos_desordre.py
|
Python
|
gpl-3.0
| 198 | 0.005051 |
# -*- encoding: utf-8 -*-
import os
from abjad import abjad_configuration
from abjad.demo
|
s import desordre
def test_demos_desordre_01(
|
):
lilypond_file = desordre.make_desordre_lilypond_file()
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/furniture/all/shared_frn_all_lamp_free_s01_lit.py
|
Python
|
mit
| 461 | 0.047722 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LO
|
ST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/furniture/all/shared_frn_all_lamp_free_s01_lit.iff"
result.attribute_template_id = 6
result.stfName("frn
|
_n","frn_lamp_free")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
mrcslws/nupic.research
|
src/nupic/research/frameworks/vernon/mixins/step_based_logging.py
|
Python
|
agpl-3.0
| 4,696 | 0.000213 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2020, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import math
from collections import defaultdict
from nupic.research.frameworks.vernon import interfaces
__all__ = [
"StepBasedLogging",
]
class StepBasedLogging(
interfaces.Experiment, # Requires
interfaces.StepBasedLogging, # Implements
):
@staticmethod
def step_based_logging_interface_implemented():
return True
def setup_experiment(self, config):
"""
:param config: Dictionary containing the configuration parameters
- log_timest
|
ep_freq: Configures mixins and subclasses that log every
timestep to only log every nth times
|
tep (in
addition to the final timestep of each epoch).
Set to 0 to log only at the end of each epoch.
"""
super().setup_experiment(config)
self._current_timestep = 0
self.log_timestep_freq = config.get("log_timestep_freq", 1)
@property
def current_timestep(self):
return self._current_timestep
@current_timestep.setter
def current_timestep(self, value):
self._current_timestep = value
def run_iteration(self):
timestep_begin = self.current_timestep
ret = super().run_iteration()
ret.update(
timestep_begin=timestep_begin,
timestep_end=self.current_timestep,
)
return ret
def post_batch(self, **kwargs):
super().post_batch(**kwargs)
# FIXME: move to post_optimizer_step
self.current_timestep += 1
def should_log_batch(self, train_batch_idx):
return (train_batch_idx == self.total_batches - 1) or (
self.log_timestep_freq > 0
and (self.current_timestep % self.log_timestep_freq) == 0)
def get_state(self):
state = super().get_state()
state["current_timestep"] = self.current_timestep
return state
def set_state(self, state):
super().set_state(state)
if "current_timestep" in state:
self.current_timestep = state["current_timestep"]
@classmethod
def get_recorded_timesteps(cls, result, config):
log_timestep_freq = config.get("log_timestep_freq", 1)
timestep_end = result["timestep_end"]
if log_timestep_freq == 0:
ret = [timestep_end - 1]
else:
# Find first logged timestep in range
logged_begin = int(math.ceil(result["timestep_begin"]
/ log_timestep_freq)
* log_timestep_freq)
ret = list(range(logged_begin, timestep_end, log_timestep_freq))
last_batch_timestep = timestep_end - 1
if last_batch_timestep % log_timestep_freq != 0:
ret.append(last_batch_timestep)
return ret
@classmethod
def expand_result_to_time_series(cls, result, config):
result_by_timestep = defaultdict(dict)
# Assign the epoch result to the appropriate timestep.
result_by_timestep[result["timestep_end"]].update(
cls.get_readable_result(result)
)
return result_by_timestep
@classmethod
def get_execution_order(cls):
eo = super().get_execution_order()
exp = "StepBasedLoggingCore"
eo["run_iteration"].append(exp + ": Add timestep info")
eo["post_batch"].append(exp + ": Increment timestep")
eo["get_state"].append(exp + ": Get current timestep")
eo["set_state"].append(exp + ": Set current timestep")
eo.update(
# StepBasedLogging
expand_result_to_time_series=[exp + ": common result dict keys"],
)
return eo
|
openhealthcare/randomise.me
|
rm/trials/migrations/0035_auto__del_field_trial_max_participants.py
|
Python
|
agpl-3.0
| 7,643 | 0.008112 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Trial.max_participants'
db.delete_column(u'trials_trial', 'max_participants')
def backwards(self, orm):
# User chose to not deal with backwards NULL issues for 'Trial.max_participants'
raise RuntimeError("Cannot reverse this migration. 'Trial.max_participants' and its values cannot be restored.")
models = {
u'trials.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
},
u'trials.invitation': {
'Meta': {'object_name': 'Invitation'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
},
u'trials.participant': {
'Meta': {'object_name': 'Participant'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['userprofiles.RMUser']", 'null': 'True', 'blank': 'True'})
},
u'trials.report': {
'Meta': {'object_name': 'Report'},
'binary': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateField', [], {}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'participant': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Participant']", 'null': 'True', 'blank': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'trial': ('django.db.models.fields.related.Foreign
|
Key', [], {'to': u"orm['trials.Trial']"}),
'variable': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Variable']"})
},
u'trials.trial': {
'Meta': {'object_name': 'Trial'},
'de
|
scription': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'finish_date': ('django.db.models.fields.DateField', [], {}),
'group_a': ('django.db.models.fields.TextField', [], {}),
'group_a_desc': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'group_a_expected': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'group_b': ('django.db.models.fields.TextField', [], {}),
'group_b_desc': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'group_b_impressed': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instruction_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'instruction_delivery': ('django.db.models.fields.TextField', [], {'default': "'im'", 'max_length': '2'}),
'instruction_hours_after': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'is_edited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'min_participants': ('django.db.models.fields.IntegerField', [], {}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['userprofiles.RMUser']"}),
'participants': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'recruiting': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'recruitment': ('django.db.models.fields.CharField', [], {'default': "'an'", 'max_length': '2'}),
'reporting_freq': ('django.db.models.fields.CharField', [], {'default': "'da'", 'max_length': '200'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'stopped': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'trials.variable': {
'Meta': {'object_name': 'Variable'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'question': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'style': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
},
u'userprofiles.rmuser': {
'Meta': {'object_name': 'RMUser'},
'account': ('django.db.models.fields.CharField', [], {'default': "'st'", 'max_length': '2'}),
'dob': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'receive_questions': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'db_index': 'True'})
}
}
complete_apps = ['trials']
|
kvar/ansible
|
test/units/modules/network/check_point/test_cp_mgmt_host.py
|
Python
|
gpl-3.0
| 3,853 | 0.001557 |
# Ansible module to manage CheckPoint Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleExitJson
from ansible.module_utils import basic
from ansible.modules.network.check_point import cp_mgmt_host
OBJECT = {
"name": "New Host 1",
"ip_address": "192.0.2.1"
}
CREATE_PAYLOAD = {
"name": "New Host 1",
"ip_address": "192.0.2.1"
}
UPDATE_PAYLOAD = {
"name": "New Host 1",
"color": "blue",
"ipv4_address": "192.0.2.2"
}
OBJECT_AFTER_UPDATE = UPDATE_PAYLOAD
DELETE_PAYLOAD = {
"name": "New Host 1",
"state": "absent"
}
function_path = 'ansible.modules.network.check_point.cp_mgmt_host.api_call'
api_call_object = 'host'
class TestCheckpointHost(object):
module = cp_mgmt_host
@pytest.fixture(autouse=True)
def module_mock(self, mocker):
return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
@pytest.fixture
def connection_mock(self, mocker):
connection_class_mock = mocker.patch('ansible.module_utils.network.checkpoint.checkpoint.Connection')
return connection_class_mock.return_value
def test_create(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': True, api_call_object: OBJECT}
result = self._run_module(CREATE_PAYLOAD)
assert result['changed']
assert OBJECT.items() == result[api_call_object].items()
def test_c
|
reate_idempotent(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': False, api_call_object: OBJECT}
result = self._run_module(CREATE_PAYLOAD)
assert not result['changed']
def test_update(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {
|
'changed': True, api_call_object: OBJECT_AFTER_UPDATE}
result = self._run_module(UPDATE_PAYLOAD)
assert result['changed']
assert OBJECT_AFTER_UPDATE.items() == result[api_call_object].items()
def test_update_idempotent(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': False, api_call_object: OBJECT_AFTER_UPDATE}
result = self._run_module(UPDATE_PAYLOAD)
assert not result['changed']
def test_delete(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': True}
result = self._run_module(DELETE_PAYLOAD)
assert result['changed']
def test_delete_idempotent(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': False}
result = self._run_module(DELETE_PAYLOAD)
assert not result['changed']
def _run_module(self, module_args):
set_module_args(module_args)
with pytest.raises(AnsibleExitJson) as ex:
self.module.main()
return ex.value.args[0]
|
promil23/django-remote-forms
|
django_remote_forms/fields.py
|
Python
|
mit
| 10,077 | 0.002481 |
import datetime
from django.conf import settings
from django_remote_forms import logger, widgets
class RemoteField(object):
"""
A base object for being able to return a Django Form Field as a Python
dictionary.
This object also takes into account if there is initial data for the field
coming in from the form directly, which overrides any initial data
specified on the field per Django's rules:
https://docs.djangoproject.com/en/dev/ref/forms/api/#dynamic-initial-values
"""
def __init__(self, field, form_initial_data=None, field_name=None):
self.field_name = field_name
self.field = field
self.form_initial_data = form_initial_data
def
|
as_dict(self):
field_dict = OrderedDict()
field_dict['title'] = self.field.__class__.__name__
field_dict['required'] = self.field.required
field_dict['label'] = self.fi
|
eld.label
field_dict['initial'] = self.form_initial_data or self.field.initial
field_dict['help_text'] = self.field.help_text
field_dict['error_messages'] = self.field.error_messages
# Instantiate the Remote Forms equivalent of the widget if possible
# in order to retrieve the widget contents as a dictionary.
remote_widget_class_name = 'Remote%s' % self.field.widget.__class__.__name__
try:
remote_widget_class = getattr(widgets, remote_widget_class_name)
remote_widget = remote_widget_class(self.field.widget, field_name=self.field_name)
except Exception, e:
logger.warning('Error serializing %s: %s', remote_widget_class_name, str(e))
widget_dict = {}
else:
widget_dict = remote_widget.as_dict()
field_dict['widget'] = widget_dict
return field_dict
class RemoteCharField(RemoteField):
def as_dict(self):
field_dict = super(RemoteCharField, self).as_dict()
field_dict.update({
'max_length': self.field.max_length,
'min_length': self.field.min_length
})
return field_dict
class RemoteIntegerField(RemoteField):
def as_dict(self):
field_dict = super(RemoteIntegerField, self).as_dict()
field_dict.update({
'max_value': self.field.max_value,
'min_value': self.field.min_value
})
return field_dict
class RemoteFloatField(RemoteIntegerField):
def as_dict(self):
return super(RemoteFloatField, self).as_dict()
class RemoteDecimalField(RemoteIntegerField):
def as_dict(self):
field_dict = super(RemoteDecimalField, self).as_dict()
field_dict.update({
'max_digits': self.field.max_digits,
'decimal_places': self.field.decimal_places
})
return field_dict
class RemoteTimeField(RemoteField):
def as_dict(self):
field_dict = super(RemoteTimeField, self).as_dict()
field_dict['input_formats'] = self.field.input_formats
if (field_dict['initial']):
if callable(field_dict['initial']):
field_dict['initial'] = field_dict['initial']()
# If initial value is datetime then convert it using first available input format
if (isinstance(field_dict['initial'], (datetime.datetime, datetime.time, datetime.date))):
if not len(field_dict['input_formats']):
if isinstance(field_dict['initial'], datetime.date):
field_dict['input_formats'] = settings.DATE_INPUT_FORMATS
elif isinstance(field_dict['initial'], datetime.time):
field_dict['input_formats'] = settings.TIME_INPUT_FORMATS
elif isinstance(field_dict['initial'], datetime.datetime):
field_dict['input_formats'] = settings.DATETIME_INPUT_FORMATS
input_format = field_dict['input_formats'][0]
field_dict['initial'] = field_dict['initial'].strftime(input_format)
return field_dict
class RemoteDateField(RemoteTimeField):
def as_dict(self):
return super(RemoteDateField, self).as_dict()
class RemoteDateTimeField(RemoteTimeField):
def as_dict(self):
return super(RemoteDateTimeField, self).as_dict()
class RemoteRegexField(RemoteCharField):
def as_dict(self):
field_dict = super(RemoteRegexField, self).as_dict()
# We don't need the pattern object in the frontend
# field_dict['regex'] = self.field.regex
return field_dict
class RemoteEmailField(RemoteCharField):
def as_dict(self):
return super(RemoteEmailField, self).as_dict()
class RemoteFileField(RemoteField):
def as_dict(self):
field_dict = super(RemoteFileField, self).as_dict()
field_dict['max_length'] = self.field.max_length
return field_dict
class RemoteImageField(RemoteFileField):
def as_dict(self):
return super(RemoteImageField, self).as_dict()
class RemoteURLField(RemoteCharField):
def as_dict(self):
return super(RemoteURLField, self).as_dict()
class RemoteBooleanField(RemoteField):
def as_dict(self):
return super(RemoteBooleanField, self).as_dict()
class RemoteNullBooleanField(RemoteBooleanField):
def as_dict(self):
return super(RemoteNullBooleanField, self).as_dict()
class RemoteBCTChoiceFieldWithTitles(RemoteField):
def as_dict(self):
return super(RemoteBCTChoiceFieldWithTitles, self).as_dict()
def get_dict(self):
#field_dict = {'widget': {'attrs' : self.field.widget.attrs}}
#field_dict = {'results': self.field.widget.attrs['results']}
field_dict = {'results': self.field.results}
if hasattr(self.field, 'img_url'):
field_dict['img_url'] = self.field.img_url
return field_dict
class RemoteInlineForeignKeyField(RemoteField):
def as_dict(self):
return super(RemoteInlineForeignKeyField, self).as_dict()
class RemoteChoiceField(RemoteField):
def as_dict(self):
field_dict = super(RemoteChoiceField, self).as_dict()
#temporary switch off
'''
field_dict['choices'] = []
for key, value in self.field.choices:
field_dict['choices'].append({
'value': key,
'display': value
})
'''
#field_dict['choices'] = []
field_dict['widget']['choices'] = []
return field_dict
def get_dict(self):
field_dict = {'choices': []}
'''
for key, value in self.field.choices:
field_dict['choices'].append({
'value': key,
})
'''
#'display': value
#return field_dict
return {}
class RemoteTypedChoiceField(RemoteChoiceField):
def as_dict(self):
field_dict = super(RemoteTypedChoiceField, self).as_dict()
field_dict.update({
'coerce': self.field.coerce,
'empty_value': self.field.empty_value
})
return field_dict
class RemoteToolChoiceField(RemoteTypedChoiceField):
def get_dict(self):
field_dict = {'choices': self.field.choices,
'ng-options': self.field.widget.attrs['ng-options'],
}
#print dir(self.field.widget)
#print self.field.to_python()
'''
for key, value in self.field.choices:
field_dict['choices'].append({
'value': key,
})
'''
#'display': value
#return field_dict
return field_dict
class RemoteModelChoiceField(RemoteChoiceField):
def as_dict(self):
return super(RemoteModelChoiceField, self).as_dict()
'''
def get_dict(self):
#field_dict = {'widget': {'attrs' : self.field.widget.attrs}}
#field_dict = {'results': self.field.widget.attrs['results']}
field_dict = {'results': self.field.results}
if hasattr(self.field, 'img_url'):
field_dict['img_url'] = self.field.img_url
return field_dict
'''
class RemoteMultipleChoiceField(RemoteChoiceField):
|
janusnic/21v-python
|
unit_01/23.py
|
Python
|
mit
| 210 | 0.033333 |
#!/usr/bin/python
import math
# return statement
def printLog(x):
if x <= 0:
print "Positive number only, please."
return
result = math.log(x)
print "The log of x is", result
x, y = -2, 3
printLog(
|
y)
|
|
RandyLowery/erpnext
|
erpnext/projects/doctype/project/project.py
|
Python
|
gpl-3.0
| 8,909 | 0.02851 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
|
import frappe
from frappe.utils import flt, getdate, get_url
from frappe import _
from frappe.model.document import Document
from erpnext.controllers.queries import get_filters_cond
from frappe.desk.reportview import get_match_cond
class Project(Document):
def get_feed(self):
return '{0}: {1}'.format(_(self.status), self.project_name)
def onload(self
|
):
"""Load project tasks for quick view"""
if not self.get('__unsaved') and not self.get("tasks"):
self.load_tasks()
self.set_onload('activity_summary', frappe.db.sql('''select activity_type,
sum(hours) as total_hours
from `tabTimesheet Detail` where project=%s and docstatus < 2 group by activity_type
order by total_hours desc''', self.name, as_dict=True))
def __setup__(self):
self.onload()
def load_tasks(self):
"""Load `tasks` from the database"""
self.tasks = []
for task in self.get_tasks():
task_map = {
"title": task.subject,
"status": task.status,
"start_date": task.exp_start_date,
"end_date": task.exp_end_date,
"description": task.description,
"task_id": task.name,
"task_weight": task.task_weight
}
self.map_custom_fields(task, task_map)
self.append("tasks", task_map)
def get_tasks(self):
return frappe.get_all("Task", "*", {"project": self.name}, order_by="exp_start_date asc")
def validate(self):
self.validate_dates()
self.validate_weights()
self.sync_tasks()
self.tasks = []
self.send_welcome_email()
def validate_dates(self):
if self.expected_start_date and self.expected_end_date:
if getdate(self.expected_end_date) < getdate(self.expected_start_date):
frappe.throw(_("Expected End Date can not be less than Expected Start Date"))
def validate_weights(self):
sum = 0
for task in self.tasks:
if task.task_weight > 0:
sum = sum + task.task_weight
if sum > 0 and sum != 1:
frappe.throw(_("Total of all task weights should be 1. Please adjust weights of all Project tasks accordingly"))
def sync_tasks(self):
"""sync tasks and remove table"""
if self.flags.dont_sync_tasks: return
task_names = []
for t in self.tasks:
if t.task_id:
task = frappe.get_doc("Task", t.task_id)
else:
task = frappe.new_doc("Task")
task.project = self.name
task.update({
"subject": t.title,
"status": t.status,
"exp_start_date": t.start_date,
"exp_end_date": t.end_date,
"description": t.description,
"task_weight": t.task_weight
})
self.map_custom_fields(t, task)
task.flags.ignore_links = True
task.flags.from_project = True
task.flags.ignore_feed = True
task.save(ignore_permissions = True)
task_names.append(task.name)
# delete
for t in frappe.get_all("Task", ["name"], {"project": self.name, "name": ("not in", task_names)}):
frappe.delete_doc("Task", t.name)
self.update_percent_complete()
self.update_costing()
def map_custom_fields(self, source, target):
project_task_custom_fields = frappe.get_all("Custom Field", {"dt": "Project Task"}, "fieldname")
for field in project_task_custom_fields:
target.update({
field.fieldname: source.get(field.fieldname)
})
def update_project(self):
self.update_percent_complete()
self.update_costing()
self.flags.dont_sync_tasks = True
self.save(ignore_permissions = True)
def update_percent_complete(self):
total = frappe.db.sql("""select count(name) from tabTask where project=%s""", self.name)[0][0]
if not total and self.percent_complete:
self.percent_complete = 0
if (self.percent_complete_method == "Task Completion" and total > 0) or (not self.percent_complete_method and total > 0):
completed = frappe.db.sql("""select count(name) from tabTask where
project=%s and status in ('Closed', 'Cancelled')""", self.name)[0][0]
self.percent_complete = flt(flt(completed) / total * 100, 2)
if (self.percent_complete_method == "Task Progress" and total > 0):
progress = frappe.db.sql("""select sum(progress) from tabTask where
project=%s""", self.name)[0][0]
self.percent_complete = flt(flt(progress) / total, 2)
if (self.percent_complete_method == "Task Weight" and total > 0):
weight_sum = frappe.db.sql("""select sum(task_weight) from tabTask where
project=%s""", self.name)[0][0]
if weight_sum == 1:
weighted_progress = frappe.db.sql("""select progress,task_weight from tabTask where
project=%s""", self.name,as_dict=1)
pct_complete=0
for row in weighted_progress:
pct_complete += row["progress"] * row["task_weight"]
self.percent_complete = flt(flt(pct_complete), 2)
def update_costing(self):
from_time_sheet = frappe.db.sql("""select
sum(costing_amount) as costing_amount,
sum(billing_amount) as billing_amount,
min(from_time) as start_date,
max(to_time) as end_date,
sum(hours) as time
from `tabTimesheet Detail` where project = %s and docstatus = 1""", self.name, as_dict=1)[0]
from_expense_claim = frappe.db.sql("""select
sum(total_sanctioned_amount) as total_sanctioned_amount
from `tabExpense Claim` where project = %s and approval_status='Approved'
and docstatus = 1""",
self.name, as_dict=1)[0]
self.actual_start_date = from_time_sheet.start_date
self.actual_end_date = from_time_sheet.end_date
self.total_costing_amount = from_time_sheet.costing_amount
self.total_billing_amount = from_time_sheet.billing_amount
self.actual_time = from_time_sheet.time
self.total_expense_claim = from_expense_claim.total_sanctioned_amount
self.gross_margin = flt(self.total_billing_amount) - flt(self.total_costing_amount)
if self.total_billing_amount:
self.per_gross_margin = (self.gross_margin / flt(self.total_billing_amount)) *100
def update_purchase_costing(self):
total_purchase_cost = frappe.db.sql("""select sum(base_net_amount)
from `tabPurchase Invoice Item` where project = %s and docstatus=1""", self.name)
self.total_purchase_cost = total_purchase_cost and total_purchase_cost[0][0] or 0
def update_sales_costing(self):
total_sales_cost = frappe.db.sql("""select sum(grand_total)
from `tabSales Order` where project = %s and docstatus=1""", self.name)
self.total_sales_cost = total_sales_cost and total_sales_cost[0][0] or 0
def send_welcome_email(self):
url = get_url("/project/?name={0}".format(self.name))
messages = (
_("You have been invited to collaborate on the project: {0}".format(self.name)),
url,
_("Join")
)
content = """
<p>{0}.</p>
<p><a href="{1}">{2}</a></p>
"""
for user in self.users:
if user.welcome_email_sent==0:
frappe.sendmail(user.user, subject=_("Project Collaboration Invitation"), content=content.format(*messages))
user.welcome_email_sent=1
def on_update(self):
self.load_tasks()
self.sync_tasks()
def get_timeline_data(doctype, name):
'''Return timeline for attendance'''
return dict(frappe.db.sql('''select unix_timestamp(from_time), count(*)
from `tabTimesheet Detail` where project=%s
and from_time > date_sub(curdate(), interval 1 year)
and docstatus < 2
group by date(from_time)''', name))
def get_project_list(doctype, txt, filters, limit_start, limit_page_length=20):
return frappe.db.sql('''select distinct project.*
from tabProject project, `tabProject User` project_user
where
(project_user.user = %(user)s
and project_user.parent = project.name)
or project.owner = %(user)s
order by project.modified desc
limit {0}, {1}
'''.format(limit_start, limit_page_length),
{'user':frappe.session.user},
as_dict=True,
update={'doctype':'Project'})
def get_list_context(context=None):
return {
"show_sidebar": True,
"show_search": True,
'no_breadcrumbs': True,
"title": _("Projects"),
"get_list": get_project_list,
"row_template": "templates/includes/projects/project_row.html"
}
def get_users_for_project(doctype, txt, searchfield, start, page_len, filters):
conditions = []
return frappe.db.sql("""select name, concat_ws(' ', first_name, middle_name, last_name)
from
|
aino/aino-convert
|
convert/__init__.py
|
Python
|
bsd-3-clause
| 105 | 0 |
from base
|
import MediaFile
from fields import Media
|
FileField
from widgets import AdminMediaFileWidget
|
jmesteve/saas3
|
openerp/addons/auth_crypt/__openerp__.py
|
Python
|
agpl-3.0
| 1,628 | 0 |
# -*- encoding: utf-8 -*-
###########################
|
###################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Founda
|
tion, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Password Encryption',
'version': '1.1',
'author': ['OpenERP SA', 'FS3'],
'maintainer': 'OpenERP SA',
'website': 'http://www.openerp.com',
'category': 'Tools',
'description': """
Ecrypted passwords
==================
Interaction with LDAP authentication:
-------------------------------------
This module is currently not compatible with the ``user_ldap`` module and
will disable LDAP authentication completely if installed at the same time.
""",
'depends': ['base'],
'data': [],
'auto_install': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247972723/pyexpat/__init__.py
|
Python
|
gpl-2.0
| 1,861 | 0.009672 |
# encoding: utf-8
# module pyexpat
# from /usr/lib/python2.7/lib-dynload/pyexpat.x86_64-linux-gnu.so
# by ge
|
nerator 1.135
""" Python wrapper for Expat parser. """
# imports
import pyexpat.errors as errors # <module 'pyexpat.errors' (built-in)>
import pyexpat.mode
|
l as model # <module 'pyexpat.model' (built-in)>
# Variables with simple values
EXPAT_VERSION = 'expat_2.1.0'
native_encoding = 'UTF-8'
XML_PARAM_ENTITY_PARSING_ALWAYS = 2
XML_PARAM_ENTITY_PARSING_NEVER = 0
XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE = 1
__version__ = '2.7.8'
# functions
def ErrorString(errno): # real signature unknown; restored from __doc__
"""
ErrorString(errno) -> string
Returns string error for given number.
"""
return ""
def ParserCreate(encoding=None, namespace_separator=None): # real signature unknown; restored from __doc__
"""
ParserCreate([encoding[, namespace_separator]]) -> parser
Return a new XML parser object.
"""
pass
# classes
from Exception import Exception
class ExpatError(Exception):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
error = ExpatError
from object import object
class XMLParserType(object):
""" XML parser """
def __init__(self, *args, **kwargs): # real signature unknown
pass
# variables with complex values
expat_CAPI = None # (!) real value is ''
features = [
(
'sizeof(XML_Char)',
1,
),
(
'sizeof(XML_LChar)',
1,
),
(
'XML_DTD',
0,
),
(
'XML_CONTEXT_BYTES',
1024,
),
(
'XML_NS',
0,
),
]
version_info = (
2,
1,
0,
)
|
rohitranjan1991/home-assistant
|
homeassistant/components/accuweather/weather.py
|
Python
|
mit
| 6,728 | 0.002081 |
"""Support for the AccuWeather service."""
from __future__ import annotations
from statistics import mean
from typing import Any, cast
from homeassistant.components.weather import (
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_PRECIPITATION,
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TEMP,
ATTR_FORECAST_TEMP_LOW,
ATTR_FORECAST_TIME,
ATTR_FORECAST_WIND_BEARING,
ATTR_FORECAST_WIND_SPEED,
Forecast,
WeatherEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_NAME,
SPEED_MILES_PER_HOUR,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util.dt import utc_from_timestamp
from . import AccuWeatherDataUpdateCoordinator
from .const import (
API_IMPERIAL,
API_METRIC,
ATTR_FORECAST,
ATTRIBUTION,
CONDITION_CLASSES,
DOMAIN,
MANUFACTURER,
NAME,
)
PARALLEL_UPDATES = 1
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Add a AccuWeather weather entity from a config_entry."""
name: str = entry.data[CONF_NAME]
coordinator: AccuWeatherDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
async_add_entities([AccuWeatherEntity(name, coordinator)])
class AccuWeatherEntity(CoordinatorEntity, WeatherEntity):
"""Define an AccuWeather entity."""
coordinator: AccuWeatherDataUpdateCoordinator
def __init__(
self, name: str, coordinator: AccuWeatherDataUpdateCoordinator
) -> None:
"""Initialize."""
super().__init__(coordinator)
self._unit_system = API_METRIC if coordinator.is_metric else API_IMPERIAL
wind_speed_unit = self.coordinator.data["Wind"]["Speed"][self._unit_system][
"Unit"
]
if wind_speed_unit == "mi/h":
self._attr_wind_speed_unit = SPEED_MILES_PER_HOUR
else:
self._attr_wind_speed_unit = wind_speed_unit
self._attr_name = name
self._attr_unique_id = coordinator.location_key
self._attr_temperature_unit = (
TEMP_CELSIUS if coordinator.is_metric else TEMP_FAHRENHEIT
)
self._attr_attribution = ATTRIBUTION
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, coordinator.location_key)},
manufacturer=MANUFACTURER,
name=NAME,
# You don't need to provide specific details for the URL,
# so passing in _ characters is fine if the location key
# is correct
configuration_url="http://accuweather.com/en/"
f"_/_/{coordinator.location_key}/"
f"weather-forecast/{coordinator.location_key}/",
)
@property
def condition(self) -> str | None:
"""Return the current condition."""
try:
return [
k
for k, v in CONDITION_CLASSES.items()
if self.coordinator.data["WeatherIcon"] in v
][0]
except IndexError:
return None
@property
def temperature(self) -> float:
"""Return the temperature."""
return cast(
float, self.coordinator.data["Temperature"][self._unit_system]["Value"]
)
@property
def pressure(self) -> float:
"""Return the pressure."""
return cast(
float, self.coordinator.data["Pressure"][self._unit_system]["Value"]
)
@property
def humidity(self) -> int:
"""Return the humidity."""
return cast(int, self.coordinator.data["RelativeHumidity"])
@property
def wind_sp
|
eed(self) -> float:
"""Return the wind speed."""
return cast(
float, self.coordinator.data["Wind"]["Speed"][self._unit_system]["Value"]
)
@property
def wind_bearing(self) -> int:
"""Return the wind bearing."""
return cast(int, self.coordinator.data["Wind"]["Dir
|
ection"]["Degrees"])
@property
def visibility(self) -> float:
"""Return the visibility."""
return cast(
float, self.coordinator.data["Visibility"][self._unit_system]["Value"]
)
@property
def ozone(self) -> int | None:
"""Return the ozone level."""
# We only have ozone data for certain locations and only in the forecast data.
if self.coordinator.forecast and self.coordinator.data[ATTR_FORECAST][0].get(
"Ozone"
):
return cast(int, self.coordinator.data[ATTR_FORECAST][0]["Ozone"]["Value"])
return None
@property
def forecast(self) -> list[Forecast] | None:
"""Return the forecast array."""
if not self.coordinator.forecast:
return None
# remap keys from library to keys understood by the weather component
return [
{
ATTR_FORECAST_TIME: utc_from_timestamp(item["EpochDate"]).isoformat(),
ATTR_FORECAST_TEMP: item["TemperatureMax"]["Value"],
ATTR_FORECAST_TEMP_LOW: item["TemperatureMin"]["Value"],
ATTR_FORECAST_PRECIPITATION: self._calc_precipitation(item),
ATTR_FORECAST_PRECIPITATION_PROBABILITY: round(
mean(
[
item["PrecipitationProbabilityDay"],
item["PrecipitationProbabilityNight"],
]
)
),
ATTR_FORECAST_WIND_SPEED: item["WindDay"]["Speed"]["Value"],
ATTR_FORECAST_WIND_BEARING: item["WindDay"]["Direction"]["Degrees"],
ATTR_FORECAST_CONDITION: [
k for k, v in CONDITION_CLASSES.items() if item["IconDay"] in v
][0],
}
for item in self.coordinator.data[ATTR_FORECAST]
]
@staticmethod
def _calc_precipitation(day: dict[str, Any]) -> float:
"""Return sum of the precipitation."""
precip_sum = 0
precip_types = ["Rain", "Snow", "Ice"]
for precip in precip_types:
precip_sum = sum(
[
precip_sum,
day[f"{precip}Day"]["Value"],
day[f"{precip}Night"]["Value"],
]
)
return round(precip_sum, 1)
|
kghatala/googlePythonCourse
|
basic/string1.py
|
Python
|
apache-2.0
| 3,654 | 0.011768 |
#!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Basic string exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
# are some additional functions to try in string2.py.
# A. donuts
# Given an int count of a number of donuts, return a string
# of the form 'Number of donuts: <count>', where <count> is the number
# passed in. However, if the count is 10 or more, then use the word 'many'
# instead of the actual count.
# So donuts(5) returns 'Number of donuts: 5'
# and donuts(23) returns 'Number of donuts: many'
def donuts(count):
if count < 10:
return 'Number of donuts: ' + str(count)
else:
return 'Number of donuts: many'
# B. both_ends
# Given a string s, return a string made of the first 2
# and the last 2 chars of the original string,
# so 'spring' yields 'spng'. However, if the string length
# is less than 2, return instead the empty string.
def both_ends(s):
if len(s) >= 2:
return s[0] + s[1] + s[-2] + s[-1]
else:
return ''
# C. fix_start
# Given a string s, return a string
# where all occurences of its first char have
# been changed to '*', except do not change
# the first char itself.
# e.g. 'babble' yields 'ba**le'
# Assume that the string is length 1 or more.
# Hint: s.replace(stra, strb) returns a version of string s
# where all instances of stra have been replaced by strb.
def fix_start(s):
first_char = s[0]
rest = s[1:]
return first_char + rest.replace(first_char,'*')
# D. MixUp
# Given strings a and b, return a single string with a and b separated
# by a space '<a> <b>', except swap the first 2 chars of each string.
# e.g.
# 'mix', pod' -> 'pox mid'
# 'dog', 'dinner' -> 'dig donner'
# Assume a and b are length 2 or more.
def mix_up(a, b):
first_a = a[:2]
rest_a = a[2:]
first_b = b[:2]
rest_b = b[2:]
return first_b + rest_a + ' ' + first_a + rest_b
# Provided simple test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# Provided main() calls the above functions with interesting inputs,
# using test() to check if each result is correct or not.
def main():
print 'donuts'
# Each line calls donuts, compares its result to the expected f
|
or that call.
test(donuts(4), 'Number of donuts: 4')
test(donuts(9), 'Number of donuts: 9')
test(donuts(10), 'Number of donuts: many')
test(donuts(99), 'Number of donuts: many')
print
print 'both_ends'
test(both_ends('spring'), 'spng')
test(both_ends('Hello'), 'Helo')
test(both_ends('a'), '')
test(both_ends('xyz'), 'xyyz')
print
print 'fix_start'
test(fix_start('babble'), 'ba**le')
test(fix_start('aardvark'), 'a*rdv*rk')
test(fix_start('goo
|
gle'), 'goo*le')
test(fix_start('donut'), 'donut')
print
print 'mix_up'
test(mix_up('mix', 'pod'), 'pox mid')
test(mix_up('dog', 'dinner'), 'dig donner')
test(mix_up('gnash', 'sport'), 'spash gnort')
test(mix_up('pezzy', 'firm'), 'fizzy perm')
# Standard boilerplate to call the main() function.
if __name__ == '__main__':
main()
|
jeremykid/FunAlgorithm
|
python_practice/data_structure/array/array.py
|
Python
|
mit
| 869 | 0.073648 |
def main():
#init an array named a
a = list()
a = []
b = [1,'1',[1,2]]
#Get the size of a list
a_size = len(a)
#how to check if a list is empty
if (a):
print ("not empty")
else:
print ("empty")
index = 0
a = ['a','b','c']
print (a[index])
a.append('d')
a.extend(['e'])
print ('After append a, extend [e]')
print (a)
a.insert(2,'bb')
print ('After insert bb at 2')
print (a)
a.insert(0, 'a0')
print ('After insert a0 at 0')
print (a)
|
#Find the index of a item in an array
answer_1 = a.index('a')
answer_0 = a.index('a0')
print ('use a.index(item) to find the index only for the first item')
#list.pop() r
eturn last item in the list and remove the last item
print 'Before a.pop(), a = ', a
print 'a.pop() = ', a.pop()
print 'After a.pop(), a = ', a
#Rem
|
ove an item
a.remove('a0')
print 'After remove(a0), a = ' a
main()
|
rackerlabs/django-DefectDojo
|
dojo/tool_type/views.py
|
Python
|
bsd-3-clause
| 2,344 | 0.002133 |
# # product
import logging
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.urls import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render
from dojo.utils import add_breadcrumb
from dojo.forms import ToolTypeForm
from dojo.models import Tool_Type
logger = logging.getLogger(__name__)
@user_passes_test(lambda u: u.is_staff)
def new_tool_type(request):
if request.method == 'POST':
tform = ToolTypeForm(request.POST, instance=Tool_Type())
if tform.is
|
_valid():
tform.save()
messages.add_message(request,
messages.SUCCESS,
'Tool Type Configuration Successfully Created.',
extra_tags='alert-success')
return HttpResponseRedirec
|
t(reverse('tool_type', ))
else:
tform = ToolTypeForm()
add_breadcrumb(title="New Tool Type Configuration", top_level=False, request=request)
return render(request, 'dojo/new_tool_type.html',
{'tform': tform})
@user_passes_test(lambda u: u.is_staff)
def edit_tool_type(request, ttid):
tool_type = Tool_Type.objects.get(pk=ttid)
if request.method == 'POST':
tform = ToolTypeForm(request.POST, instance=tool_type)
if tform.is_valid():
tform.save()
messages.add_message(request,
messages.SUCCESS,
'Tool Type Configuration Successfully Updated.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('tool_type', ))
else:
tform = ToolTypeForm(instance=tool_type)
add_breadcrumb(title="Edit Tool Type Configuration", top_level=False, request=request)
return render(request,
'dojo/edit_tool_type.html',
{
'tform': tform,
})
@user_passes_test(lambda u: u.is_staff)
def tool_type(request):
confs = Tool_Type.objects.all().order_by('name')
add_breadcrumb(title="Tool Type List", top_level=not len(request.GET), request=request)
return render(request,
'dojo/tool_type.html',
{'confs': confs,
})
|
R-daneel-olivaw/mutation-tolerance-voting
|
pyvotecore/schulze_helper.py
|
Python
|
lgpl-3.0
| 8,939 | 0.002685 |
# Copyright (C) 2009, Brad Beattie
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from pygraph.algorithms.accessibility import accessibility, mutual_accessibility
from pygraph.classes.digraph import digraph
from pygraph.algorithms.minmax import maximum_flow
from pyvotecore.condorcet import CondorcetHelper
from pyvotecore.common_functions import matching_keys, unique_permutations
PREFERRED_LESS = 1
PREFERRED_SAME = 2
PREFERRED_MORE = 3
STRENGTH_TOLERANCE = 0.0000000001
STRENGTH_THRESHOLD = 0.1
NODE_SINK = -1
NODE_SOURCE = -2
# This class implements the Schulze Method (aka the beatpath method)
class SchulzeHelper(CondorcetHelper):
def condorcet_completion_method(self):
self.schwartz_set_heuristic()
def schwartz_set_heuristic(self):
# Iterate through using the Schwartz set heuristic
self.actions = []
while len(self.graph.edges()) > 0:
access = accessibility(self.graph)
mutual_access = mutual_accessibility(self.graph)
candidates_to_remove = set()
for candidate in self.graph.nodes():
candidates_to_remove |= (set(access[candidate]) - set(mutual_access[candidate]))
# Remove nodes at the end of non-cycle paths
if len(candidates_to_remove) > 0:
self.actions.append({'nodes': candidates_to_remove})
for candidate in candidates_to_remove:
self.graph.del_node(candidate)
# If none exist, remove the weakest edges
else:
edge_weights = self.edge_weights(self.graph)
self.actions.append({'edges': matching_keys(edge_weights, min(edge_weights.values()))})
for edge in self.actions[-1]["edges"]:
self.graph.del_edge(edge)
self.graph_winner()
def generate_vote_management_graph(self):
self.vote_management_graph = digraph()
self.vote_management_graph.add_nodes(self.completed_patterns)
self.vote_management_graph.del_node(tuple([PREFERRED_MORE] * self.required_winners))
self.pattern_nodes = self.vote_management_graph.nodes()
self.vote_management_graph.add_nodes([NODE_SOURCE, NODE_SINK])
for pattern_node in self.pattern_nodes:
self.vote_management_graph.add_edge((NODE_SOURCE, pattern_node))
for i in range(self.required_winners):
self.vote_management_graph.add_node(i)
for pattern_node in self.pattern_nodes:
for i in range(self.required_winners):
if pattern_node[i] == 1:
self.vote_management_graph.add_edge((pattern_node, i))
for i in range(self.required_winners):
self.vote_management_graph.add_edge((i, NODE_SINK))
# Generates a list of all patterns that do not contain indifference
def generate_completed_patterns(self):
self.completed_patterns = []
for i in rang
|
e(0, self.required_winners + 1):
for pattern in unique_permutations(
[PREFERRED_LESS] * (self.required_winners - i)
+ [PREFERRED_MORE] * (i)
):
self.completed_patterns.append(tuple(pattern))
def proportional_completion(self, candidate, other_candidates):
profile = dict(zip(self.completed_patterns, [0]
|
* len(self.completed_patterns)))
# Obtain an initial tally from the ballots
for ballot in self.ballots:
pattern = []
for other_candidate in other_candidates:
if ballot["ballot"][candidate] < ballot["ballot"][other_candidate]:
pattern.append(PREFERRED_LESS)
elif ballot["ballot"][candidate] == ballot["ballot"][other_candidate]:
pattern.append(PREFERRED_SAME)
else:
pattern.append(PREFERRED_MORE)
pattern = tuple(pattern)
if pattern not in profile:
profile[pattern] = 0.0
profile[pattern] += ballot["count"]
weight_sum = sum(profile.values())
# Peel off patterns with indifference (from the most to the least) and apply proportional completion to them
for pattern in sorted(profile.keys(), key=lambda pattern: pattern.count(PREFERRED_SAME), reverse=True):
if pattern.count(PREFERRED_SAME) == 0:
break
self.proportional_completion_round(pattern, profile)
try:
assert round(weight_sum, 5) == round(sum(profile.values()), 5)
except:
print ("Proportional completion broke (went from %s to %s)" % (weight_sum, sum(profile.values())))
return profile
def proportional_completion_round(self, completion_pattern, profile):
# Remove pattern that contains indifference
weight_sum = sum(profile.values())
completion_pattern_weight = profile[completion_pattern]
del profile[completion_pattern]
patterns_to_consider = {}
for pattern in profile.keys():
append = False
append_target = []
for i in range(len(completion_pattern)):
if completion_pattern[i] == PREFERRED_SAME:
append_target.append(pattern[i])
if pattern[i] != PREFERRED_SAME:
append = True
else:
append_target.append(completion_pattern[i])
append_target = tuple(append_target)
if append is True and append_target in profile:
append_target = tuple(append_target)
if append_target not in patterns_to_consider:
patterns_to_consider[append_target] = set()
patterns_to_consider[append_target].add(pattern)
denominator = 0
for (append_target, patterns) in patterns_to_consider.items():
for pattern in patterns:
denominator += profile[pattern]
# Reweight the remaining items
for pattern in patterns_to_consider.keys():
if denominator == 0:
profile[pattern] += completion_pattern_weight / len(patterns_to_consider)
else:
if pattern not in profile:
profile[pattern] = 0
profile[pattern] += sum(profile[considered_pattern] for considered_pattern in patterns_to_consider[pattern]) * completion_pattern_weight / denominator
try:
assert round(weight_sum, 5) == round(sum(profile.values()), 5)
except:
print ("Proportional completion round broke (went from %s to %s)" % (weight_sum, sum(profile.values())))
return profile
# This method converts the voter profile into a capacity graph and iterates
# on the maximum flow using the Edmonds Karp algorithm. The end result is
# the limit of the strength of the voter management as per Markus Schulze's
# Calcul02.pdf (draft, 28 March 2008, abstract: "In this paper we illustrate
# the calculation of the strengths of the vote managements.").
def strength_of_vote_management(self, voter_profile):
# Initialize the graph weights
for pattern in self.pattern_nodes:
self.vote_management_graph.set_edge_weight((NODE_SOURCE, pattern), voter_profile[pattern])
for i in range(self.required_winners):
if pattern[i] == 1:
self.vote_management_graph.set_edge_weight((pattern, i), voter_profile[pattern])
# Iterate towards the limit
r = [(fl
|
UManPychron/pychron
|
pychron/experiment/conflict_resolver.py
|
Python
|
apache-2.0
| 4,802 | 0.001874 |
# ===============================================================================
# Copyright 2015 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traits.api import HasTraits, Str, List, Instance
from traitsui.api import View, UItem, Item, TableEditor
from traitsui.table_column import ObjectColumn
from pychron.core.helpers.traitsui_shortcuts import okcancel_view
from pychron.core.ui.enum_editor import myEnumEditor
class Conflict(HasTraits):
queue_name = Str
runspec = Instance('pychron.experiment.automated_run.spec.AutomatedRunSpec')
identifier = Str
position = Str
repository_identifier = Str
repository_ids = Str
available_ids = List
class ConflictResolver(HasTraits):
conflicts = List
available_ids = List
def apply(self):
for c in self.conflicts:
c.runspec.repository_identifier = c.repository_identifier
def add_conflicts(self, qname, cs):
for ai, exps in cs:
self.conflicts.append(Conflict(queue_name=qname,
runspec=ai,
position=ai.position,
repository_identifier=ai.repository_identifier,
identifier=ai.identifier,
repository_ids=','.join(exps),
available_ids=self.available_ids))
def traits_view(self):
cols = [ObjectColumn(name='queue_name', editable=False),
ObjectColumn(name='identifier', editable=False),
ObjectColumn(name='position', editable=False),
ObjectColumn(name='repository_identifier',
label='Assigned Repository',
tooltip='Repository assigned to this analysis in the Experiment Queue',
editor=myEnumEditor(name='available_ids')),
ObjectColumn(name='repository_ids',
label='Existing Repositories',
tooltip='Set of repositories that already contain
|
this L#',
editable=False)]
v = okcancel_view(UItem('conflicts', editor=TableEditor(columns=cols)),
title='Resolve Repository Conflicts')
return v
if __name__ == '__main__':
def main():
from pychron.paths import paths
paths.build('_dev')
from pychron.c
|
ore.helpers.logger_setup import logging_setup
from pychron.experiment.automated_run.spec import AutomatedRunSpec
logging_setup('dvcdb')
from pychron.dvc.dvc_database import DVCDatabase
from itertools import groupby
db = DVCDatabase(kind='mysql', host='localhost', username='root', name='pychronmeta', password='Argon')
db.connect()
identifiers = ['63290', '63291']
runs = [AutomatedRunSpec(identifier='63290', repository_identifier='Cather_McIntoshd')]
cr = ConflictResolver()
experiments = {}
cr.available_ids = db.get_repository_identifiers()
eas = db.get_associated_repositories(identifiers)
for idn, exps in groupby(eas, key=lambda x: x[1]):
experiments[idn] = [e[0] for e in exps]
conflicts = []
for ai in runs:
identifier = ai.identifier
es = experiments[identifier]
if ai.repository_identifier not in es:
conflicts.append((ai, es))
if conflicts:
cr.add_conflicts('Foo', conflicts)
if cr.conflicts:
info = cr.edit_traits(kind='livemodal')
if info.result:
cr.apply()
# for ci in runs:
# print ci.identifier, ci.experiment_identifier
from traits.api import Button
class Demo(HasTraits):
test = Button
def traits_view(self):
return View(Item('test'))
def _test_fired(self):
main()
d = Demo()
d.configure_traits()
# ============= EOF =============================================
|
bitmazk/django-review
|
review/south_migrations/0001_initial.py
|
Python
|
mit
| 10,388 | 0.007605 |
# flake8: noqa
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from ..compat import USER_MODEL
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Review'
db.create_table(u'review_review', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm[USER_MODEL['orm_label']], null=True, blank=True)),
('content', self.gf('django.db.models.fields.TextField')(max_length=1024, blank=True)),
('language', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('creation_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'review', ['Review'])
# Adding model 'ReviewExtraInfo'
db.create_table(u'review_reviewextrainfo', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('type', self.gf('django.db.models.fields.CharField')(max_length=256)),
('review', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['review.Review'])),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
))
db.send_create_signal(u'review', ['ReviewExtraInfo'])
# Adding model 'RatingCategory'
db.create_table(u'review_ratingcategory', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
))
db.send_create_signal(u'review', ['RatingCategory'])
# Adding model 'RatingCategoryTranslation'
db.create_table(u'review_ratingcategorytranslation', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=256)),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['review.RatingCategory'])),
('language', self.gf
|
('django.db.models.fields.CharField')(max_length=2)),
))
db.send_create_signal(u'review', ['RatingCategoryTranslation'])
# Adding model 'Rating'
db.create_table(u'review_rating', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('value', self.gf('django.db.models.fields.CharField')(max_length=20)),
('review', self.gf('django.db.models.fields.related.ForeignKey')(related_name='ratings',
|
to=orm['review.Review'])),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['review.RatingCategory'])),
))
db.send_create_signal(u'review', ['Rating'])
def backwards(self, orm):
# Deleting model 'Review'
db.delete_table(u'review_review')
# Deleting model 'ReviewExtraInfo'
db.delete_table(u'review_reviewextrainfo')
# Deleting model 'RatingCategory'
db.delete_table(u'review_ratingcategory')
# Deleting model 'RatingCategoryTranslation'
db.delete_table(u'review_ratingcategorytranslation')
# Deleting model 'Rating'
db.delete_table(u'review_rating')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
USER_MODEL['model_label']: {
'Meta': {'object_name': USER_MODEL['object_name']},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'review.rating': {
'Meta': {'ordering': "['category', 'review']", 'object_name': 'Rating'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['review.RatingCategory']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'review': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ratings'", 'to': u"orm['review.Review']"}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
u'review.ratingcategory': {
'Meta': {'object_name': 'RatingCategory'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'review.ratingcategorytranslation': {
'Meta': {'object_name': 'RatingCategoryTranslation'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['review.RatingCategory']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
u'review.review': {
'Meta': {'ordering': "['-creation_date']", 'object_name': 'Review'},
'content': ('django.db.models.fields.Text
|
joakim-hove/ert
|
ert_gui/tools/plot/__init__.py
|
Python
|
gpl-3.0
| 555 | 0 |
from .plot_widget import PlotWidget
from .filter_popup import FilterPopup
from .filterable
|
_kw_list_model import FilterableKwListModel
from .data_type_keys_list_model import DataTypeKeysListModel
from .data_type_proxy_model import DataTypeProxyModel
from .data_type_keys_widget import DataTypeKeysWidget
from .plot_case_model import PlotCaseModel
from .plot_case_selection_widget import CaseSelectionWidget
from .color_chooser import ColorBox
from .style_chooser import StyleChooser
from .plot_window import PlotWindow
from .plot_t
|
ool import PlotTool
|
munk/play2048
|
tfe.py
|
Python
|
mit
| 1,792 | 0.004464 |
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from random import randint
from time import sleep
import brain
import game
drv = webdriver.Firefox()
drv.get('http://gabrielecirulli.github.io/2048/')
container = drv.find_element_by_class_name('tile-container')
retry = drv.find_element_by_class_name('retry-button')
board = [[None, None, None, None],
[None, None, None, None],
[None, None, None, None],
[None, None, None, None]]
def move_up():
container.send_keys(Keys.UP)
def move_down():
container.send_keys(Keys.DOWN)
def move_left():
container.send_keys(Keys.LEFT)
def move_right():
container.send_keys(Keys.RIGHT)
def zero_board()
|
:
global board
board = [[None, None, None, No
|
ne],
[None, None, None, None],
[None, None, None, None],
[None, None, None, None]]
def update_board():
global board
sleep(0.1)
tiles = container.find_elements_by_class_name('tile')
tiledata = list(map(lambda x: x.get_attribute('class').split(), tiles))
zero_board()
for tile in tiledata:
value = tile[1].split('-')[1]
pos = tile[2].split('-')[-2:]
board[int(pos[1]) - 1][int(pos[0]) - 1] = int(value)
def pick_move():
global board
g = game.Game(board)
predictions = brain.predict_next_board(g)
scores = []
for p in predictions[1:]:
print(p, len(p))
score = brain.weight_boards(predictions[0], p)
scores.append(score)
return brain.choose(scores)
while not retry.is_displayed():
update_board()
pick_move()()
sleep(2)
update_board()
for b in board:
print(b)
sleep(2)
print("Score: ", drv.find_element_by_class_name('score-container').text.splitlines()[0])
print("Game Over")
|
diagramsoftware/l10n-spain
|
l10n_es_igic/data/__init__.py
|
Python
|
agpl-3.0
| 985 | 0 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2004-2011 Pexego Sistemas Informáticos. All Rights Reserved
# $Omar Castiñeira Saavedra$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FO
|
R A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##########################################
|
####################################
|
sean797/tracer
|
tracer/resources/collections.py
|
Python
|
gpl-2.0
| 3,632 | 0.022577 |
#-*- coding: utf-8 -*-
# collections.py
# Define various kind of collections
#
# Copyright (C) 2016 Jakub Kadlcik
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
from __future__ import absolute_import
from operator import attrgetter, methodcaller
from psutil import NoSuchProcess
class Collection(list):
def replace_values(self, attribute, source_value, required_value):
for app in self:
if getattr(app, attribute) == source_value:
app.update(attribute, required_value)
def sorted(self, attribute):
self.replace_values(attribute, None, "")
try:
return sorted(self, key=methodcaller(attribute))
except TypeError:
return sorted(self, key=attrgetter(attribute))
class ApplicationsCollection(Collection):
def with_helpers(self):
applications = filter(lambda app: app.helper, self)
return ApplicationsCollection(applications)
def without_helpers(self):
applications = filter(lambda app: not app.helper, self)
return ApplicationsCollection(applications)
def exclude_
|
types(self, app_types):
|
"""app_types -- see Applications.TYPES"""
applications = filter(lambda app: app.type not in app_types, self)
return ApplicationsCollection(applications)
def filter_types(self, app_types):
"""app_types -- see Applications.TYPES"""
applications = filter(lambda app: app.type in app_types, self)
return ApplicationsCollection(applications)
def count_type(self, app_type):
count = 0
for application in self:
if application.type == app_type:
count += 1
return count
class ProcessesCollection(Collection):
def owned_by(self, user):
if not user:
return self
return self.filtered(lambda process: process.username() == user)
def newer_than(self, timestamp):
return self.filtered(lambda process: process.create_time() >= timestamp)
def unique(self):
unique = set()
for process in self:
try: unique.add(process)
except NoSuchProcess: pass
return ProcessesCollection(unique)
def filtered(self, function):
processes = ProcessesCollection()
for process in self:
try:
if function(process):
processes.append(process)
except NoSuchProcess: pass
return processes
class AffectedProcessesCollection(ProcessesCollection):
def update(self, iterable):
for x in iterable:
if x in self:
self[self.index(x)].update(x)
else:
self.append(x)
class PackagesCollection(Collection):
_package_manager = None
def __init__(self, *args):
list.__init__(self, *args)
def intersection(self, packages):
if packages is not None:
return PackagesCollection(set(packages).intersection(self))
return self
@property
def files(self):
files = []
for package in self:
files.extend(self._package_manager.package_files(package.name))
return set(files)
def unique_newest(self):
packages = {}
for p in self:
if p.name in packages:
if packages[p.name].modified > p.modified:
continue
packages[p.name] = p
return PackagesCollection(packages.values())
|
linkedin/indextank-service
|
storefront/templatetags/analytical.py
|
Python
|
apache-2.0
| 2,352 | 0.002976 |
"""
Analytical template tags and filters.
"""
from __future__ import absolute_import
import logging
from django import template
from django.template import Node, TemplateSyntaxError
from django.utils.importlib import import_module
from templatetags.utils import AnalyticalException
TAG_LOCATIONS = ['head_top', 'head_bottom', 'body_top', 'body_bottom']
TAG_POSITIONS = ['first', None, 'last']
TAG_MODULES = [
'storefront.clicky',
'storefront.mixpanel',
'storefront.google_analytics',
]
'''
'storefront.olark',
'analytical.chartbeat',
'analytical.crazy_egg',
'analytical.gosquared',
'analytical.hubspot',
'analytical.kiss_insights',
'analytical.kiss_metrics',
'analytical.optimizely',
'analytical.performable',
'analytical.reinvigorate',
'
|
analytical.woopra',
'''
logger = logging.getLogger(__name__)
register = template.Library()
def _location_tag(location):
def analytical_tag(parser, token):
bits = token.split_contents()
if len(bits) > 1:
|
raise TemplateSyntaxError("'%s' tag takes no arguments" % bits[0])
return AnalyticalNode(location)
return analytical_tag
for loc in TAG_LOCATIONS:
register.tag('analytical_%s' % loc, _location_tag(loc))
class AnalyticalNode(Node):
def __init__(self, location):
self.nodes = [node_cls() for node_cls in template_nodes[location]]
def render(self, context):
return "".join([node.render(context) for node in self.nodes])
def _load_template_nodes():
template_nodes = dict((l, dict((p, []) for p in TAG_POSITIONS))
for l in TAG_LOCATIONS)
def add_node_cls(location, node, position=None):
template_nodes[location][position].append(node)
for path in TAG_MODULES:
module = _import_tag_module(path)
try:
module.contribute_to_analytical(add_node_cls)
except AnalyticalException, e:
logger.debug("not loading tags from '%s': %s", path, e)
for location in TAG_LOCATIONS:
template_nodes[location] = sum((template_nodes[location][p]
for p in TAG_POSITIONS), [])
return template_nodes
def _import_tag_module(path):
app_name, lib_name = path.rsplit('.', 1)
return import_module("%s.templatetags.%s" % (app_name, lib_name))
template_nodes = _load_template_nodes()
|
alex/warehouse
|
warehouse/migrations/versions/a65114e48d6f_set_user_last_login_automatically_in_.py
|
Python
|
apache-2.0
| 1,008 | 0 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# ht
|
tp://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing p
|
ermissions and
# limitations under the License.
"""
Set User.last_login automatically in the DB
Revision ID: a65114e48d6f
Revises: 104b4c56862b
Create Date: 2016-06-11 00:28:39.176496
"""
from alembic import op
import sqlalchemy as sa
revision = 'a65114e48d6f'
down_revision = '104b4c56862b'
def upgrade():
op.alter_column(
"accounts_user",
"last_login",
server_default=sa.func.now(),
)
def downgrade():
op.alter_column("accounts_user", "last_login", server_default=None)
|
northern-bites/nao-man
|
noggin/GameController.py
|
Python
|
gpl-3.0
| 4,831 | 0.002484 |
from man import comm
from . import NogginConstants as Constants
from . import GameStates
from .util import FSA
from . import Leds
TEAM_BLUE = 0
TEAM_RED = 1
class GameController(FSA.FSA):
def __init__(self, brain):
FSA.FSA.__init__(self,brain)
self.brain = brain
self.gc = brain.comm.gc
#jf- self.setTimeFunction(self.brain.nao.getSimulatedTime)
self.addStates(GameStates)
self.currentState = 'gameInitial'
self.setName('GameController')
self.setPrintStateChanges(True)
self.stateChangeColor = 'cyan'
self.setPrintFunction(self.brain.out.printf)
self.timeRemaining = self.gc.timeRemaining()
self.kickOff = self.gc.kickOff
self.penaltyShots = False
self.ownKickOff = False
def run(self):
self.setGCLEDS()
self.ownKickOff = (self.gc.kickOff == self.brain.my.teamColor)
if self.gc.secondaryState == comm.STATE2_PENALTYSHOOT:
if self.gc.state == comm.STATE_INITIAL:
self.switchTo('penaltyShotsGameInitial')
elif self.gc.state == comm.STATE_SET:
self.switchTo('penaltyShotsGameSet')
elif self.gc.state == comm.STATE_READY:
self.switchTo('penaltyShotsGameReady')
elif self.gc.state == comm.STATE_PLAYING:
if self.gc.penalty != comm.PENALTY_NONE:
self.switchTo('penaltyShotsGamePenalized')
else:
self.switchTo("penaltyShotsGamePlaying")
elif self.gc.state == comm.STATE_FINISHED:
self.switchTo('penaltyShotsGameFinished')
elif self.gc.secondaryState == comm.STATE2_NORMAL:
if self.gc.state ==
|
comm.STATE_INITIAL:
self.switchTo('gameInitial')
|
elif self.gc.state == comm.STATE_SET:
self.switchTo('gameSet')
elif self.gc.state == comm.STATE_READY:
self.switchTo('gameReady')
elif self.gc.state == comm.STATE_PLAYING:
if self.gc.penalty != comm.PENALTY_NONE:
self.switchTo("gamePenalized")
else:
self.switchTo("gamePlaying")
elif self.gc.state == comm.STATE_FINISHED:
self.switchTo('gameFinished')
self.timeRemaining = self.gc.timeRemaining()
#Set team color
if self.gc.color != self.brain.my.teamColor:
self.brain.my.teamColor = self.gc.color
self.brain.makeFieldObjectsRelative()
self.printf("Switching team color to " +
Constants.teamColorDict[self.brain.my.teamColor])
if self.gc.kickOff != self.kickOff:
self.printf("Switching kickoff to team #%g"%self.gc.kickOff +
" from team #%g"% self.kickOff)
self.kickOff = self.gc.kickOff
FSA.FSA.run(self)
def timeRemaining(self):
return self.timeRemaining()
def timeSincePlay(self):
return Constants.LENGTH_OF_HALF - self.timeRemaining
def getScoreDifferential(self):
'''
negative when we're losing
'''
return self.brain.gameController.gc.teams(self.brain.my.teamColor)[1] -\
self.brain.gameController.gc.teams((self.brain.my.teamColor+1)%2)[1]
def setGCLEDS(self):
'''
Method to set the chest and feet according to the current
GC states and infos
'''
####### KICKOFF ######
if (self.gc.kickOff == self.gc.team and
(self.gc.state == comm.STATE_INITIAL or
self.gc.state == comm.STATE_READY or
self.gc.state == comm.STATE_PLAYING)):
self.brain.leds.executeLeds(Leds.HAVE_KICKOFF_LEDS)
else:
self.brain.leds.executeLeds(Leds.NO_KICKOFF_LEDS)
###### TEAM COLOR ######
if self.gc.color == TEAM_BLUE:
self.brain.leds.executeLeds(Leds.TEAM_BLUE_LEDS)
else:
self.brain.leds.executeLeds(Leds.TEAM_RED_LEDS)
###### GAME STATE ######
if self.gc.state == comm.STATE_INITIAL:
self.brain.leds.executeLeds(Leds.STATE_INITIAL_LEDS)
elif self.gc.state == comm.STATE_SET:
self.brain.leds.executeLeds(Leds.STATE_SET_LEDS)
elif self.gc.state == comm.STATE_READY:
self.brain.leds.executeLeds(Leds.STATE_READY_LEDS)
elif self.gc.state == comm.STATE_PLAYING:
if self.gc.penalty != comm.PENALTY_NONE:
self.brain.leds.executeLeds(Leds.STATE_PENALIZED_LEDS)
else:
self.brain.leds.executeLeds(Leds.STATE_PLAYING_LEDS)
elif self.gc.state == comm.STATE_FINISHED:
self.brain.leds.executeLeds(Leds.STATE_FINISHED_LEDS)
|
ahmedbodi/AutobahnPython
|
autobahn/autobahn/wamp/message.py
|
Python
|
apache-2.0
| 84,035 | 0.020694 |
###############################################################################
##
## Copyright (C) 2013-2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from __future__ import absolute_import
__all__ = ['Hello',
'Welcome',
'Abort',
'Challenge',
'Authenticate',
'Goodbye',
'Heartbeat'
'Error',
'Publish',
'Published',
'Subscribe',
'Subscribed',
'Unsubscribe',
'Unsubscribed',
'Event',
'Call',
'Cancel',
'Result',
'Register',
'Registered',
'Unregister',
'Unregistered',
'Invocation',
'Interrupt',
'Yield']
import re
import six
import autobahn
from autobahn import util
from autobahn.wamp.exce
|
ption import ProtocolError
from autobahn.wamp.interfaces import IMessage
from autobahn.wamp.role import ROLE_NAME_TO_CLASS
## strict URI check allowing empty URI components
_URI_PAT_STRICT = re.compile(r"^(([0-9a-z_]{2,}\.)|\.)*([0-9a-z_]{2,})?$")
## loose URI check allowing empty URI components
_URI_PAT_LOOSE = re.com
|
pile(r"^(([^\s\.#]+\.)|\.)*([^\s\.#]+)?$")
## strict URI check disallowing empty URI components
_URI_PAT_STRICT_NON_EMPTY = re.compile(r"^([0-9a-z_]{2,}\.)*([0-9a-z_]{2,})?$")
## loose URI check disallowing empty URI components
_URI_PAT_LOOSE_NON_EMPTY = re.compile(r"^([^\s\.#]+\.)*([^\s\.#]+)?$")
def check_or_raise_uri(value, message):
if type(value) != six.text_type:
raise ProtocolError("{}: invalid type {} for URI".format(message, type(value)))
if not _URI_PAT_LOOSE.match(value):
raise ProtocolError("{}: invalid value '{}' for URI".format(message, value))
return value
def check_or_raise_id(value, message):
if type(value) not in six.integer_types:
raise ProtocolError("{}: invalid type {} for ID".format(message, type(value)))
if value < 0 or value > 9007199254740992: # 2**53
raise ProtocolError("{}: invalid value {} for ID".format(message, value))
return value
def check_or_raise_extra(value, message):
if type(value) != dict:
raise ProtocolError("{}: invalid type {}".format(message, type(value)))
for k in value.keys():
if type(k) != six.text_type:
raise ProtocolError("{}: invalid type {} for key '{}'".format(message, type(k), k))
return value
class Message(util.EqualityMixin):
"""
WAMP message base class. This is not supposed to be instantiated.
"""
def __init__(self):
"""
Base constructor.
"""
## serialization cache: mapping from ISerializer instances
## to serialized bytes
##
self._serialized = {}
def uncache(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.uncache`
"""
self._serialized = {}
def serialize(self, serializer):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.serialize`
"""
## only serialize if not cached ..
if not serializer in self._serialized:
self._serialized[serializer] = serializer.serialize(self.marshal())
return self._serialized[serializer]
IMessage.register(Message)
class Hello(Message):
"""
A WAMP `HELLO` message.
Format: `[HELLO, Realm|uri, Details|dict]`
"""
MESSAGE_TYPE = 1
"""
The WAMP message code for this type of message.
"""
def __init__(self, realm, roles, authmethods = None):
"""
Message constructor.
:param realm: The URI of the WAMP realm to join.
:type realm: str
:param roles: The WAMP roles to announce.
:type roles: list of :class:`autobahn.wamp.role.RoleFeatures`
"""
assert(type(realm) == six.text_type)
assert(type(roles) == list)
for role in roles:
assert(isinstance(role, autobahn.wamp.role.RoleFeatures))
if authmethods:
assert(type(authmethods) == list)
for authmethod in authmethods:
assert(type(authmethod) == six.text_type)
Message.__init__(self)
self.realm = realm
self.roles = roles
self.authmethods = authmethods
@staticmethod
def parse(wmsg):
"""
Verifies and parses an unserialized raw message into an actual WAMP message instance.
:param wmsg: The unserialized raw message.
:type wmsg: list
:returns obj -- An instance of this class.
"""
## this should already be verified by WampSerializer.unserialize
##
assert(len(wmsg) > 0 and wmsg[0] == Hello.MESSAGE_TYPE)
if len(wmsg) != 3:
raise ProtocolError("invalid message length {} for HELLO".format(len(wmsg)))
realm = check_or_raise_uri(wmsg[1], "'realm' in HELLO")
details = check_or_raise_extra(wmsg[2], "'details' in HELLO")
roles = []
if not u'roles' in details:
raise ProtocolError("missing mandatory roles attribute in options in HELLO")
details_roles = check_or_raise_extra(details[u'roles'], "'roles' in 'details' in HELLO")
if len(details_roles) == 0:
raise ProtocolError("empty 'roles' in 'details' in HELLO")
for role in details_roles:
if role not in ROLE_NAME_TO_CLASS:
raise ProtocolError("invalid role '{}' in 'roles' in 'details' in HELLO".format(role))
details_role = check_or_raise_extra(details_roles[role], "role '{}' in 'roles' in 'details' in HELLO".format(role))
if u'features' in details_role:
details_role_features = check_or_raise_extra(details_role[u'features'], "'features' in role '{}' in 'roles' in 'details' in HELLO".format(role))
## FIXME: skip unknown attributes
role_features = ROLE_NAME_TO_CLASS[role](**details_role[u'features'])
else:
role_features = ROLE_NAME_TO_CLASS[role]()
roles.append(role_features)
authmethods = None
if u'authmethods' in details:
details_authmethods = details[u'authmethods']
if type(details_authmethods) != list:
raise ProtocolError("invalid type {} for 'authmethods' detail in HELLO".format(type(details_authmethods)))
for auth_method in details_authmethods:
if type(auth_method) != six.text_type:
raise ProtocolError("invalid type {} for item in 'authmethods' detail in HELLO".format(type(auth_method)))
authmethods = details_authmethods
obj = Hello(realm, roles, authmethods)
return obj
def marshal(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.marshal`
"""
details = {u'roles': {}}
for role in self.roles:
details[u'roles'][role.ROLE] = {}
for feature in role.__dict__:
if not feature.startswith('_') and feature != 'ROLE' and getattr(role, feature) is not None:
if not u'features' in details[u'roles'][role.ROLE]:
details[u'roles'][role.ROLE] = {u'features': {}}
details[u'roles'][role.ROLE][u'features'][six.u(feature)] = getattr(role, feature)
if self.authmethods:
details[u'authmethods'] = self.authmethods
return [Hello.MESSAGE_TYPE, self.realm, details]
def __str__(self):
"""
Implements :func:`autobahn.wamp.interfaces.IMessage.__str__`
"""
return "WAMP HELLO Message (realm = {}, roles = {}, authmethods = {})".format(self.realm, self.roles, self.authmethods)
class Welcome(Message):
"""
A WAMP `WELCOM
|
mrcslws/nupic.research
|
projects/rsm/rsm_samplers.py
|
Python
|
agpl-3.0
| 9,884 | 0.001315 |
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2019, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
import numpy as np
import torch
from PIL import Image
from torch.nn.utils.rnn import pad_sequence
from torch.utils.data import Sampler
from torchvision import datasets
class MNISTBufferedDataset(datasets.MNIST):
def __init__(
self, root, train=True, transform=None, target_transform=None, download=False
):
super(MNISTBufferedDataset, self).__init__(
root,
train=train,
transform=transform,
target_transform=target_transform,
download=download,
)
def __getitem__(self, index):
"""
Override to allow generation of white noise for index -1
Args:
index (int): Index
Returns:
tuple: (image, target) where target is index of the target class.
"""
if index == -1:
# Noise
target = np.random.randint(10) # -1
img = np.random.rand(28, 28)
else:
img, target = self.data[index].numpy(), int(self.targets[index])
img = Image.fromarray(img, mode="L")
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
return img, target
class MNISTSequenceSampler(Sampler):
"""
Loop through one or more sequences of digits
Draw each digit image (based on label specified by sequence) randomly
TODO: Having this work with a custom DataSet that draws random
MNIST digits may be more appropriate
"""
def __init__(
self,
data_source,
sequences=None,
batch_size=64,
random_mnist_images=True,
randomize_sequence_cursors=True,
max_batches=100,
use_mnist_pct=1.0,
noise_buffer=False,
):
super(MNISTSequenceSampler, self).__init__(data_source)
self.data_source = data_source
self.random_mnist_images = random_mnist_images
self.randomize_sequence_cursors = randomize_sequence_cursors
self.use_mnist_pct = use_mnist_pct
self.noise_buffer = noise_buffer
self.max_batches = max_batches
self.bsz = batch_size
self.label_indices = {} # Digit -> Indices in dataset
self.label_cursors = {} # Digit -> Cursor across images for each digit
sequences = list(sequences) # Avoid changing underlying sequence list
if self.noise_buffer:
for seq in sequences:
if seq[-1] != -1:
seq.append(-1)
self.sequences = sequences
self.n_sequences = len(self.sequences)
self.seq_lengths = torch.tensor([len(subseq) for subseq in self.sequences])
# Each of these stores both current and next batch state (2 x batch_size)
self.sequence_id = torch.stack(
(self._init_sequence_ids(), self._init_sequence_ids())
) # Iterate over subsequences
first_batch_cursors = self._init_sequence_cursors()
self.sequence_cursor = torch.stack(
(first_batch_cursors, first_batch_cursors)
) # Iterates over sequence items
self._increment_next()
self.sequences_mat = pad_sequence(
torch.tensor(self.sequences), batch_first=True, padding_value=-99
)
# Get index for each digit (that appears in a passed sequence)
for seq in sequences:
for digit in seq:
if digit != -1 and digit not in self.label_indices:
mask = (data_source.targets == digit).nonzero().flatten()
idx = torch.randperm(mask.size(0))
if self.use_mnist_pct < 1.0:
idx = idx[: int(self.use_mnist_pct * len(idx))]
self.label_indices[digit] = mask[idx]
self.label_cursors[digit] = 0
def _init_sequence_ids(self):
return torch.LongTensor(self.bsz).random_(0, self.n_sequences)
def _init_sequence_cursors(self):
if self.randomize_sequence_cursors:
lengths = self.seq_lengths[self.sequence_id[0]]
cursors = (
torch.FloatTensor(self.bsz).uniform_(0, 1) * lengths.float()
).long()
else:
cursors = torch.zeros(self.bsz).long()
return cursors
def _increment_next(self):
# Increment cursors and select new random subsequences for those that
# have terminated
self.sequence_cursor[1] += 1
roll_mask = self.sequence_cursor[1] >= self.seq_lengths[self.sequence_id[1]]
if roll_mask.sum() > 0:
# Roll items to 0 of randomly chosen next subsequence
self.sequence_id[1, roll_mask] = torch.
|
LongTensor(
1, roll_mask.sum()
).random_(0, self.n_sequences)
self.sequence_cursor[1, roll_mask] = 0
def _get_next_batch(self):
"""
"""
# First row is current inputs
inp_labels_batch = self.sequences_mat[
self.sequence_id[0], self.sequence_cursor[0]
]
inp_idxs = [self._get_sample_image(digit.item()) for digit in inp_labels_batch]
# Second row is next (predicted)
|
inputs
tgt_labels_batch = self.sequences_mat[
self.sequence_id[1], self.sequence_cursor[1]
]
tgt_idxs = [self._get_sample_image(digit.item()) for digit in tgt_labels_batch]
# Roll next to current
self.sequence_id[0] = self.sequence_id[1]
self.sequence_cursor[0] = self.sequence_cursor[1]
self._increment_next()
return inp_idxs + tgt_idxs
def _get_sample_image(self, digit):
"""
Return a sample image id for digit from MNIST
"""
if digit == -1:
# Generate white noise
return -1
else:
cursor = self.label_cursors[digit]
if self.random_mnist_images:
# If not random, always take first digit
self.label_cursors[digit] += 1
indices = self.label_indices[digit]
if cursor >= len(indices) - 1:
# Begin sequence from beginning & shuffle
self.label_cursors[digit] = cursor = 0
idx = torch.randperm(len(self.label_indices[digit]))
self.label_indices[digit] = indices = self.label_indices[digit][idx]
return indices[cursor].item()
def __iter__(self):
for _i in range(len(self)):
yield self._get_next_batch()
return
def __len__(self):
return self.max_batches if self.max_batches else len(self.data_source)
def pred_sequence_collate(batch):
"""
"""
bsz = len(batch) // 2
inp_tuples = batch[:bsz]
tgt_tuples = batch[bsz:]
inp_images_batch = torch.stack([item[0] for item in inp_tuples]).view(bsz, -1)
tgt_images_batch = torch.stack([item[0] for item in tgt_tuples]).view(bsz, -1)
inp_labels_batch = torch.tensor([item[1] for item in inp_tuples])
tgt_labels_batch = torch.tensor([item[1] for item in tgt_tuples])
return (inp_images_batch, tgt_images_batch, tgt_labels_batch, inp_labels_batch)
class PTBSequenceSampler(Sampler):
"""
"""
def __init__(
self, data_source, batch_size=64, max_batches=10000
|
aplicatii-romanesti/allinclusive-kodi-pi
|
.kodi/addons/plugin.video.salts/service.py
|
Python
|
apache-2.0
| 5,535 | 0.00271 |
"""
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import xbmc
import xbmcaddon
import xbmcgui
from salts_lib import log_utils
from salts_lib import utils
from salts_lib.constants import MODES
from salts_lib.db_utils import DB_Connection
MAX_ERRORS = 10
kodi = xbmcaddon.Addon(id='plugin.video.salts')
log_utils.log('Service: Installed Version: %s' % (kodi.getAddonInfo('version')))
db_connection = DB_Connection()
if kodi.getSetting('use_remote_db') == 'false' or kodi.getSetting('enable_upgrade') == 'true':
db_connection.init_database()
class Service(xbmc.Player):
def __init__(self, *args, **kwargs):
log_utils.log('Service: starting...')
xbmc.Player.__init__(self, *args, **kwargs)
self.win = xbmcgui.Window(10000)
self.reset()
def reset(self):
log_utils.log('Service: Resetting...')
self.win.clearProperty('salts.playing')
self.win.clearProperty('salts.playing.trakt_id')
self.win.clearProperty('salts.playing.season')
self.win.cle
|
arProperty('salts.playing.episode')
|
self.win.clearProperty('salts.playing.srt')
self.win.clearProperty('salts.playing.resume')
self.tracked = False
self._totalTime = 999999
self.trakt_id = None
self.season = None
self.episode = None
self._lastPos = 0
def onPlayBackStarted(self):
log_utils.log('Service: Playback started')
playing = self.win.getProperty('salts.playing') == 'True'
self.trakt_id = self.win.getProperty('salts.playing.trakt_id')
self.season = self.win.getProperty('salts.playing.season')
self.episode = self.win.getProperty('salts.playing.episode')
srt_path = self.win.getProperty('salts.playing.srt')
resume_point = self.win.getProperty('salts.playing.trakt_resume')
if playing: # Playback is ours
log_utils.log('Service: tracking progress...')
self.tracked = True
if srt_path:
log_utils.log('Service: Enabling subtitles: %s' % (srt_path))
self.setSubtitles(srt_path)
else:
self.showSubtitles(False)
self._totalTime = 0
while self._totalTime == 0:
try:
self._totalTime = self.getTotalTime()
except RuntimeError:
self._totalTime = 0
break
xbmc.sleep(1000)
if resume_point:
resume_time = float(resume_point) * self._totalTime / 100
log_utils.log("Resume Percent: %s, Resume Time: %s Total Time: %s" % (resume_point, resume_time, self._totalTime), log_utils.LOGDEBUG)
self.seekTime(resume_time)
def onPlayBackStopped(self):
log_utils.log('Service: Playback Stopped')
if self.tracked:
playedTime = float(self._lastPos)
try: percent_played = int((playedTime / self._totalTime) * 100)
except: percent_played = 0 # guard div by zero
pTime = utils.format_time(playedTime)
tTime = utils.format_time(self._totalTime)
log_utils.log('Service: Played %s of %s total = %s%%' % (pTime, tTime, percent_played), log_utils.LOGDEBUG)
if playedTime == 0 and self._totalTime == 999999:
log_utils.log('XBMC silently failed to start playback', log_utils.LOGWARNING)
elif playedTime >= 5:
log_utils.log('Service: Setting bookmark on |%s|%s|%s| to %s seconds' % (self.trakt_id, self.season, self.episode, playedTime), log_utils.LOGDEBUG)
db_connection.set_bookmark(self.trakt_id, playedTime, self.season, self.episode)
if percent_played >= 75:
if xbmc.getCondVisibility('System.HasAddon(script.trakt)'):
run = 'RunScript(script.trakt, action=sync, silent=True)'
xbmc.executebuiltin(run)
self.reset()
def onPlayBackEnded(self):
log_utils.log('Service: Playback completed')
self.onPlayBackStopped()
monitor = Service()
utils.do_startup_task(MODES.UPDATE_SUBS)
errors = 0
while not xbmc.abortRequested:
try:
isPlaying = monitor.isPlaying()
utils.do_scheduled_task(MODES.UPDATE_SUBS, isPlaying)
if monitor.tracked and monitor.isPlayingVideo():
monitor._lastPos = monitor.getTime()
except Exception as e:
errors += 1
if errors >= MAX_ERRORS:
log_utils.log('Service: Error (%s) received..(%s/%s)...Ending Service...' % (e, errors, MAX_ERRORS), log_utils.LOGERROR)
break
else:
log_utils.log('Service: Error (%s) received..(%s/%s)...Continuing Service...' % (e, errors, MAX_ERRORS), log_utils.LOGERROR)
else:
errors = 0
xbmc.sleep(1000)
log_utils.log('Service: shutting down...')
|
zstackorg/zstack-woodpecker
|
integrationtest/vm/hybrid/test_attach_detach_oss_bucket.py
|
Python
|
apache-2.0
| 1,062 | 0.003766 |
'''
New Integration Test for hybrid.
@author: Quarkonics
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.operations.hybrid_operations as hyb_ops
import zstackwoodpecker.operations.resource_operations as res_ops
import time
import os
postfix = time.strftime('%m%d-%H%M%S', time.localtime())
test_obj_dict = test_state.TestStateDict()
remote_bucket_name = 'test-bucket-%s' % postfix
test_stub = test_lib.lib_get_test_stub()
hybrid = test_stub.HybridObject()
def test():
hybrid.add_datacenter_iz(add_datacenter_only=True)
hybrid.add_bucket()
hybrid.detach_bucket()
hybrid.attach_bucket()
test_util.test_pass('Create Attach Detach OSS Bucket Test Success')
def env_recover():
if
|
hybrid.oss_bucket_create:
hybrid.del_bucket()
#Will be called only if exception happens in test().
def error_cleanup():
global test_obj_dict
|
test_lib.lib_error_cleanup(test_obj_dict)
|
ff0000/scarlet
|
scarlet/assets/models.py
|
Python
|
mit
| 7,186 | 0.000835 |
import os
import uuid
from django.db import models
from django.core.files.uploadedfile import UploadedFile
from django.forms.forms import pretty_name
from . import get_image_cropper
from . import tasks
from . import settings
from . import utils
from . import signals
from .managers import AssetManager
from .fields import AssetRealFileField
try:
from ..versioning import manager
except ValueError:
from versioning import manager
try:
from ..cms.internal_tags.models import AutoTagModel
except ValueError:
from cms.internal_tags.models import AutoTagModel
class AssetBase(AutoTagModel):
UNKNOWN = 'unknown'
IMAGE = 'image'
DOCUMENT = 'document'
AUDIO = 'audio'
VIDEO = 'video'
TYPES = settings.ASSET_TYPES and settings.ASSET_TYPES or \
((UNKNOWN, 'Unknown'),
(IMAGE, 'Image'),
(DOCUMENT, 'Document'),
(AUDIO, 'Audio'),
(VIDEO, 'Video'),)
__original_file = None
title = models.CharField(max_length=255)
file = AssetRealFileField(upload_to=utils.assets_dir)
type = models.CharField(max_length=255, choices=TYPES, db_index=True)
slug = models.SlugField(unique=True, max_length=255)
user_filename = models.CharField(max_length=255)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
cbversion = models.PositiveIntegerField(editable=False)
objects = AssetManager()
class Meta:
abstract = True
def __init__(self, *args, **kwargs):
super(AssetBase, self).__init__(*args, **kwargs)
self.__original_file = self.file
def rename_file(self):
if self.type == self.DOCUMENT:
return False
return settings.HASH_FILENAME
def url(self):
"""
This is a wrapper of file.url
"""
return self.file.url
def generate_slug(self):
return str(uuid.uuid1())
def assign_tag(self):
pass
def delete_real_file(self, file_obj):
file_obj.storage.delete(file_obj.name)
signals.file_removed.send(file_obj.name)
def _can_crop(self):
return self.type == self.IMAGE
def reset_crops(self):
"""
Reset all known crops to the default crop.
If settings.ASSET_CELERY is specified then
the task will be run async
"""
if self._can_crop():
if settings.CELERY or settings.USE_CELERY_DECORATOR:
# this means that we are using celery
tasks.reset_crops.apply_async(args=[self.pk], countdown=5)
else:
tasks.reset_crops(None, asset=self)
def ensure_crops(self, *required_crops):
"""
Make sure a crop exists for each crop in required_crops.
Existing crops will not be changed.
If settings.ASSET_CELERY is specified then
the task will be run async
"""
if self._can_crop():
if settings.CELERY or settings.USE_CELERY_DECORATOR:
# this means that we are using celery
args = [self.pk]+list(required_crops)
tasks.ensure_crops.apply_async(args=args, countdown=5)
else:
tasks.ensure_crops(None, *required_crops, asset=self)
def create_crop(self, name, x, x2, y, y2):
"""
Create a crop for this asset.
"""
if self._can_crop():
spec = get_image_cropper().create_crop(name, self.file, x=x,
x2=x2, y=y, y2=y2)
ImageDetail.save_crop_spec(self, spec)
def save(self, *args, **kwargs):
"""
For new assets, creates a new slug.
For updates, deletes the old file from storage.
Calls super to actually save the object.
|
"""
if not self.pk and not self.slug:
self.slug = self.generate_slug()
if self.__original_file and self.file != self.__original_file:
self.delete_real_file(self.__original_file)
file_changed = True
if self.pk:
new_value = getattr(self, 'file')
|
if hasattr(new_value, "file"):
file_changed = isinstance(new_value.file, UploadedFile)
else:
self.cbversion = 0
if file_changed:
self.user_filename = os.path.basename(self.file.name)
self.cbversion = self.cbversion + 1
if not self.title:
self.title = self.user_filename
super(AssetBase, self).save(*args, **kwargs)
if file_changed:
signals.file_saved.send(self.file.name)
utils.update_cache_bust_version(self.file.url, self.cbversion)
self.reset_crops()
if self.__original_file and self.file.name != self.__original_file.name:
with manager.SwitchSchemaManager(None):
for related in self.__class__._meta.get_all_related_objects(
include_hidden=True):
field = related.field
if getattr(field, 'denormalize', None):
cname = field.get_denormalized_field_name(field.name)
if getattr(field, 'denormalize'):
related.model.objects.filter(**{
field.name: self.pk
}).update(**{
cname: self.file.name
})
def delete(self, *args, **kwargs):
"""
Deletes the actual file from storage after the object is deleted.
Calls super to actually delete the object.
"""
file_obj = self.file
super(AssetBase, self).delete(*args, **kwargs)
self.delete_real_file(file_obj)
def __unicode__(self):
return '%s' % (self.user_filename)
class ImageDetailBase(models.Model):
image = models.ForeignKey(settings.ASSET_MODEL)
width = models.PositiveIntegerField()
height = models.PositiveIntegerField()
name = models.CharField(max_length=255)
editable = models.BooleanField(editable=False, default=False)
x = models.PositiveIntegerField(null=True)
x2 = models.PositiveIntegerField(null=True)
y = models.PositiveIntegerField(null=True)
y2 = models.PositiveIntegerField(null=True)
class Meta:
abstract = True
def __unicode__(self):
return pretty_name(self.name)
def get_crop_config(self):
return get_image_cropper().get_crop_config(self.name)
@classmethod
def save_crop_spec(cls, asset, spec, update_version=True):
if spec:
cdict = spec.to_dict()
updated = cls.objects.filter(image=asset,
name=cdict['name']).update(**cdict)
if not updated:
cls(image=asset, **cdict).save()
if update_version:
asset.__class__.objects.filter(pk=asset.pk
).update(cbversion=models.F('cbversion')+1)
class Asset(AssetBase):
class Meta:
abstract = False
class ImageDetail(ImageDetailBase):
class Meta:
abstract = False
|
arrayfire/arrayfire_python
|
tests/simple/algorithm.py
|
Python
|
bsd-3-clause
| 3,357 | 0.000298 |
#!/usr/bin/env python
#######################################################
# Copyright (c) 2015, ArrayFire
# All rights reserved.
#
# This file is distributed under 3-clause BSD license.
# The complete license agreement can be obtained at:
# http://arrayfire.com/licenses/BSD-3-Clause
########################################################
import arrayfire as af
from . import _util
def simple_algorithm(verbose=False):
display_func = _util.display_func(verbose)
print_func = _util.print_func(verbose)
a = af.randu(3, 3)
k = af.constant(1, 3, 3, dtype=af.Dtype.u32)
af.eval(k)
print_func(af.sum(a), af.product(a), af.min(a), af.max(a), af.count(a), af.any_true(a), af.all_true(a))
display_func(af.sum(a, 0))
display_func(af.sum(a, 1))
rk = af.constant(1, 3, dtype=af.Dtype.u32)
rk[2] = 0
af.eval(rk)
display_func(af.sumByKey(rk, a, dim=0))
d
|
isplay_func(af.sumByKey(rk, a, dim=1))
display_func(af.productByKey(rk, a, dim=0))
display_func(af.productByKey(rk, a, dim=1))
|
display_func(af.minByKey(rk, a, dim=0))
display_func(af.minByKey(rk, a, dim=1))
display_func(af.maxByKey(rk, a, dim=0))
display_func(af.maxByKey(rk, a, dim=1))
display_func(af.anyTrueByKey(rk, a, dim=0))
display_func(af.anyTrueByKey(rk, a, dim=1))
display_func(af.allTrueByKey(rk, a, dim=0))
display_func(af.allTrueByKey(rk, a, dim=1))
display_func(af.countByKey(rk, a, dim=0))
display_func(af.countByKey(rk, a, dim=1))
display_func(af.product(a, 0))
display_func(af.product(a, 1))
display_func(af.min(a, 0))
display_func(af.min(a, 1))
display_func(af.max(a, 0))
display_func(af.max(a, 1))
display_func(af.count(a, 0))
display_func(af.count(a, 1))
display_func(af.any_true(a, 0))
display_func(af.any_true(a, 1))
display_func(af.all_true(a, 0))
display_func(af.all_true(a, 1))
display_func(af.accum(a, 0))
display_func(af.accum(a, 1))
display_func(af.scan(a, 0, af.BINARYOP.ADD))
display_func(af.scan(a, 1, af.BINARYOP.MAX))
display_func(af.scan_by_key(k, a, 0, af.BINARYOP.ADD))
display_func(af.scan_by_key(k, a, 1, af.BINARYOP.MAX))
display_func(af.sort(a, is_ascending=True))
display_func(af.sort(a, is_ascending=False))
b = (a > 0.1) * a
c = (a > 0.4) * a
d = b / c
print_func(af.sum(d))
print_func(af.sum(d, nan_val=0.0))
display_func(af.sum(d, dim=0, nan_val=0.0))
val, idx = af.sort_index(a, is_ascending=True)
display_func(val)
display_func(idx)
val, idx = af.sort_index(a, is_ascending=False)
display_func(val)
display_func(idx)
b = af.randu(3, 3)
keys, vals = af.sort_by_key(a, b, is_ascending=True)
display_func(keys)
display_func(vals)
keys, vals = af.sort_by_key(a, b, is_ascending=False)
display_func(keys)
display_func(vals)
c = af.randu(5, 1)
d = af.randu(5, 1)
cc = af.set_unique(c, is_sorted=False)
dd = af.set_unique(af.sort(d), is_sorted=True)
display_func(cc)
display_func(dd)
display_func(af.set_union(cc, dd, is_unique=True))
display_func(af.set_union(cc, dd, is_unique=False))
display_func(af.set_intersect(cc, cc, is_unique=True))
display_func(af.set_intersect(cc, cc, is_unique=False))
_util.tests["algorithm"] = simple_algorithm
|
jiahao/godot
|
parseh5.py
|
Python
|
mit
| 5,010 | 0.011976 |
#!/usr/bin/env python
import datetime
import logging
import math
import socket
import tables
import xml.etree.ElementTree as ET
logging.basicConfig(filename = 'mbta_daemon.log', level=logging.INFO)
logger = logging.getLogger('xml2hdf5')
class VehicleLocation(tables.IsDescription):
vehicleID = tables.StringCol(4)
route = tables.StringCol(8)
direction = tables.StringCol(16)
latitude = tables.Float64Col() #Reported latitude
longitude = tables.Float64Col() #Reported longitude
time = tables.Float64Col() #Time stamp in seconds since epoch time
heading = tables.UInt16Col() #Heading in degrees
def parse_mbta_xml(database, thefile, presentData = None):
"""
Parses MBTA XML data and adds it to a HDF5 database.
Inputs:
database: Handle to HDF5 file
thefile: Name of XML file to parse
presentData: A dictionary hash of present data (to save time on the check)
If absent, will use database queries (much slower)
"""
try:
tree = ET.parse(thefile)
root = tree.getroot()
except ET.ParseError: #Error parsing XML content of the file
logger.error('Could not find root of XML file: %s', thefile)
return
#Part 1. Get epoch time to nearest second
# MBTA reports in whole units of milliseconds
timeData = root.find('lastTime')
if timeData is None: #Maybe XML returned an error of some sort
logger.warning('XML file %s does not have time data', thefile)
return
report_time = long(timeData.attrib['time'][:-3])
#Part 2. Parse vehicle location data.
for thevehicle in root.iter('vehicle'):
rawdata = thevehicle.attrib #Raw MBTA data
data= {}
try:
#Bus was here at this epoch time
data['time'] = report_time - long(rawdata['secsSinceReport'])
data['vehicleID'] = rawdata['id']
data['route'] = rawdata['routeTag']
data['direction'] = rawdata['dirTag']
data['latitude'] = rawdata['lat']
data['longitude'] = rawdata['lon']
data['heading'] = rawdata['heading']
except KeyError:
pass
#Part 3. Make sure record is not a duplicate
if presentData is None:
#No hashes. Query database to check that this record wasn't already reported
queryString = '((vehicleID == "%(vehicleID)s") & (time == %(time)s))' % data
try:
query = database.getWhereList(queryString)
except tables.exceptions.HDF5ExtError:
#gets thrown whenHDF5 file is open and being written to
logger.critical("Could not get file lock on HDF5 file. Abort.")
import sys
sys.exit()
if len(query) == 0:
vehiclePosition = database.row
for key, value in data.items():
vehiclePosition[key] = value
vehiclePosition.append()
else:
assert len(query) == 1, "OMG COLLISION"
else:
#Use hashes to check if record is already reported
if (data['vehicleID'], data['time']) not in presentData:
vehiclePosition = database.row
for key, value in data.items():
vehiclePosition[key] = value
vehiclePosition.append()
presentData[data['vehicleID'], data['time']] = True
database.flush()
logger.info('Parsed data from XML file: %s', thefile)
return presentData
def ParseAll(theHDF5FileName = 'mbta_trajectories.h5', Cleanup = True):
import glob, os
try:
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
## Create an abstract socket, by prefixing it with null.
s.bind('\0mbta_hdf5_writer_'+theHDF5FileName)
compressionOptions = tables.Filters(complevel=9, complib='blosc')
f = tables.openFile(theHDF5FileName, mode = 'a',
filters = compressionOptions, title = 'Historical MBTA bus data')
logging.debug('Lock acquired on %s', theHDF5FileName)
except socket.error:
logging.error('Lock could not be acquired on %s', theHDF5FileName)
return
try:
thetable = f.root.VehicleLocations
except tables.exceptions.NoSuchNodeError:
thetable = f.createTable('/', 'VehicleLocations', VehicleLocation,
'MBTA vehicle pos
|
itions', filters = compressionOptions)
#Create table indexers
thetable.cols.time.createIndex()
thetable.cols.vehicleID.createIndex()
#Hash current data
presentData = {}
for row in thetable:
presentData[row['vehicleID'], row['time']] = True
for filename in sorted(glob.glob('*.xml')):
presentData = parse_mbta_xml(thetable, filename, pr
|
esentData)
if Cleanup:
os.unlink(filename)
f.close()
if __name__ == '__main__':
ParseAll()
|
nathanhilbert/FPA_Core
|
openspending/views/api_v2/cubes_ext.py
|
Python
|
agpl-3.0
| 9,418 | 0.008388 |
import logging
from datetime import datetime
import os
import json
from flask import request, g, Response
#from openspending.core import cache
from openspending.auth import require
from openspending.lib.jsonexport import jsonify
from openspending.views.api_v2.common import blueprint
from openspending.views.error import api_json_errors
#imports prepare_cell_cubes_ext
from openspending.lib.cubes_util import *
from openspending.lib.cache import cache_key
from openspending.core import cache
from cubes.server.utils import *
from cubes.formatters import JSONLinesGenerator, csv_generator, xls_generator
from cubes.browser import SPLIT_DIMENSION_NAME
from cubes.server.decorators import prepare_cell
log = logging.getLogger(__name__)
@blueprint.route("/api/slicer/cube/<star_name>/cubes_model", methods=["JSON", "GET"])
@requires_complex_browser
@api_json_errors
@cache.cached(timeout=60, key_prefix=cache_key)
#@log_request("aggregate", "aggregates")
def cubes_model(star_name):
cubes_arg = request.args.get("cubes", None)
try:
cubes = cubes_arg.split("|")
except:
raise RequestError("Parameter cubes with value '%s'should be a valid cube names separated by a '|'"
% (cubes_arg) )
if len (cubes) > 5:
raise RequestError("You can only join 5 cubes together at one time")
g.cube = get_complex_cube(star_name, cubes)
hier_limits = None
# Copy from the application context
#g.json_record_limit = current_app.slicer.json_record_limit
g.json_record_limit = 10000
if "prettyprint" in request.args:
g.prettyprint = str_to_bool(request.args.get("prettyprint"))
else:
g.prettyprint = current_app.slicer.prettyprint
response = g.cube.to_dict(expand_dimensions=True,
with_mappings=False,
full_attribute_names=True,
create_label=True,
hierarchy_limits=hier_limits)
response["features"] = workspace.cube_features(g.cube)
return jsonify(response)
def xlschecker(*args, **kwargs):
if "format" in request.args:
if request.args.get("format") in ['excel', 'csv']:
return True
return False
@blueprint.route("/api/slicer/cube/<star_name>/cubes_aggregate", methods=["JSON", "GET"])
@requires_complex_browser
@api_json_errors
@cache.cached(timeout=60, key_prefix=cache_key, unless=xlschecker)
def aggregate_cubes(star_name):
cubes_arg = request.args.get("cubes", None)
try:
cubes = cubes_arg.split("|")
except:
raise RequestError("Parameter cubes with value '%s'should be a valid cube names separated by a '|'"
% (cubes_arg) )
if len (cubes) > 5:
raise RequestError("You can only join 5 cubes together at one time")
g.cube = get_complex_cube(star_name, cubes)
g.browser = current_app.cubes_workspace.browser(g.cube)
cube = g.cube
output_format = validated_parameter(request.args, "format",
values=["json", "csv", "excel"],
default="json")
header_type = validated_parameter(request.args, "header",
values=["names", "labels", "none"],
default="labels")
fields_str = request.args.get("fields")
if fields_str:
fields = fields_str.lower().split(',')
else:
fields = None
# Aggregates
# ----------
aggregates = []
for agg in request.args.getlist("aggregates") or []:
aggregates += agg.split("|")
drilldown = []
ddlist = request.args.getlist("drilldown")
if ddlist:
for ddstring in ddlist:
drilldown += ddstring.split("|")
#this handles cuts with geometry__time
prepare_cell_cubes_ext(restrict=False)
prepare_cell("split", "split")
result = g.browser.aggregate(g.cell,
aggregates=aggregates,
drilldown=drilldown,
split=g.split,
page=g.page,
page_size=g.page_size,
order=g.order)
# Hide cuts that were generated internally (default: don't)
if current_app.slicer.hide_private_cuts:
result.cell = result.cell.public_cell()
# Copy from the application context
#g.json_record_limit = current_app.slicer.json_record_limit
g.json_record_limit = 10000
if "prettyprint" in request.args:
g.prettyprint = str_to_bool(request.args.get("prettyprint"))
else:
g.prettyprint = current_app.slicer.prettyprint
if output_format == "json":
resultdict= result.to_dict()
tempcells = list(result._cells)
resultdict['cells'] = tempcells
resultdict['cell'] = list(resultdict['cell'])
if "cluster" in request.args:
clusteragg = request.args.get('clusteragg', 'avg')
if len(cubes) > 1 or len(cubes) < 1:
log.warn("cluster must have one and only one cube. This call had %s"%str(cubes))
if clusteragg in ['avg', 'min', 'max', 'sum']:
clusterfield = "%s__amount_%s"%(cubes[0], clusteragg,)
numclusters = request.args.get('numclusters',5)
tempresult = get_cubes_breaks(resultdict['cells'], clusterfield, method=request.args.get('cluster'), k=numclusters)
tempresult['data'] = list(tempresult['data'])
resultdict.set('cluster', tempresult)
resp = Response(response=json.dumps(resultdict),
status=200, \
mimetype="application/json")
return(resp)
elif output_format not in ["csv","excel"]:
raise RequestError("unknown response format '%s'" % output_format)
# csv
if header_type == "names":
header = result.labels
elif header_type == "labels":
header = []
for l in result.labels:
# TODO: add a little bit of polish to this
if l == SPLIT_DIMENSION_NAME:
header.append('Matches Filters')
else:
header += [ attr.label or attr.name for attr in cube.get_attributes([l], aggregated=True) ]
else:
header = None
fields = result.labels
try:
filename_output = cubes[0] + "_" + datetime.now().strftime("%Y-%m-%d")
except:
filename_output = "aggregate_" + datetime
if output_format == "excel":
output_string = xls_generator(result,
fields,
include_header=bool(header),
header=header)
headers = {"Content-Disposition": 'attachment; filename="' + filename_output + '.xlsx"'}
return Response(output_string,
mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
headers=headers)
else:
generator = csv_generator(result,
fields,
include_header=bool(header),
header=header)
headers = {"Content-Disposition": 'attachment; filename="' + filename_output + '.csv"'}
return Response(generator,
mimetype='text/csv',
headers=headers)
@bl
|
ueprint.route("/api/slicer/cube/<star_name>/cubes_facts", methods=["JSON", "GET"])
@requires_complex_browser
@api_json_errors
@cache.cached(timeout=60, key_prefix=cache_key)
#@log
|
_request("facts", "fields")
def cubes_facts(star_name):
cubes_arg = request.args.get("cubes", None)
try:
cubes = cubes_arg.split("|")
except:
raise RequestError("Parameter cubes with value '%s'should be a valid cube names separated by a '|'"
% (cubes_arg) )
if len (cubes) > 5:
raise RequestError("You can only join 5 cubes together at one time")
g.cube = get_complex_cube(star_name, cubes)
g.browser = current_app.c
|
JMSkelton/Transformer
|
Transformer/Utilities/__init__.py
|
Python
|
gpl-3.0
| 36 | 0 |
#
|
Transformer/Utilities/__i
|
nit__.py
|
pieterdp/helptux
|
helptux/models/user.py
|
Python
|
gpl-2.0
| 2,613 | 0.001148 |
import bcrypt
from hashlib import sha512
from helptux import db, login_manager
class Role(db.Model):
__tablename__ = 'roles'
id = db.Column(db.Integer, primary_key=True)
role = db.Column(db.String(255), index=True, unique=True)
def __repr__(self):
return '<Role {0}>'.format(self.role)
def __init__(self, role):
self.role = role
users_roles = db.Table('users_roles',
db.Column('user_id', db.Integer, db.ForeignKey('users.id')),
db.Column('role_id', db.Integer, db.ForeignKey('roles.id'))
)
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(255), index=True, unique=True, nullable=False)
email = db.Column(db.Strin
|
g(255), index=True, unique=True, nullable=False)
password_hash = db.Column(db.String(), nullable=False)
posts = db.relationship('Post', backref='author', lazy='dynamic')
authenticated = db.Column(db.Boolean, default=False)
roles = db.relationship('Role',
secon
|
dary=users_roles,
primaryjoin=(users_roles.c.user_id == id),
secondaryjoin=(users_roles.c.role_id == Role.id),
backref=db.backref('users', lazy='dynamic'),
lazy='dynamic')
def __init__(self, email, password):
self.email = email
self.username = self.email
self.set_password(password)
def __repr__(self):
return '<User {0}>'.format(self.username)
def output_obj(self):
return {
'id': self.id,
'username': self.username,
'posts': [p.id for p in self.posts],
'roles': [r.id for r in self.roles]
}
def set_password(self, input_password):
bit_input = input_password.encode('utf-8')
self.password_hash = bcrypt.hashpw(bit_input, bcrypt.gensalt())
def verify_password(self, input_password):
bit_input = input_password.encode('utf-8')
if bcrypt.hashpw(bit_input, self.password_hash) == self.password_hash:
return True
else:
return False
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return str(self.id)
def is_authenticated(self):
return self.authenticated
def has_role(self, role_name):
for role in self.roles:
if role.role == role_name:
return True
return False
|
whereskenneth/Dwarfsquad
|
dwarfsquad/lib/build/from_xlsx/build_full_ac.py
|
Python
|
mit
| 1,706 | 0.004103 |
import csv
from openpyxl import load_workbook
import io
from dwarfsquad.lib.build.from_export import build_compound_methods, build_lots_and_levels
from dwarfsquad.lib.build.from_export.build_assay_configuration import build_assay_configuration
from dwarfsquad.lib.build.from_export.build_rulesettings import add_rules_to_methods
from dwarfsquad.lib.export.export_rulesettings import generate_rule_schemas
from dwarfsquad.lib.macros.generate_macros import generate_macros
def build_full_ac(path_to_xlsx):
wb = load_workbook(path_to_xlsx)
validate_workbook(wb)
ac = build_assay_configuration(read_csv_from_sheet(wb.get_sheet_by_name('Assay')))
ac.compound_methods = build_compound_methods(read_csv_from_sheet(wb.get_sheet_by_name('Compound')))
ac.lots = build_lots_and_levels(read_csv_from_sheet(wb.get_sheet_by_name('Lots')))
ac.compound_methods = add_rules_to_methods(read_csv_from_sheet(wb.get_sheet_by_name('Rule')), ac.compound_methods)
ac.qa_rule_schemas = generate_rule_schemas(ac)
if not ac.macros:
ac.macros = generate_macros(ac)
return ac
def get_column_value(c):
if c.value:
try:
return str(round(c.value, 8))
except TypeError:
return str(c.value)
else:
|
return ''
def read_csv_from_sheet(worksheet):
stream = io.StringIO()
for row in worksheet.rows:
stream.write(u','.join([get_column_value(c) for c in row]))
stream.write(u'\n')
reader = csv.DictReader(stream.getvalue().splitlines())
|
rows = [r for r in reader]
return rows
def validate_workbook(wb):
assert 'Assay' in wb
assert 'Compound' in wb
assert 'Lots' in wb
assert 'Rule' in wb
|
jkimbo/freight
|
freight/config.py
|
Python
|
apache-2.0
| 8,210 | 0.000853 |
from __future__ import absolute_import, unicode_literals
import flask
import os
import logging
from flask_heroku import Heroku
from flask_redis import Redis
from flask_sslify import SSLify
from flask_sqlalchemy import SQLAlchemy
from raven.contrib.flask import Sentry
from werkzeug.contrib.fixers import ProxyFix
from freight.api.controller import ApiController
from freight.constants import PROJECT_ROOT
from freight.utils.celery import ContextualCelery
api = ApiController(prefix='/api/0')
db = SQLAlchemy(session_options={})
celery = ContextualCelery()
heroku = Heroku()
redis = Redis()
sentry = Sentry(logging=True, level=logging.WARN)
def configure_logging(app):
logging.getLogger().setLevel(getattr(logging, app.config['LOG_LEVEL']))
def create_app(_read_config=True, **config):
from kombu import Queue
app = flask.Flask(
__name__,
static_folder=None,
template_folder=os.path.join(PROJECT_ROOT, 'templates'))
# Utilized for sessions and other secrets
# NOTE: This key is insecure and you should override it on the server
app.config['SECRET_KEY'] = 't\xad\xe7\xff%\xd2.\xfe\x03\x02=\xec\xaf\\2+\xb8=\xf7\x8a\x9aLD\xb1'
if 'SECRET_KEY' in os.environ:
app.config['SECRET_KEY'] = os.environ['SECRET_KEY']
# The api key to authorize end users against this system.
# NOTE: This key is insecure and you should override it on the server
app.config['API_KEY'] = '3e84744ab2714151b1db789df82b41c0021958fe4d77406e9c0947c34f5c5a70'
if 'API_KEY' in os.environ:
app.config['API_KEY'] = os.environ['API_KEY']
# The private key to use when cloning repositories
# TODO(dcramer): this should support an on-disk option, as well as be
# possible to override per repo
app.config['SSH_PRIVATE_KEY'] = os.environ.get('SSH_PRIVATE_KEY', '').replace("\\n", "\n")
app.config['FREIGHT_URL'] = os.environ.get('FREIGHT_URL', '').rstrip('/')
if 'REDISCLOUD_URL' in os.environ:
app.config
|
['REDIS_URL'] = os.environ['REDISCLOUD_URL'
|
]
app.config['WORKSPACE_ROOT'] = os.environ.get('WORKSPACE_ROOT', '/tmp')
app.config['DEFAULT_TIMEOUT'] = int(os.environ.get('DEFAULT_TIMEOUT', 300))
app.config['LOG_LEVEL'] = os.environ.get('LOG_LEVEL', 'INFO' if config.get('DEBUG') else 'ERROR')
# Currently authentication requires Google
app.config['GOOGLE_CLIENT_ID'] = os.environ.get('GOOGLE_CLIENT_ID')
app.config['GOOGLE_CLIENT_SECRET'] = os.environ.get('GOOGLE_CLIENT_SECRET')
app.config['GOOGLE_DOMAIN'] = os.environ.get('GOOGLE_DOMAIN')
# Generate a GitHub token via Curl:
# curlish https://api.github.com/authorizations \
# -u your-username \
# -X POST \
# -J scopes='repo' \
# -J note='freight'
app.config['GITHUB_TOKEN'] = os.environ.get('GITHUB_TOKEN')
app.config['GITHUB_API_ROOT'] = 'https://api.github.com'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
app.config['SQLALCHEMY_POOL_SIZE'] = 60
app.config['SQLALCHEMY_MAX_OVERFLOW'] = 20
if 'SQLALCHEMY_DATABASE_URI' in os.environ:
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['SQLALCHEMY_DATABASE_URI']
app.config['BROKER_TRANSPORT'] = None
if 'BROKER_URL' in os.environ:
app.config['BROKER_URL'] = os.environ['BROKER_URL']
app.config['CELERY_ACCEPT_CONTENT'] = ['json']
app.config['CELERY_ACKS_LATE'] = True
app.config['CELERY_DEFAULT_QUEUE'] = "default"
app.config['CELERY_DEFAULT_EXCHANGE'] = "default"
app.config['CELERY_DEFAULT_EXCHANGE_TYPE'] = "direct"
app.config['CELERY_DEFAULT_ROUTING_KEY'] = "default"
app.config['CELERY_DISABLE_RATE_LIMITS'] = True
app.config['CELERY_IGNORE_RESULT'] = True
app.config['CELERY_RESULT_BACKEND'] = None
app.config['CELERY_RESULT_SERIALIZER'] = 'json'
app.config['CELERY_SEND_EVENTS'] = False
app.config['CELERY_TASK_RESULT_EXPIRES'] = 1
app.config['CELERY_TASK_SERIALIZER'] = 'json'
app.config['CELERY_TIMEZONE'] = 'UTC'
app.config['CELERYD_PREFETCH_MULTIPLIER'] = 1
app.config['CELERYD_MAX_TASKS_PER_CHILD'] = 10000
app.config['CELERY_QUEUES'] = (
Queue('default', routing_key='default'),
Queue('freight.tasks', routing_key='freight.tasks'),
)
app.config['CELERY_IMPORTS'] = (
'freight.tasks',
)
app.config['CELERY_ROUTES'] = {
'freight.execute_task': {
'queue': 'freight.tasks',
'routing_key': 'freight.tasks',
},
}
app.config['SENTRY_INCLUDE_PATHS'] = [
'ds',
]
# We don't support non-proxied installs
app.wsgi_app = ProxyFix(app.wsgi_app)
# Pull in Heroku configuration
heroku.init_app(app)
if 'DYNO' in os.environ:
# XXX: the released version of flask-sslify does not support init_app
SSLify(app)
# Set any remaining defaults that might not be present yet
if not app.config.get('SQLALCHEMY_DATABASE_URI'):
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///freight'
if not app.config.get('BROKER_URL'):
app.config['BROKER_URL'] = 'redis://localhost/0'
app.config.update(config)
if _read_config:
if os.environ.get('FREIGHT_CONF'):
# FREIGHT_CONF=/etc/freight.conf.py
app.config.from_envvar('FREIGHT_CONF')
else:
# Look for ~/.freight/freight.conf.py
path = os.path.normpath(os.path.expanduser('~/.freight/freight.conf.py'))
app.config.from_pyfile(path, silent=True)
configure_logging(app)
configure_sentry(app)
configure_api(app)
configure_celery(app)
configure_redis(app)
configure_sqlalchemy(app)
configure_web_routes(app)
return app
def configure_api(app):
from freight.api.controller import ApiCatchall
from freight.api.app_details import AppDetailsApiView
from freight.api.app_index import AppIndexApiView
from freight.api.stats import StatsApiView
from freight.api.task_details import TaskDetailsApiView
from freight.api.task_index import TaskIndexApiView
from freight.api.task_log import TaskLogApiView
api.add_resource(AppIndexApiView, '/apps/')
api.add_resource(AppDetailsApiView, '/apps/<app_id>/')
api.add_resource(StatsApiView, '/stats/')
api.add_resource(TaskIndexApiView, '/tasks/')
api.add_resource(TaskDetailsApiView, '/tasks/<task_id>/')
api.add_resource(TaskLogApiView, '/tasks/<task_id>/log/')
# catchall should be the last resource
api.add_resource(ApiCatchall, '/<path:path>')
# init must be called after routes are registered
api.init_app(app)
def configure_celery(app):
celery.init_app(app)
def configure_redis(app):
redis.init_app(app)
def configure_sentry(app):
from flask import session
from raven.contrib.celery import register_signal, register_logger_signal
sentry.init_app(app)
@app.before_request
def capture_user(*args, **kwargs):
if 'uid' in session:
sentry.client.user_context({
'id': session['uid'],
'email': session['email'],
})
register_signal(sentry.client)
register_logger_signal(sentry.client)
def configure_sqlalchemy(app):
db.init_app(app)
def configure_web_routes(app):
from freight.web.auth import AuthorizedView, LoginView, LogoutView
from freight.web.index import IndexView
from freight.web.static import StaticView
static_root = os.path.join(PROJECT_ROOT, 'dist')
app.add_url_rule(
'/static/<path:filename>',
view_func=StaticView.as_view(b'static', root=static_root))
app.add_url_rule(
'/auth/login/',
view_func=LoginView.as_view(b'login', authorized_url='authorized'))
app.add_url_rule(
'/auth/logout/',
view_func=LogoutView.as_view(b'logout', complete_url='index'))
app.add_url_rule(
'/auth/complete/',
view_func=AuthorizedView.as_view(b'authorized', authorized_url='authorized', complete_url='index'))
index_view = IndexView.as_view(b'index', login_url='login')
app.add_url_rule('/', view_func=index_view)
app.add_url_rule('/<path:
|
openstack/tripleo-heat-templates
|
tools/convert_nic_config.py
|
Python
|
apache-2.0
| 7,737 | 0 |
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import collections
import datetime
import os
import re
import shutil
import sys
import yaml
def parse_opts(argv):
parser = argparse.ArgumentParser(
description='Convert to new NIC config templates with '
'OS::Heat::Value resources.')
parser.add_argument('-t', '--template', metavar='TEMPLATE_FILE',
help=("Existing NIC config template to conver."),
required=True)
parser.add_argument('--discard-comments', metavar='DISCARD_COMMENTS',
help="Discard comments from the template. (The "
"scripts functions to keep YAML file comments in "
"place, does not work in all scenarios.)",
default=False)
opts = parser.parse_args(argv[1:])
return opts
def to_commented_yaml(filename):
"""Convert comments into 'comments<num>: ...' YAML"""
out_str = ''
last_non_comment_spaces = ''
with open(filename, 'r') as f:
comment_count = 0
for line in f:
# skip blank line
if line.isspace():
continue
char_count = 0
spaces = ''
for char in line:
char_count += 1
if char == ' ':
spaces += ' '
next
elif char == '#':
comment_count += 1
comment = line[char_count:-1]
last_non_comment_spaces = spaces
out_str += "%scomment%i_%i: '%s'\n" % (
last_non_comment_spaces, comment_count, len(spaces),
comment)
break
else:
last_non_comment_spaces = spaces
out_str += line
# inline comments check
m = re.match(".*:.*#(.*)", line)
if m:
comment_count += 1
out_str += "%s inline_comment%i: '%s'\n" % (
last_non_comment_spaces, comment_count, m.group(1))
break
with open(filename, 'w') as f:
f.write(out_str)
return out_str
def to_normal_yaml(filename):
"""Convert back to normal #commented YAML"""
with open(filename, 'r') as f:
data = f.read()
out_str = ''
next_line_break = False
for line in data.split('\n'):
# get_input not supported by run-os-net-config.sh script
line = line.replace('get_input: ', '')
# Normal comments
m = re.match(" +comment[0-9]+_([0-9]+): '(.*)'.*", line)
# Inline comments
i = re.match(" +inline_comment[0-9]+: '(.*)'.*", line)
if m:
if next_line_break:
out_str += '\n'
next_line_break = False
for x in range(0, int(m.group(1))):
out_str += " "
out_str += "#%s\n" % m.group(2)
elif i:
|
out_str += " #%s\n" % i.group(1)
next_line_break = False
else:
if next_line_break:
out_str += '\n'
out_str += line
next_line_break = True
if next_line_break:
|
out_str += '\n'
with open(filename, 'w') as f:
f.write(out_str)
return out_str
class TemplateDumper(yaml.SafeDumper):
def represent_ordered_dict(self, data):
return self.represent_dict(data.items())
def description_presenter(self, data):
if not len(data) > 80:
return self.represent_scalar('tag:yaml.org,2002:str', data)
return self.represent_scalar('tag:yaml.org,2002:str', data, style='>')
class TemplateLoader(yaml.SafeLoader):
def construct_mapping(self, node):
self.flatten_mapping(node)
return collections.OrderedDict(self.construct_pairs(node))
TemplateDumper.add_representer(str,
TemplateDumper.description_presenter)
TemplateDumper.add_representer(bytes,
TemplateDumper.description_presenter)
TemplateDumper.add_representer(collections.OrderedDict,
TemplateDumper.represent_ordered_dict)
TemplateLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
TemplateLoader.construct_mapping)
def write_template(template, filename=None):
with open(filename, 'w') as f:
yaml.dump(template, f, TemplateDumper, width=120,
default_flow_style=False)
def validate_template(template):
if not os.path.exists(template):
raise RuntimeError('Template not provided.')
if not os.path.isfile(template):
raise RuntimeError('Template %s is not a file.')
pass
def backup_template(template):
extension = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
backup_filename = os.path.realpath(template) + '.' + extension
if os.path.exists(backup_filename):
raise RuntimeError('Backupe file: %s already exists. Aborting!'
% backup_filename)
shutil.copyfile(template, backup_filename)
print('The original template was saved as: %s' % backup_filename)
def needs_conversion():
with open(OPTS.template, 'r') as f:
template = yaml.load(f.read(), Loader=TemplateLoader)
net_config_res = template['resources'].get('OsNetConfigImpl')
if (net_config_res and net_config_res[
'type'] == 'OS::Heat::SoftwareConfig'):
backup_template(OPTS.template)
if not OPTS.discard_comments:
# Convert comments '# ...' into 'comments<num>: ...'
# is not lost when loading the data.
to_commented_yaml(OPTS.template)
return True
return False
def convert_to_heat_value_resource():
if needs_conversion():
with open(OPTS.template, 'r') as f:
template = yaml.load(f.read(), Loader=TemplateLoader)
net_config_res = template['resources']['OsNetConfigImpl']
net_config_res_props = net_config_res['properties']
# set the type to OS::Heat::Value
net_config_res['type'] = 'OS::Heat::Value'
del net_config_res_props['group']
old_config = net_config_res_props['config']
new_config = old_config['str_replace']['params']['$network_config']
del net_config_res_props['config']
net_config_res_props['value'] = new_config
outputs = template['outputs']
del outputs['OS::stack_id']
outputs['config'] = {}
outputs['config']['value'] = 'get_attr[OsNetConfigImpl, value]'
write_template(template, filename=OPTS.template)
if not OPTS.discard_comments:
# Convert previously converted comments, 'comments<num>: ...'
# YAML back to normal #commented YAML
to_normal_yaml(OPTS.template)
print('The update template was saved as: %s' % OPTS.template)
else:
print('Template does not need conversion: %s' % OPTS.template)
OPTS = parse_opts(sys.argv)
convert_to_heat_value_resource()
|
Vesihiisi/COH-tools
|
importer/DkBygningDa.py
|
Python
|
mit
| 3,500 | 0 |
from Monument import Monument, Dataset
import importer_utils as utils
import importer as importer
class DkBygningDa(Monument):
def set_adm_location(self):
if self.has_non_empty_attribute("kommune"):
if utils.count_wikilinks(self.kommune) == 1:
adm_location = utils.q_from_first_wikilink("da", self.kommune)
self.add_statement("located_adm", adm_location)
def set_location(self):
"""
Set location based on 'by' column.
If there's one wikilinked item, confirm that
the corresponding WD item is of a type that's
a subclass of 'human settlement', using query results
downloaded by importer.
If not wikilinked, check if there's a dawp article
with the same name and do the same check.
"""
place_item = None
if self.has_non_empty_attribute("by"):
place = self.by
if utils.count_wikilinks(place) == 1:
place = utils.get_wikilinks(place)[0].title
if utils.wp_page_exists("da", place):
place_item = utils.q_from_wikipedia("da", place)
if place_item:
place_item_ids = utils.get_P31(place_item, self.repo)
for p31_value in place_item_ids:
if p31_value in self.data_files["settlement"]:
self.add_statement("location", place_item)
# there can be more than one P31, but after first positive
# we can leave
return
def set_sagsnr(self):
"""Danish listed buildings case ID (P2783)."""
self.add_statement("listed_building_dk", str(self.sagsnr))
def update_labels(self):
self.add_label("da", utils.remove_markup(self.sagsnavn))
def set_address(self):
"""
Set address of object.
self.addresse is always streetname + number.
self.postnr is always zipcode
self.by is always placename.
"""
if self.has_non_empty_attribute("adresse"):
address = self.adresse + " " + self.postnr + " " + self.by
self.add_statement("located_street", address)
def set_inception(self):
if self.has_non_empty_attribute("opforelsesar"):
inception = utils.parse_year(self.opforelsesar)
if isinstance(inception, int):
self.add_statement(
"inception", utils.package_time({"year": inception}))
def set_monuments_all_id(self):
"""Map monuments_all ID to fields in this table."""
self.monuments_all_id = "{!s}-{!s}-{!s}".format(
self.kommunenr, self.ejendomsnr, self.bygningsnr)
def __init__(self, db_row_dict, mapping, data_files, existing):
Monument.__init__(self, db_row_dict, mapping, data_files, existing)
self.set_monuments_all_id()
self.update_labels()
self.exist
|
s("da")
self.set_commonscat()
self.set_image("billede")
self.set_coords(("lat", "lon"))
self.set_adm_location()
self.set_l
|
ocation()
self.set_sagsnr()
self.set_address()
self.set_inception()
self.exists_with_prop(mapping)
self.print_wd()
if __name__ == "__main__":
"""Point of entrance for importer."""
args = importer.handle_args()
dataset = Dataset("dk-bygninger", "da", DkBygningDa)
dataset.subclass_downloads = {"settlement": "Q486972"}
importer.main(args, dataset)
|
ujac81/PiBlaster
|
Pi/PyBlaster/src/helpers.py
|
Python
|
gpl-3.0
| 541 | 0 |
"""helpers.py -- supporting routines for PyBlaster project
@Author Ulrich Jansen <ulrich
|
.jansen@rwth-aachen.de>
"""
suffixes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
def humansize(nbytes):
if nbytes == 0:
return
|
'0 B'
i = 0
while nbytes >= 1024 and i < len(suffixes)-1:
nbytes /= 1024.
i += 1
f = ('%.2f' % nbytes).rstrip('0').rstrip('.')
return '%s %s' % (f, suffixes[i])
def seconds_to_minutes(nsecs):
if nsecs == 0:
return ""
return "%d:%02d" % (int(nsecs / 60), nsecs % 60)
|
SalesforceFoundation/CumulusCI
|
cumulusci/robotframework/tests/test_template_util.py
|
Python
|
bsd-3-clause
| 1,872 | 0.000534 |
import unittest
from cumulusci.core import template_utils
class TemplateUtils(unittest.TestCase):
def test_string_generator(self):
x = 100
y = template_utils.StringGenerator(lambda: str(x))
assert str(y) == "100"
x = 200
assert str(y) == "200"
def test_faker_library(self):
fake = template_utils.FakerTemplateLibrary()
assert fake.first_name
assert "example.com" in fake.email(domain="example.com")
def test_faker_languages(self):
fake = template_utils.FakerTemplateLibrary("no_NO")
assert fake.first_name
assert "example.com" in fake.email(domain="example.com")
def test_format_str(self):
assert template_utils.format_str("abc") == "abc"
assert template_utils.format_str("{{abc}}", {"abc": 5}) == "5"
assert len(template_utils.format_str("{{fake.first_name}}"))
assert "15" in template_utils.format_str(
"{{fake.first_name}} {{count}}", {"count": 15}
)
assert "15" in template_utils.format_str(
"{{fake.first_name}} {{count}}", {"count": "15"}
)
assert (
template_utils.format_str("{% raw %}{}{% endraw %}", {"count": "15"})
== "{}"
)
def test_format_str_languages(self):
norwegian_f
|
aker = template_utils.FakerTemplateLibrary("no_NO")
val = template_utils.format_str(
"{{vikingfake.first_name}} {{abc}}",
{"abc": 5, "vikingfake": norwegian_faker},
|
)
assert "5" in val
def cosmopolitan_faker(language):
return template_utils.FakerTemplateLibrary(language)
val = template_utils.format_str(
"{{fakei18n('ne_NP').first_name}} {{abc}}",
{"abc": 5, "fakei18n": cosmopolitan_faker, "type": type},
)
assert "5" in val
|
fitnr/buoyant
|
tests/test_buoyant.py
|
Python
|
gpl-3.0
| 5,408 | 0.001664 |
"""General tests for Buoyant library."""
import datetime
import unittest
from io import BytesIO
import buoyant
from buoyant import buoy
sampledata = [
{
"latitude (degree)": "39.235",
"sea_surface_wave_peak_period (s)": "13.79",
"polar_coordinate_r1 (1)": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;",
"station_id": "urn:ioos:station:wmo:4
|
6014",
"sea_surface_wind_wave_period (s)": "3.80",
"spectral_energy (m**2/Hz)": "0;0;0;0;0.117495;0.347233;0.340078;1
|
.07545;1.31407;0.644604;0.319928;0.20951;0.203445;0.407703;0.501098;1.05528;0.552653;0.982512;0.40238;0.259344;0.176087;0.156276;0.10127;0.0713481;0.1257;0.0469963;0.0294347;0.0344079;0.0196117;0.0208386;0.0207157;0.0185725;0.0112313;0.0140935;0.00829521;0.0135329;0.0103501;0.00823833;0.00611987;0.00516951;0.00295949;0.00274196;0.00162249;0.00153895;0.000701703;0.000452887",
"sea_surface_wave_mean_period (s)": "7.61",
"sea_water_temperature (c)": "",
"bandwidths (Hz)": "0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0200;0.0200;0.0200;0.0200;0.0200;0.0200;0.0200",
"sea_surface_wind_wave_to_direction (degree)": "",
"polar_coordinate_r2 (1)": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;",
"sampling_rate (Hz)": "",
"sea_surface_wave_to_direction (degree)": "",
"sea_surface_swell_wave_significant_height (m)": "1.07",
"number_of_frequencies (count)": "46",
"center_frequencies (Hz)": "0.0325;0.0375;0.0425;0.0475;0.0525;0.0575;0.0625;0.0675;0.0725;0.0775;0.0825;0.0875;0.0925;0.1000;0.1100;0.1200;0.1300;0.1400;0.1500;0.1600;0.1700;0.1800;0.1900;0.2000;0.2100;0.2200;0.2300;0.2400;0.2500;0.2600;0.2700;0.2800;0.2900;0.3000;0.3100;0.3200;0.3300;0.3400;0.3500;0.3650;0.3850;0.4050;0.4250;0.4450;0.4650;0.4850",
"date_time": "2015-07-31T19:50:00Z",
"sea_surface_wind_wave_significant_height (m)": "0.17",
"sea_surface_wave_significant_height (m)": "1.09",
"sea_surface_swell_wave_to_direction (degree)": "",
"sea_surface_swell_wave_period (s)": "",
"calculation_method": "UNKNOWN",
"mean_wave_direction (degree)": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;",
"longitude (degree)": "-123.974",
"principal_wave_direction (degree)": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;",
"sensor_id": "urn:ioos:sensor:wmo:46014::wpm1",
}
]
class BuoyTestCase(unittest.TestCase):
def setUp(self):
self.b = buoyant.Buoy("41012")
def test_observation(self):
"""Test the Observation class."""
self.assertTrue(issubclass(buoyant.Observation, float))
subint = float.__new__(buoyant.Observation, 11)
assert subint == 11
assert isinstance(subint, buoyant.Observation)
obs = buoyant.Observation(1, "m")
assert isinstance(obs, buoyant.Observation)
assert obs.unit == "m"
self.assertEqual(str(obs), "1.0 m")
assert repr(obs) == "Observation(1.0, 'm')"
assert obs + 2 == 3
def test_buoy_instant(self):
assert self.b
assert isinstance(self.b, buoyant.Buoy)
def test_data_exists(self):
x = self.b.sea_water_electrical_conductivity
assert x.unit == "mS/cm"
currents = self.b.currents
self.assertIsInstance(currents, list)
assert isinstance(x.datetime, datetime.datetime)
assert isinstance(self.b.image, BytesIO)
assert isinstance(self.b.__dict__["lat"], float)
assert isinstance(self.b.coords, tuple)
assert (self.b.__dict__["lat"], self.b.__dict__["lon"]) == self.b.coords
def test_keys(self):
"""Test that observation attributes exist in Buoy class."""
self.assertIsNotNone(self.b.sea_water_salinity)
self.assertIsNotNone(self.b.air_pressure_at_sea_level)
self.assertIsNotNone(self.b.air_temperature)
self.assertIsNotNone(self.b.currents)
self.assertIsNotNone(self.b.sea_water_electrical_conductivity)
self.assertIsNotNone(self.b.sea_water_salinity)
self.assertIsNotNone(self.b.sea_water_temperature)
def test_parse_unit(self):
dictionary = {"magic (pixie dust)": "42"}
x = buoy.parse_unit("magic", dictionary)
assert isinstance(x, buoyant.Observation)
nope = buoy.parse_unit("widget", dictionary)
self.assertIsNone(nope)
spectral_energy = buoy.parse_unit("spectral_energy", sampledata[0])
self.assertEqual(spectral_energy[4], buoy.Observation(0.117495, "m**2/Hz"))
def test_error(self):
with self.assertRaises(AttributeError):
self.b._get("foo bar")
self.assertIsNone(self.b.depth)
def test_image(self):
station = buoyant.Buoy(51001)
assert buoy.CAM_ENDPOINT in station.image_url
self.assertIsNotNone(station.image)
def test_degroup(self):
waves = buoyant.buoy._degroup(sampledata, buoyant.properties.waves)
self.assertEqual(
waves[0]["sea_surface_wind_wave_significant_height"],
buoy.Observation(0.17, "m"),
)
if __name__ == "__main__":
unittest.main()
|
Javex/mixminion
|
lib/mixminion/server/ServerKeys.py
|
Python
|
mit
| 49,832 | 0.003793 |
# Copyright 2002-2011 Nick Mathewson. See LICENSE for licensing information.
"""mixminion.ServerKeys
Classes for servers to generate and store keys and server descriptors.
"""
#FFFF We need support for encrypting private keys.
__all__ = [ "ServerKeyring", "generateServerDescriptorAndKeys",
"generateCertChain" ]
import os
import errno
import socket
import re
import sys
import time
import threading
import urllib
import urllib2
import mixminion._minionlib
import mixminion.Crypto
import mixminion.NetUtils
import mixminion.Packet
import mixminion.server.HashLog
import mixminion.server.MMTPServer
import mixminion.server.ServerMain
from mixminion.ServerInfo import ServerInfo, PACKET_KEY_BYTES, MMTP_KEY_BYTES,\
signServerInfo
from mixminion.Common import AtomicFile, LOG, MixError, MixFatalError, \
ceilDiv, createPrivateDir, checkPrivateFile, englishSequence, \
formatBase64, formatDate, formatTime, previousMidnight, readFile, \
replaceFile, secureDelete, tryUnlink, UIError, writeFile
from mixminion.Config import ConfigError
#----------------------------------------------------------------------
# Seconds before a key becomes live that we want to generate
# and publish it.
#
#FFFF Make this configurable? (Set to 2 days, 13 hours)
PUBLICATION_LATENCY = (2*24+13)*60*60
# Number of seconds worth of keys we want to generate in advance.
#
#FFFF Make this configurable? (Set to 2 weeks).
PREPUBLICATION_INTERVAL = 14*24*60*60
# We have our X509 certificate set to expire a bit after public key does,
# so that slightly-skewed clients don't incorrectly give up while trying to
# connect to us. (And so that we don't mess up the world while being
# slightly skewed.)
CERTIFICATE_EXPIRY_SLOPPINESS = 2*60*60
# DOCDOC
CERTIFICATE_LIFETIME = 24*60*60
#----------------------------------------------------------------------
class ServerKeyring:
"""A ServerKeyring remembers current and future keys, descriptors, and
hash logs for a mixminion server. It keeps track of key rotation
schedules, and generates new keys as needed.
"""
## Fields:
# homeDir: server home directory
# keyDir: server key directory
# keyOverlap: How long after a new key begins do we accept the old one?
# keySets: sorted list of (start, end, keyset)
# nextUpdate: time_t when a new key should be added, or a current key
# should be removed, or "None" for uncalculated.
# keyRange: tuple of (firstKey, lastKey) to represent which key names
# have keys on disk.
# currentKeys: None, if we haven't checked for currently live keys, or
# a list of currently live ServerKeyset objects.
# dhFile: pathname to file holding diffie-helman parameters.
# _lock: A lock to prevent concurrent key generation or rotation.
def __init__(self, config):
"Create a ServerKeyring from a config object"
self._lock = threading.RLock()
self.configure(config)
def configure(self, config):
"Set up a ServerKeyring from a config object"
self.config = config
self.homeDir = config.getBaseDir()
self.keyDir = config.getKeyDir()
self.hashDir = os.path.join(config.getWorkDir(), 'hashlogs')
self.dhFile = os.path.join(config.getWorkDir(), 'tls', 'dhparam')
self.certFile = os.path.join(config.getWorkDir(), "cert_chain")
self.keyOverlap = config['Server']['PublicKeyOverlap'].getSeconds()
self.nickname = config['Server']['Nickname'] #DOCDOC
self.nextUpdate = None
self.currentKeys = None
self._tlsContext = None #DOCDOC
self._tlsContextExpires = -1 #DOCDOC
self.pingerSeed = None
self.checkKeys()
def checkKeys(self):
"""Internal method: read information about all this server's
currently-prepared keys from disk.
May raise ConfigError if any of the server descriptors on disk
are invalid.
"""
self.keySets = []
badKeySets = []
firstKey = sys.maxint
lastKey = 0
LOG.debug("Scanning server keystore at %s", self.keyDir)
if not os.path.exists(self.keyDir):
LOG.info("Creating server keystore at %s", self.keyDir)
createPrivateDir(self.keyDir)
# Iterate over the entires in HOME/keys
for dirname in os.listdir(self.keyDir):
# Skip any that aren't directories named "key_INT"
if not os.path.isdir(os.path.join(self.keyDir,dirname)):
continue
if not dirname.startswith('key_'):
LOG.warn("Unexpected directory %s under %s",
dirname, self.keyDir)
continue
keysetname = dirname[4:]
try:
setNum = int(keysetname)
# keep trace of the first and last used key number
if setNum < firstKey: firstKey = setNum
if setNum > lastKey: lastKey = setNum
except ValueError:
LOG.warn("Unexpected directory %s under %s",
dirname, self.keyDir)
continue
# Find the server descriptor...
keyset = ServerKeyset(self.keyDir, keysetname, self.hashDir)
ok = 1
try:
keyset.checkKeys()
except MixError, e:
LOG.warn("Error checking private keys in keyset %s: %s",
keysetname, str(e))
ok = 0
try:
if ok:
keyset.getServerDescriptor()
except (ConfigError, IOError), e:
LOG.warn("Key set %s has invalid/missing descriptor: %s",
keysetname, str(e))
ok = 0
if ok:
t1, t2 = keyset.getLiveness()
self.keySets.append( (t1, t2, keyset) )
LOG.trace("Found key %s (valid from %s to %s)",
dirname, formatDate(t1), formatDate(t2))
else:
badKeySets.append(keyset)
LOG.debug("Found %s keysets: %s were incomplete or invalid.",
len(self.keySets), len(badKeySets))
if badKeySets:
LOG.warn("Removing %s invalid keysets", len(badKeySets))
for b in badKeySets:
b.delete()
# Now, sort the key intervals by starting time.
self.keySets.sort()
self.keyRange = (firstKey, lastKey)
# Now we try to see whether we have more or less than 1 key in effect
# for a given time.
for idx in xrange(len(self.keySets)-1):
end = self.keySets[idx][1]
start = self.keySets[idx+1][0]
if start < end:
LOG.warn("Multiple keys for %s. That's unsupported.",
formatDate(end))
elif start > end:
LOG.warn("Gap in key schedule: no key from %s to %s",
formatDate(end), formatDate(start))
def checkDescriptorConsistency(self, regen=1):
"""Check whether the server descriptors in this keyring are
consistent with the server's configuration. If 'regen' is
true, inconsistent descriptors are regenerated."""
identity = None
state = []
for _,_,ks in self.keySets:
ok = ks.checkConsistency(self.config, 0)
if ok == 'good':
continue
state.append((ok, ks))
if not state:
return
LOG.warn("Some generated keysets do not match "
"current configuration...")
for ok, ks in state:
va,vu = ks.getLiveness()
LOG.warn("Keyset %s (%s--%s):",ks.keyname,formatTime(va,1),
formatTime(vu,1))
ks.checkConsistency(self.config, 1)
if regen and ok == 'bad':
if not identity: identity = self.getIdentityKey()
ks.regenerateServerDescriptor(self.config, identity)
def getIden
|
tityKey(self):
"""Retu
|
rn this server's
|
JCraft40/finalproject
|
polls/models.py
|
Python
|
gpl-2.0
| 899 | 0.006674 |
import datetime
from django.db import models
from django.utils import
|
timezone
class Question(models.Mo
|
del):
question_text = models.CharField(max_length=200)
pub_date = models.DateTimeField('date published')
def __unicode__(self): # __unicode__ on Python 2
return self.question_text
def was_published_recently(self):
now = timezone.now()
return now - datetime.timedelta(days=1) <= self.pub_date <= now
was_published_recently.admin_order_field = 'pub_date'
was_published_recently.boolean = True
was_published_recently.short_description = 'Published recently?'
class Choice(models.Model):
question = models.ForeignKey(Question)
choice_text = models.CharField(max_length=200)
votes = models.IntegerField(default=0)
def __unicode__(self): # __unicode__ on Python 2
return self.choice_text
|
krmahadevan/selenium
|
py/selenium/webdriver/safari/webdriver.py
|
Python
|
apache-2.0
| 4,520 | 0.001991 |
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from selenium.common.exceptions import WebDriverException
try:
import http.client as http_client
except ImportError:
import httplib as http_client
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.remote.webdriver import WebDriver as RemoteWebDriver
from .service import Service
from .remote_connection import SafariRemoteConnection
class WebDriver(RemoteWebDriver):
"""
Controls the SafariDriver and allows you to drive the browser.
"""
def __init__(self, port=0, executable_path="/usr/bin/safaridriver", reuse_service=False,
desired_capabilities=DesiredCapabilities.SAFARI, quiet=False,
keep_alive=True):
"""
Creates a new Safari driver instance and launches or finds a running safaridriver service.
:Args:
- port - The port on which the safaridriver service should listen for new connections. If zero, a free port will be found.
- executable_path - Path to a custom safaridriver executable to be used. If absent, /usr/bin/safaridriver is used.
- reuse_service - If True, do not spawn a safaridriver instance; instead, connect to an already-running service that was launched externally.
- desired_capabilities: Dictionary object with desired capabilities (Can be used to provide various Safari switches).
- quiet - If True, the driver's stdout and stderr is suppressed.
- keep_alive - Whether to configure SafariRemoteConnection to use
HTTP keep-alive. Defaults to False.
"""
self._reuse_service = reuse_service
self.service = Service(executable_path, port=port, quiet=quiet)
if not reuse_service:
self.service.start()
executor = SafariRemoteConnection(remote_server_addr=self.service.service_url,
keep_alive=keep_alive)
RemoteWebDriver.__init__(
self,
command_executor=executor,
desired_capabilities=desired_capabilities)
self._is_remote = False
def quit(self):
"""
Closes the browser and shuts down the SafariDriver executable
that is started when starting the SafariDriver
"""
try:
RemoteWebDriver.quit(self)
except http_client.BadStatusLine:
pass
finally:
if not self._reuse_service:
self.service.stop()
# safaridriver extension commands. The canonical command support matrix is here:
# https://developer.apple.com/library/content/documentation/NetworkingInternetWeb/Conceptual
|
/WebDriverEndpointDoc/Commands/Commands.html
# First available in Safari 11.1 and Safari Te
|
chnology Preview 41.
def set_permission(self, permission, value):
if not isinstance(value, bool):
raise WebDriverException("Value of a session permission must be set to True or False.")
payload = {}
payload[permission] = value
self.execute("SET_PERMISSIONS", {"permissions": payload})
# First available in Safari 11.1 and Safari Technology Preview 41.
def get_permission(self, permission):
payload = self.execute("GET_PERMISSIONS")["value"]
permissions = payload["permissions"]
if not permissions:
return None
if permission not in permissions:
return None
value = permissions[permission]
if not isinstance(value, bool):
return None
return value
# First available in Safari 11.1 and Safari Technology Preview 42.
def debug(self):
self.execute("ATTACH_DEBUGGER")
self.execute_script("debugger;")
|
oldpa/luigi
|
luigi/scheduler.py
|
Python
|
apache-2.0
| 45,889 | 0.002245 |
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version
|
2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://
|
www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
The system for scheduling tasks and executing them in order.
Deals with dependencies, priorities, resources, etc.
The :py:class:`~luigi.worker.Worker` pulls tasks from the scheduler (usually over the REST interface) and executes them.
See :doc:`/central_scheduler` for more info.
"""
import collections
try:
import cPickle as pickle
except ImportError:
import pickle
import datetime
import functools
import itertools
import logging
import os
import re
import time
from luigi import six
from luigi import configuration
from luigi import notifications
from luigi import parameter
from luigi import task_history as history
from luigi.task_status import DISABLED, DONE, FAILED, PENDING, RUNNING, SUSPENDED, UNKNOWN
from luigi.task import Config
logger = logging.getLogger("luigi.server")
class Scheduler(object):
"""
Abstract base class.
Note that the methods all take string arguments, not Task objects...
"""""
add_task = NotImplemented
get_work = NotImplemented
ping = NotImplemented
UPSTREAM_RUNNING = 'UPSTREAM_RUNNING'
UPSTREAM_MISSING_INPUT = 'UPSTREAM_MISSING_INPUT'
UPSTREAM_FAILED = 'UPSTREAM_FAILED'
UPSTREAM_DISABLED = 'UPSTREAM_DISABLED'
UPSTREAM_SEVERITY_ORDER = (
'',
UPSTREAM_RUNNING,
UPSTREAM_MISSING_INPUT,
UPSTREAM_FAILED,
UPSTREAM_DISABLED,
)
UPSTREAM_SEVERITY_KEY = UPSTREAM_SEVERITY_ORDER.index
STATUS_TO_UPSTREAM_MAP = {
FAILED: UPSTREAM_FAILED,
RUNNING: UPSTREAM_RUNNING,
PENDING: UPSTREAM_MISSING_INPUT,
DISABLED: UPSTREAM_DISABLED,
}
TASK_FAMILY_RE = re.compile(r'([^(_]+)[(_]')
class scheduler(Config):
# TODO(erikbern): the config_path is needed for backwards compatilibity. We should drop the compatibility
# at some point (in particular this would force users to replace all dashes with underscores in the config)
retry_delay = parameter.FloatParameter(default=900.0)
remove_delay = parameter.FloatParameter(default=600.0)
worker_disconnect_delay = parameter.FloatParameter(default=60.0)
state_path = parameter.Parameter(default='/var/lib/luigi-server/state.pickle')
# Jobs are disabled if we see more than disable_failures failures in disable_window seconds.
# These disables last for disable_persist seconds.
disable_window = parameter.IntParameter(default=3600,
config_path=dict(section='scheduler', name='disable-window-seconds'))
disable_failures = parameter.IntParameter(default=None,
config_path=dict(section='scheduler', name='disable-num-failures'))
disable_hard_timeout = parameter.IntParameter(default=None,
config_path=dict(section='scheduler', name='disable-hard-timeout'))
disable_persist = parameter.IntParameter(default=86400,
config_path=dict(section='scheduler', name='disable-persist-seconds'))
max_shown_tasks = parameter.IntParameter(default=100000)
max_graph_nodes = parameter.IntParameter(default=100000)
prune_done_tasks = parameter.BoolParameter(default=False)
record_task_history = parameter.BoolParameter(default=False)
prune_on_get_work = parameter.BoolParameter(default=False)
def fix_time(x):
# Backwards compatibility for a fix in Dec 2014. Prior to the fix, pickled state might store datetime objects
# Let's remove this function soon
if isinstance(x, datetime.datetime):
return time.mktime(x.timetuple())
else:
return x
class Failures(object):
"""
This class tracks the number of failures in a given time window.
Failures added are marked with the current timestamp, and this class counts
the number of failures in a sliding time window ending at the present.
"""
def __init__(self, window):
"""
Initialize with the given window.
:param window: how long to track failures for, as a float (number of seconds).
"""
self.window = window
self.failures = collections.deque()
self.first_failure_time = None
def add_failure(self):
"""
Add a failure event with the current timestamp.
"""
failure_time = time.time()
if not self.first_failure_time:
self.first_failure_time = failure_time
self.failures.append(failure_time)
def num_failures(self):
"""
Return the number of failures in the window.
"""
min_time = time.time() - self.window
while self.failures and fix_time(self.failures[0]) < min_time:
self.failures.popleft()
return len(self.failures)
def clear(self):
"""
Clear the failure queue.
"""
self.failures.clear()
def _get_default(x, default):
if x is not None:
return x
else:
return default
class Task(object):
def __init__(self, task_id, status, deps, resources=None, priority=0, family='', module=None,
params=None, disable_failures=None, disable_window=None, disable_hard_timeout=None,
tracking_url=None):
self.id = task_id
self.stakeholders = set() # workers ids that are somehow related to this task (i.e. don't prune while any of these workers are still active)
self.workers = set() # workers ids that can perform task - task is 'BROKEN' if none of these workers are active
if deps is None:
self.deps = set()
else:
self.deps = set(deps)
self.status = status # PENDING, RUNNING, FAILED or DONE
self.time = time.time() # Timestamp when task was first added
self.updated = self.time
self.retry = None
self.remove = None
self.worker_running = None # the worker id that is currently running the task or None
self.time_running = None # Timestamp when picked up by worker
self.expl = None
self.priority = priority
self.resources = _get_default(resources, {})
self.family = family
self.module = module
self.params = _get_default(params, {})
self.disable_failures = disable_failures
self.disable_hard_timeout = disable_hard_timeout
self.failures = Failures(disable_window)
self.tracking_url = tracking_url
self.scheduler_disable_time = None
self.runnable = False
def __repr__(self):
return "Task(%r)" % vars(self)
def add_failure(self):
self.failures.add_failure()
def has_excessive_failures(self):
if (self.failures.first_failure_time is not None and
self.disable_hard_timeout):
if (time.time() >= self.failures.first_failure_time +
self.disable_hard_timeout):
return True
if self.failures.num_failures() >= self.disable_failures:
return True
return False
def can_disable(self):
return (self.disable_failures is not None or
self.disable_hard_timeout is not None)
@property
def pretty_id(self):
param_str = ', '.join('{}={}'.format(key, value) for key, value in self.params.items())
return '{}({})'.format(self.family, param_str)
class Worker(object):
"""
Structure for tracking worker activity and keeping their references.
"""
def __init__(self, worker_id, last_active=None):
self.id = worker_id
self.reference = None # reference to the worker in the real world. (Currently
|
mechaphish/colorguard
|
tests/test_cromu70_caching.py
|
Python
|
bsd-2-clause
| 1,245 | 0.006426 |
import nose
from nose.plugins.attrib import attr
import logging
import colorguard
import os
bin_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries'))
@attr(speed='slow')
def test_cromu_00070_caching():
# Test exploitation of CROMU_00070 given an input which causes a leak. Then test that we can do it again restoring
# from the cache.
for _ in rang
|
e(2):
payload = bytes.fromhex("06000006020a00000000000000000000000c030c00000100e1f505000000000000eb")
cg = colorguard.ColorGuard(os.path.join(bin
|
_location, "tests/cgc/CROMU_00070"), payload)
pov = cg.attempt_exploit()
nose.tools.assert_not_equal(pov, None)
nose.tools.assert_true(pov.test_binary())
def run_all():
functions = globals()
all_functions = dict(filter((lambda kv: kv[0].startswith('test_')), functions.items()))
for f in sorted(all_functions.keys()):
if hasattr(all_functions[f], '__call__'):
all_functions[f]()
if __name__ == "__main__":
logging.getLogger("colorguard").setLevel("DEBUG")
logging.getLogger("povsim").setLevel("DEBUG")
import sys
if len(sys.argv) > 1:
globals()['test_' + sys.argv[1]]()
else:
run_all()
|
aleixo/cnn_fire
|
googlemanager.py
|
Python
|
gpl-3.0
| 4,878 | 0.009842 |
# coding=utf-8
#https://developers.google.com/drive/v3/web/quickstart/python
from __future__ import print_function
import httplib2
import os
import io
from apiclient import discovery
import oauth2client
from
|
oauth2client import client
from oauth2client import tools
from apiclient.http import MediaIoBaseDownload
from apiclient.http import MediaFileUpload
import sys
import argparse
from pyfcm import FCMNotification
import h5py
"""
DESCRIPTION
Script with class that manages operations with Google.
Send file, uploads file and list files
"""
class GoogleManager:
def __init__(self):
self.SCOPES = 'https://www.g
|
oogleapis.com/auth/drive'
self.CLIENT_SECRET_FILE = 'GoogleDrive_Client_secret.json'
self.APPLICATION_NAME = 'pythonscript'
print("[GOOGLE MANAGER] Google Manager started")
def init_for_upload(self,upload_file=None,upload_file_name=None):
if upload_file and upload_file_name:
self.upload_manager(upload_file,upload_file_name)
print("[GOOGLE MANAGER] Will upload file")
else:
raise ValueError("[ERROR] Object initializer has to have file name to upload and name of uploaded file in upload mode. Initialize object with mode, upload filename and upload destination name")
def init_for_download(self,download_file=None):
if download_file:
self.download_manager(download_file)
print("[GOOGLE MANAGER] Will download file")
else:
raise ValueError("[ERROR] Object initializer has to have file name to download in download mode. Initialize object with mode and file name to download")
def init_for_list(self,):
self.download_manager(True)
def download_file(self,file_id, mimeType, filename,drive_service):
if "google-apps" in mimeType:
return
request = drive_service.files().get_media(fileId=file_id)
fh = io.FileIO(filename, 'wb')
downloader = MediaIoBaseDownload(fh, request)
done = False
while done is False:
status, done = downloader.next_chunk()
print("[PROGRESS] Download %d%%." % int(status.progress() * 100))
def get_credentials(self):
SCOPES = 'https://www.googleapis.com/auth/drive'
CLIENT_SECRET_FILE = 'GoogleDrive_Client_secret.json'
APPLICATION_NAME = 'pythonscript'
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,'drive-python-quickstart.json')
store = oauth2client.file.Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
credentials = tools.run_flow(flow, store)
return credentials
def upload_manager(self,fileToUpload,nameToUpload):
credentials = self.get_credentials()
http = credentials.authorize(httplib2.Http())
drive_service = discovery.build('drive', 'v3', http=http)
file_metadata = {'name' : nameToUpload}
media = MediaFileUpload(fileToUpload,resumable=True)
file = drive_service.files().create(body=file_metadata,media_body=media,fields='id').execute()
print(file)
print("[GOOGLE MANAGER] File with name {} uploaded to Google Drive".format(nameToUpload))
def download_manager(self,fileToDownload=None,list = False):
credentials = self.get_credentials()
http = credentials.authorize(httplib2.Http())
service = discovery.build('drive', 'v3', http=http)
results = service.files().list(pageSize=10,fields="nextPageToken, files(id, name)").execute()
items = results.get('files', [])
if not items:
print("[GOOGLE MANAGER] No files found.")
else:
for item in items:
name = str(item["name"].encode('ascii', 'ignore'))
print("[GOOGLE MANAGER] Found file -> {}".format(name))
if name == fileToDownload and not list:
credentials = self.get_credentials()
http = credentials.authorize(httplib2.Http())
drive_service = discovery.build('drive', 'v3', http=http)
self.download_file(item['id'],"text/plain",item['name'],drive_service)
#drive = GoogleManager()
#drive.init_for_download("weights.h5")
#drive.init_for_download("model.json")
#drive.init_for_upload("more_images.h5","weights.h5")
#drive.init_for_upload("model_more_images.json","model.json")
#drive.init_for_list()
|
V3sth4cks153/Python-Programs
|
equation_solver.py
|
Python
|
mit
| 1,645 | 0.024924 |
# -*- coding: utf-8 -*-
#Import libraries
from sys import exit
from math import sqrt
#Print title (http://patorjk.com/software/taag/#p=display&f=Small%20Slant&t=Equation%20Solver%20V2.1)
print " ____ __ _ ____ __ _ _____ ___"
print " / __/__ ___ _____ _/ /_(_)__ ___ / __/__ / / _____ ____ | | / /_ | < /"
print " / _// _ `/ // / _ `/ __/ / _ \/ _ \ _\ \/ _ \/ / |/ / -_) __/ | |/ / __/_ / / "
print "/___/\_, /\_,_/\_,_/\__/_/\___/_//_/ /___/\___/_/|___/\__/_/ |___/____(_)_/ "
print " /_/ "
#Welcome phrase
print "\nWelcome in the 'Equation Solver' 2.1 by Rafael Riber .\nPlease give the values
|
for 'a', 'b' and 'c' as follows: f(x) = Ax^2+Bx+C.\n"
#Define check function
def check(x):
if x != 0:
pass
else:
|
exit("Invalid value. Please enter only numbers other than zero.")
#Input and check
a = float(input("Value of 'A': "))
check(a)
b = float(input("Value of 'B': "))
check(b)
c = float(input("Value of 'C': "))
check(c)
#Formulas
dis = (b * b) - 4 * (a * c)
x1 = (-b - sqrt(dis) ) / (2 * a)
x2 = (-b + sqrt(dis) ) / (2 * a)
x3 = (-b) / (2 * a)
sx = (-b) / (2 * a)
sy = (- dis) / (4 * a)
#Calculus conditions
if dis >= 0:
print "\nThe discriminant is equal to: %s.\n" % (dis)
else:
exit("The equation has no real roots: The discriminant is negative.")
if dis == 0:
print "Sole root of the equation: (%s). Summit: (%s; %s)\n" % (x3, sx, sy)
else:
print "Roots: (%s; %s)\nSummit: (%s; %s) \n\nThank you for using the Equation Solver by Rafael Riber !" % (x1, x2, sx, sy)
|
Banno/getsentry-javascript-lite
|
sentry_javascript_lite/plugin.py
|
Python
|
apache-2.0
| 3,880 | 0.004897 |
"""
sentry_javascript_lite.plugin
~~~~~~~~~~~~~~~~~~~~~
"""
import re
from django.conf import settings
from sentry.lang.javascript.plugin import JavascriptPlugin
from sentry.lang.javascript.processor import SourceProcessor
from sentry.interfaces.stacktrace import (Frame, Stacktrace)
from sentry_javascript_lite import VERSION
def javascript_lite_preprocess_event(data):
if data.get('platform') != 'javascript':
return
processor = JavascriptLiteSourceProcessor()
return processor.process(data)
class JavascriptPlugin(JavascriptPlugin):
author = 'Chad Killingsworth, Jack Henry and Associates'
author_url = 'https://github.com/Banno/getsentry-javascript-lite'
version = VERSION
description = "Preprocess Raw Javascript Stacktraces"
resource_links = [
('Bug Tracker', 'https://github.com/Banno/getsentry-javascript-lite/issues'),
('Source', 'https://github.com/Banno/getsentry-javascript-lite'),
]
slug = 'javascript-lite'
title = 'Javascript-lite Event Preprocessor'
conf_title = title
conf_key = 'javascript-lite'
def get_event_preprocessors(self, **kwargs):
if not settings.SENTRY_SCRAPE_JAVASCRIPT_CONTEXT:
return []
return [javascript_lite_preprocess_event]
class JavascriptLiteSourceProcessor(SourceProcessor):
chrome_ie_stacktrace_expr = re.compile(r'^\s*at (.*?) ?\(?((?:file|https?|chrome-extension):.*?):(\d+)(?::(\d+))?\)?\s*$',
re.IGNORECASE)
firefox_safari_stacktrace_expr = re.compile(r'^\s*(.*?)(?:\((.*?)\))?@((?:file|https?|chrome).*?):(\d+)(?::(\d+))?\s*$',
re.IGNORECASE)
whitespace_expr = re.compile(r'^\s+')
location_parts_expr = re.compile(r'[\(\)\s]')
def get_stacktraces(self, data):
stacktraces = super(JavascriptLiteSourceProcessor, self).get_stacktraces(data);
if (not stacktraces and 'extra' in data and
isinstance(data['extra'], dict) and 'rawstack' in data['extra']):
stacktraces = self.format_raw_stacktrace(data['extra']['rawstack'])
if stacktraces:
data['extra'].pop('rawstack', None)
return stacktraces
def format_raw_stacktrace(self, value):
kwargs = {
'frames': [],
'frames_omitted': []
}
for frame in value.split('\n'):
if JavascriptLiteSourceProcessor.chrome_ie_stacktrace_expr.search(frame):
kwargs['frames'].append(self.format_chrome_ie_frame(frame))
elif JavascriptLiteSourceProcessor.firefox_safari_stacktrace_expr.search(frame):
kwargs['frames'].append(self.format_firefox_safari_frame(frame))
if len(kwargs['frames']) > 0:
return [Stacktrace(**kwargs)]
return []
def format_chrome_ie_frame(self, frame):
tokens = JavascriptLiteSourceProcessor.chrome_ie_stacktrace_expr.findall(frame)[0]
frame = {
'filename': tokens[1],
'function': tokens[0] or '?',
'in_app': True,
}
try:
frame['lineno'] = int(float(tokens[2]))
except:
pass
try:
frame['colno'] = int(float(tokens[3]))
except:
pass
return Frame.to_python(frame)
def format_firefox_safari_frame(self, frame):
tokens = Ja
|
vascriptLiteSourceProcessor.firefox_safari_stacktrace_expr.findall(frame)[0]
frame = {
|
'filename': tokens[2],
'function': tokens[0] or '?',
'in_app': True,
}
if tokens[1]:
frame['args'] = tokens[1].split(',')
try:
frame['lineno'] = int(float(tokens[3]))
except:
pass
try:
frame['colno'] = int(float(tokens[4]))
except:
pass
return Frame.to_python(frame)
|
creasyw/IMTAphy
|
framework/scenarios/PyConfig/scenarios/placer/positionList.py
|
Python
|
gpl-2.0
| 2,880 | 0.010069 |
###############################################################################
# This file is part of openWNS (open Wireless Network Simulator)
# _____________________________________________________________________________
#
# Copyright (C) 2004-2007
# Chair of Communication Networks (ComNets)
# Kopernikusstr. 16, D-52074 Aachen, Germany
# phone: ++49-241-80-27910,
# fax: ++49-241-80-22242
# email: info@openwns.org
# www: http://www.openwns.org
# _____________________________________________________________________________
#
# openWNS is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License version 2 as published by the
# Free Software Foundation;
#
# openWNS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import scenarios.interfaces
import openwns.geometry.position
import math
class PositionListPlacer(scenarios.interfaces.INodePlacer):
"""
Place a number of nodes on the given positions.
"""
def __init__(self, numberOfNodes = 1, positionsList = [openwns.geometry.position.Position(1,1)], rotate = 0.0):
"""
@type numberOfNodes: int
@param numberOfNodes: The number of nodes on the circle
@Type: position: float
@param position: distance from
|
BS in Meters for every single node
@type rotate: float
@par
|
am rotate: Rotate the final result by rotate in radiant [0..2pi]
"""
self.center = openwns.geometry.position.Position(x = 0.0, y = 0.0, z = 0.0)
self.numberOfNodes = numberOfNodes
self.positionsList = positionsList
self.rotate = rotate
def setCenter(self, center):
self.center = center
def getPositions(self):
positions = []
for i in xrange(self.numberOfNodes):
x = self.positionsList[i].x
y = self.positionsList[i].y
v = openwns.geometry.position.Vector(x = x, y = y, z = 0.0)
p = v.turn2D(self.rotate).toPosition()
positions.append(p)
return [p + self.center for p in positions]
def isInside(self, position):
for i in xrange(self.numberOfNodes):
x = self.positionsList[i].x
y = self.positionsList[i].y
v = openwns.geometry.position.Vector(x = x, y = y, z = 0.0)
p = v.turn2D(self.rotate).toPosition()
if p.x + self.center.x == position.x:
return True
return False
|
ossobv/asterisklint
|
asterisklint/__init__.py
|
Python
|
gpl-3.0
| 1,096 | 0 |
# AsteriskLint -- an Asterisk PBX config syntax checker
# Copyright (C) 2015-2016 Walter Doekes, OSSO B.V.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Gene
|
ral Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
from .config import ConfigAggregator
from .dialplan import DialplanAggregator
from .file import FileReader
from .func_odbc import FuncOdbcAggregator
class FileConfigParser(ConfigAggregator, FileReader):
pass
class FileDialplanParser(DialplanAggregator, FileReader):
pass
class FileFuncOdbcParser(FuncOdbcAggregator, FileReader):
pass
|
won0089/oppia
|
extensions/rules/real_test.py
|
Python
|
apache-2.0
| 2,944 | 0 |
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, softwar
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for classification of real numbers."""
__author__ = 'Sean Lip'
from core.tests import test_utils
from extensions.rules import real
class RealRuleUnitTests(test_utils.GenericTestBase):
"""Tests for rules operating on Real objects."""
def test_equals_rule(self):
self.assertTrue(real.Equals(3).eval(3))
self.assertTrue(real.Equals(3.0).eval(3))
self.assertFalse(real.Equals(4).eval(3))
def test_is_less_than_rule(self):
self.assertTrue(real.IsLessThan(4).eval(3))
self.assertTrue(real.IsLessThan(4).eval(3.0))
self.assertTrue(real.IsLessThan(4.0).eval(3.0))
self.assertFalse(real.IsLessThan(3).eval(3))
self.assertFalse(real.IsLessThan(3.0).eval(3.0))
self.assertFalse(real.IsLessThan(3.0).eval(4.0))
self.assertFalse(real.IsLessThan(3).eval(4))
def test_is_greater_than_rule(self):
self.assertTrue(real.IsGreaterThan(3).eval(4))
|
self.assertTrue(real.IsGreaterThan(3.0).eval(4))
self.assertTrue(real.IsGreaterThan(3.0).eval(4.0))
self.assertFalse(real.IsGreaterThan(3).eval(3))
self.assertFalse(real.IsGreaterThan(3.0).eval(3.0))
self.assertFalse(real.IsGreaterThan(4.0).eval(3.0))
self.assertFalse(real.IsGreaterThan(4).eval(3))
def test_is_less_than_or_equal_to_rule(self):
rule = real.IsLessThanOrEqualTo(3)
se
|
lf.assertTrue(rule.eval(2))
self.assertTrue(rule.eval(3))
self.assertFalse(rule.eval(4))
def test_is_greater_than_or_equal_to_rule(self):
rule = real.IsGreaterThanOrEqualTo(3)
self.assertTrue(rule.eval(4))
self.assertTrue(rule.eval(3))
self.assertFalse(rule.eval(2))
def test_is_inclusively_between_rule(self):
with self.assertRaises(AssertionError):
real.IsInclusivelyBetween(2, 1)
rule = real.IsInclusivelyBetween(1, 3)
self.assertTrue(rule.eval(2))
self.assertTrue(rule.eval(1))
self.assertTrue(rule.eval(3))
self.assertTrue(rule.eval(1.0))
self.assertFalse(rule.eval(3.001))
def test_is_within_tolerance_rule(self):
rule = real.IsWithinTolerance(0.5, 0)
self.assertTrue(rule.eval(0))
self.assertTrue(rule.eval(0.5))
self.assertFalse(rule.eval(0.51))
|
Geode/geonode
|
geonode/upload/migrations/0001_initial.py
|
Python
|
gpl-3.0
| 2,961 | 0.00304 |
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
from djan
|
go.conf import settings
class Migration(migrations.Migration):
dependencies = [
('layers', '0002_initial_step2'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Upload',
fields=[
('id', models.AutoField(verbo
|
se_name='ID', serialize=False, auto_created=True, primary_key=True)),
('import_id', models.BigIntegerField(null=True)),
('state', models.CharField(max_length=16)),
('date', models.DateTimeField(default=datetime.datetime.now, verbose_name=b'date')),
('upload_dir', models.CharField(max_length=100, null=True)),
('name', models.CharField(max_length=64, null=True)),
('complete', models.BooleanField(default=False)),
('session', models.TextField(null=True)),
('metadata', models.TextField(null=True)),
('mosaic_time_regex', models.CharField(max_length=128, null=True)),
('mosaic_time_value', models.CharField(max_length=128, null=True)),
('mosaic_elev_regex', models.CharField(max_length=128, null=True)),
('mosaic_elev_value', models.CharField(max_length=128, null=True)),
('layer', models.ForeignKey(to='layers.Layer', null=True)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ['-date'],
},
),
migrations.CreateModel(
name='UploadFile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('file', models.FileField(upload_to=b'uploads')),
('slug', models.SlugField(blank=True)),
('upload', models.ForeignKey(blank=True, to='upload.Upload', null=True)),
],
),
]
|
jessicayuen/cmput410-lab2
|
sample5.py
|
Python
|
gpl-3.0
| 1,165 | 0.017167 |
# Sample 5
import socket
import sys
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error as msg:
print('Failed to create socket!')
print('Error code: ' + str(msg[0]) + ', error message: ' + msg[1])
sys.exit()
print('Socked created successfully.')
# Part 1
host = ''
port = 8888
try:
s.bind((host, port))
except socket.error:
msg = str(socket.error)
print('Bind failed! Error code: ' + str(msg[0]) + ', message: ' + msg[1])
sys.exit()
print('Socket bind complete.')
s.listen(10) # Limitation to number of connections that can be in the queue
print('Socket is now listening.')
# Part 3 - the while loop
|
to keep the socket listening for clients
while True:
conn, addr = s.accept() # blocking call, to accept the first client that comes
# can type in bash the following to talk to the socket: telnet
|
localhost 8888
# Part 2
data = conn.recv(1024)
if not data:
break
reply = '<<<Hello ' + str(data) + '>>>'
conn.sendall(reply.encode('UTF8'))
# once you start the socket with python sample5.py
# try telnet localhost 8888 in another terminal
# type test, and it should echo back <<<Hello test>>>
conn.close()
s.close()
|
mitsuhiko/werkzeug
|
src/werkzeug/wrappers/base_request.py
|
Python
|
bsd-3-clause
| 1,174 | 0 |
import typing as t
import warnings
from .request import Request
class _FakeSubclassCheck(type):
def __subclasscheck__(cls, subclass: t.Type) -> bool:
warnings.warn(
"'BaseRequest' is deprecated and will be removed in"
" Werkzeug 2.1. Use 'issubclass(cls, Request)' instead.",
DeprecationWarning,
stacklevel=2,
)
return issubclass(subclass, Request)
def __instancecheck__(cls, instance: t.Any) -> bool:
warnings.warn(
"'BaseRequest' is deprecated and will be removed in"
" Werkzeug 2.1. Use 'isinstance(obj, Request)' instead.",
DeprecationWarning,
stacklevel=2,
)
return isinstance(instance, Request)
class
|
BaseRequest(Request, metaclass=_FakeSubclassCheck):
def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
warnings.warn(
"'BaseRequest' is deprecated and will be removed in"
" Werkzeug 2.1. 'Request' now includes the functionality"
" directly.",
DeprecationWarning,
stacklevel=2,
)
super().__init__(*args, **kwarg
|
s)
|
mikeshultz/icenine
|
icenine/contrib/transactions.py
|
Python
|
gpl-3.0
| 9,242 | 0.003787 |
# -*- coding: utf-8 -*-
import rlp
import secp256k1
from rlp.sedes import big_endian_int, binary, Binary
from rlp.utils import str_to_bytes, ascii_chr
from eth_utils.address import to_normalized_address
from eth_utils.hexidecimal import encode_hex, decode_hex
try:
from Crypto.Hash import keccak
sha3_256 = lambda x: keccak.new(digest_bits=256, data=x).digest()
except ImportError:
import sha3 as _sha3
sha3_256 = lambda x: _sha3.keccak_256(x).digest()
from py_ecc.secp256k1 import privtopub, ecdsa_raw_sign, ecdsa_raw_recover
from icenine.contrib.keys import privtoaddr
#from ethereum.utils import encode_hex
#from ethereum.exceptions import InvalidTransaction
#from ethereum import bloom
#from ethereum import opcodes
#from ethereum import utils
#from ethereum.slogging import get_logger
#from ethereum.utils import TT256, mk_contract_address, zpad, int_to_32bytearray, big_endian_to_int, ecsign, ecrecover_to_pub, normalize_key
# Reimplemented from ethereum.utils
def sha3(seed):
return sha3_256(to_string(seed))
big_endian_to_int = lambda x: big_endian_int.deserialize(str_to_bytes(x).lstrip(b'\x00'))
is_numeric = lambda x: isinstance(x, int)
def bytearray_to_bytestr(value):
return bytes(value)
def to_string(value):
if isinstance(value, bytes):
return value
if isinstance(value, str):
return bytes(value, 'utf-8')
if isinstance(value, int):
return bytes(str(value), 'utf-8')
def normalize_address(x, allow_blank=False):
if is_numeric(x):
return int_to_addr(x)
if allow_blank and x in {'', b''}:
return b''
if len(x) in (42, 50) and x[:2] in {'0x', b'0x'}:
x = x[2:]
if len(x) in (40, 48):
x = decode_hex(x)
if len(x) == 24:
assert len(x) == 24 and sha3(x[:20])[:4] == x[-4:]
x = x[:20]
if len(x) != 20:
raise Exception("Invalid address format: %r" % x)
return x
def normalize_key(key):
if is_numeric(key):
o = encode_int32(key)
elif len(key) == 32:
o = key
elif len(key) == 64:
o = decode_hex(key)
elif len(key) == 66 and key[:2] == '0x':
o = decode_hex(key[2:])
else:
raise Exception("Invalid key format: %r" % key)
if o == b'\x00' * 32:
raise Exception("Zero privkey invalid")
return o
def safe_ord(value):
if isinstance(value, int):
return value
else:
return ord(value)
def ecsign(rawhash, key):
if secp256k1 and hasattr(secp256k1, 'PrivateKey'):
pk = secp256k1.PrivateKey(key, raw=True)
signature = pk.ecdsa_recoverable_serialize(
pk.ecdsa_sign_recoverable(rawhash, raw=True)
)
signature = signature[0] + bytearray_to_bytestr([signature[1]])
v = safe_ord(signature[64]) + 27
r = big_endian_to_int(signature[0:32])
s = big_endian_to_int(signature[32:64])
else:
v, r, s = ecdsa_raw_sign(rawhash, key)
return v, r, s
# end reimplementation
#log = get_logger('eth.chain.tx')
TT256 = 2 ** 256
TT256M1 = 2 ** 256 - 1
TT255 = 2 ** 255
SECP256K1P = 2**256 - 4294968273
# in the yellow paper it is specified that s should be smaller than secpk1n (eq.205)
secpk1n = 115792089237316195423570985008687907852837564279074904382605163141518161494337
null_address = b'\xff' * 20
address_type = Binary.fixed_length(20, allow_empty=True)
class Transaction(rlp.Serializable):
"""
A transaction is stored as:
[nonce, gasprice, startgas, to, value, data, v, r, s]
nonce is the number of transactions already sent by that account, encoded
in binary form (eg. 0 -> '', 7 -> '\x07', 1000 -> '\x03\xd8').
(v,r,s) is the raw Electrum-style signature of the transaction without the
signature made with the private key corresponding to the sending account,
with 0 <= v <= 3. From an Electrum-style signature (65 bytes) it is
possible to extract the public key, and thereby the address, directly.
A valid transaction is one where:
(i) the signature is well-formed (ie. 0 <= v <= 3, 0 <= r < P, 0 <= s < N,
0 <= r < P - N if v >= 2), and
(ii) the sending account has enough funds to pay the fee and the value.
"""
fields = [
('nonce', big_endian_int),
('gasprice', big_endian_int),
('startgas', big_endian_int),
('to', address_type),
('value', big_endian_int),
('data', binary),
('v', big_endian_int),
('r', big_endian_int),
('s', big_endian_int),
]
_sender = None
def __init__(self, nonce, gaspr
|
ice, startgas, to, value, data, v=0, r=0, s=0):
self.data = None
to = normalize_address(to, allow_blank=True)
super(Transact
|
ion, self).__init__(nonce, gasprice, startgas, to, value, data, v, r, s)
if self.gasprice >= TT256 or self.startgas >= TT256 or \
self.value >= TT256 or self.nonce >= TT256:
raise InvalidTransaction("Values way too high!")
@property
def sender(self):
if not self._sender:
# Determine sender
if self.r == 0 and self.s == 0:
self._sender = null_address
else:
if self.v in (27, 28):
vee = self.v
sighash = sha3(rlp.encode(self, UnsignedTransaction))
elif self.v >= 37:
vee = self.v - self.network_id * 2 - 8
assert vee in (27, 28)
rlpdata = rlp.encode(rlp.infer_sedes(self).serialize(self)[:-3] + [self.network_id, '', ''])
sighash = sha3(rlpdata)
else:
raise InvalidTransaction("Invalid V value")
if self.r >= secpk1n or self.s >= secpk1n or self.r == 0 or self.s == 0:
raise InvalidTransaction("Invalid signature values!")
pub = ecrecover_to_pub(sighash, vee, self.r, self.s)
if pub == b"\x00" * 64:
raise InvalidTransaction("Invalid signature (zero privkey cannot sign)")
self._sender = sha3(pub)[-20:]
return self._sender
@property
def network_id(self):
if self.r == 0 and self.s == 0:
return self.v
elif self.v in (27, 28):
return None
else:
return ((self.v - 1) // 2) - 17
@sender.setter
def sender(self, value):
self._sender = value
def sign(self, key, network_id=None):
"""Sign this transaction with a private key.
A potentially already existing signature would be overridden.
"""
if network_id is None:
rawhash = sha3(rlp.encode(self, UnsignedTransaction))
else:
assert 1 <= network_id < 2**63 - 18
rlpdata = rlp.encode(rlp.infer_sedes(self).serialize(self)[:-3] + [network_id, b'', b''])
rawhash = sha3(rlpdata)
key = normalize_key(key)
self.v, self.r, self.s = ecsign(rawhash, key)
if network_id is not None:
self.v += 8 + network_id * 2
self._sender = privtoaddr(key)
return self
@property
def hash(self):
return sha3(rlp.encode(self))
def to_dict(self):
d = {}
for name, _ in self.__class__.fields:
d[name] = getattr(self, name)
if name in ('to', 'data'):
d[name] = '0x' + encode_hex(d[name])
d['sender'] = '0x' + encode_hex(self.sender)
d['hash'] = '0x' + encode_hex(self.hash)
return d
@property
def intrinsic_gas_used(self):
num_zero_bytes = str_to_bytes(self.data).count(ascii_chr(0))
num_non_zero_bytes = len(self.data) - num_zero_bytes
return (opcodes.GTXCOST
# + (0 if self.to else opcodes.CREATE[3])
+ opcodes.GTXDATAZERO * num_zero_bytes
+ opcodes.GTXDATANONZERO * num_non_zero_bytes)
@property
def creates(self):
"returns the address of a contract created by this tx"
if self.to in (b'', '\0' * 20):
return mk_contract_address(self.sender, self.nonce)
def __eq__(self, ot
|
jhoenicke/python-trezor
|
trezorlib/ontology.py
|
Python
|
lgpl-3.0
| 2,134 | 0.001406 |
# This file is part of the Trezor project.
#
# Copyright (C) 2012-2018 SatoshiLabs and contributors
#
# This librar
|
y is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY W
|
ARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the License along with this library.
# If not, see <https://www.gnu.org/licenses/lgpl-3.0.html>.
from . import messages
from .tools import expect
#
# Ontology functions
#
@expect(messages.OntologyAddress, field="address")
def get_address(client, address_n, show_display=False):
return client.call(
messages.OntologyGetAddress(address_n=address_n, show_display=show_display)
)
@expect(messages.OntologyPublicKey)
def get_public_key(client, address_n, show_display=False):
return client.call(
messages.OntologyGetPublicKey(address_n=address_n, show_display=show_display)
)
@expect(messages.OntologySignedTransfer)
def sign_transfer(client, address_n, t, tr):
return client.call(
messages.OntologySignTransfer(address_n=address_n, transaction=t, transfer=tr)
)
@expect(messages.OntologySignedWithdrawOng)
def sign_withdrawal(client, address_n, t, w):
return client.call(
messages.OntologySignWithdrawOng(
address_n=address_n, transaction=t, withdraw_ong=w
)
)
@expect(messages.OntologySignedOntIdRegister)
def sign_register(client, address_n, t, r):
return client.call(
messages.OntologySignOntIdRegister(
address_n=address_n, transaction=t, ont_id_register=r
)
)
@expect(messages.OntologySignedOntIdAddAttributes)
def sign_add_attr(client, address_n, t, a):
return client.call(
messages.OntologySignOntIdAddAttributes(
address_n=address_n, transaction=t, ont_id_add_attributes=a
)
)
|
edno/udacity-sandbox
|
ud889/AIND_Sudoku/solution.py
|
Python
|
unlicense
| 6,141 | 0.006839 |
assignments = []
rows = 'ABCDEFGHI'
cols = '123456789'
def assign_value(values, box, value):
"""
Please use this function to update your values dictionary!
Assigns a value to a given box. If it updates the board record it.
"""
# Don't waste memory appending actions that don't actually change any values
if values[box] == value:
return values
values[box] = value
if len(value) == 1:
assignments.append(values.copy())
return values
def naked_twins(values):
"""Eliminate values using the naked twins strategy.
Args:
values(dict): a dictionary of the form {'box_name': '123456789', ...}
Returns:
the values dictionary with the naked twins eliminated from peers.
"""
# Find all instances of naked twins
twins_list = []
for box in boxes:
if len(values[box]) == 2:
for peer in peers[box]:
if values[peer] == values[box]:
twins_list.append([box,peer])
# Eliminate the naked twins as possibilities for their peers
if twins_list:
for twins in twins_list:
# intersect list of twins' peers for common units
twins_peers = set(peers[twins[0]]).intersection(set(peers[twins[1]]))
for peer in twins_peers:
for v in values[twins[0]]:
values = assign_value(values, peer, values[peer].replace(v,''))
return values
def cross(A, B):
"Cross product of elements in A and elements in B."
return [s+t for s in A for t in B]
def diag(A, B):
"Diagonals of A elements with elements in B."
return [A[r]+B[c] for r in range(len(A)) for c in range(len(B)) if r == c]
def grid_values(grid):
"""
Convert grid into a dict of {square: char} with '123456789' for empties.
Args:
grid(string) - A grid in string form.
Returns:
A grid in dictionary form
Keys: The boxes, e.g., 'A1'
Values: The value in each box, e.g., '8'. If the box has no value, then the value will be '123456789'.
"""
return dict((boxes[i], grid[i] if (grid[i] != '.') else '123456789') for i in range(len(boxes)))
def display(values):
"""
Display the values as a 2-D grid.
Args:
values(dict): The sudoku in dictionary form
"""
width = 1+max(len(values[s]) for s in boxes)
line = '+'.join(['-'*(width*3)]*3)
for r in rows:
print(''.join(values[r+c].center(width)+('|' if c in '36' else '')
for c in cols))
if r in 'CF': print(line)
return
def eliminate(values):
for box,value in values.items():
if len(value) == 1:
for peer in peers[box]:
values = assign_value(values, peer, values[peer].replace(value,''))
return values
def only_choice(values):
for box,v in values.items():
if len(v) > 1:
for unit in units[box]:
pval = str().join(values[key] for key in unit if key != box)
d = [val for val in v if val not in pval]
if len(d) == 1:
values = assign_value(values, box, d[0])
return values
def reduce_puzzle(values):
stalled = False
while not stalled:
# Check how many boxes have a determined value
solved_values_before = len([box for box in values.keys() if len(values[box]) == 1])
# Use the Eliminate Strategy
values = eliminate(values)
# Use the Only Choice Strategy
values = only_choice(values)
# Use the Naked Twins Strategy
values = naked_twins(values)
# Check how many boxes have a determined value, to compare
solved_values_after = len([box for box in values.keys() if len(values[box]) == 1])
# If no new values were added, stop the loop.
stalled = solved_values_before == solved_values_after
# Sanity check, return False if there is a box with zero available values:
if len([box for box in values.keys() if len(values[box]) == 0]):
return False
return values
def search(values):
# First, reduce the puzzle using the previous function
values = reduce_puzzle(values)
if not values:
return False
# Return solution if all box have unique value
if all(len(v) == 1 for v in values.values()):
return values
# Choose one of the unfilled squares with the fewest po
|
ssibilities
_,box = min((len(v),k) for k,v in values.items() if len(v) > 1)
# Now use recursion to solve each one of the resulting sudokus, and if one returns a value (not False), return that answer!
# If you're stuck, see the solution.py tab!
for val in values[box]:
new_values = values.copy()
new_values[box] = val
res = search(new_values)
i
|
f res:
return res
def solve(grid):
"""
Find the solution to a Sudoku grid.
Args:
grid(string): a string representing a sudoku grid.
Example: '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
Returns:
The dictionary representation of the final sudoku grid. False if no solution exists.
"""
return search(grid_values(grid))
boxes = cross(rows, cols)
row_units = [cross(r, cols) for r in rows]
column_units = [cross(rows, c) for c in cols]
square_units = [cross(rs, cs) for rs in ('ABC','DEF','GHI') for cs in ('123','456','789')]
diag_units = [diag(rows, cols)] + [diag(rows, cols[::-1])]
unitlist = row_units + column_units + square_units + diag_units
units = dict((s, [u for u in unitlist if s in u]) for s in boxes)
peers = dict((s, set(sum(units[s],[]))-set([s])) for s in boxes)
if __name__ == '__main__':
diag_sudoku_grid = '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
display(solve(diag_sudoku_grid))
try:
from visualize import visualize_assignments
visualize_assignments(assignments)
except SystemExit:
pass
except:
print('We could not visualize your board due to a pygame issue. Not a problem! It is not a requirement.')
|
jpmontez/jenkins-rpc
|
scripts/build-summary/cachequery.py
|
Python
|
gpl-2.0
| 492 | 0 |
import click
import pickle
from build import Build
@click.group()
def cli():
pass
@cli.command()
@click.option('--cache-file', default='test-cache')
@click.option('--query')
def query(cache_file, query):
w
|
ith open(cache_file, 'rb') as f:
key, criteria = query.split('=')
buildobjs = pickle.load(f)
|
for name, build in buildobjs.items():
item = getattr(build, key, '')
if criteria in item:
print(build, item)
cli()
|
bchappet/dnfpy
|
src/dnfpyUtils/stats/trajectory.py
|
Python
|
gpl-2.0
| 1,105 | 0.021719 |
from dnfpyUtils.stats.statistic import Statistic
import numpy as np
class Trajectory(Statistic):
"""
Abstract class for trajectory
"""
def __init__(self,name,dt=0.1,dim=0,**kwargs):
super().__init__(name=name,size=0,dim=dim,dt=dt,**kwargs)
self.trace = [] #save the trace
def getViewData(s
|
elf):
return self._data#,self.getMean()
def reset(self):
super().reset()
self.trace = []
self._data = np.nan
def getMean(self):
return np.nanmean(self.trace)
def getRMSE(self):
return np.sqrt(np.nanmean(self.trace))
def getCount(self):
return np.sum(~np.isnan(sel
|
f.trace))
def getMax(self):
return np.max(self.trace)
def getPercentile(self,percent):
return np.nanpercentile(self.trace,percent)
def getMin(self):
return np.min(self.trace)
def getStd(self):
return np.std(self.trace)
def getTrace(self):
"""
Return the time trace of the statistic
"""
return self.trace
|
aevum/moonstone
|
src/moonstone/gui/qt/component/mwindow.py
|
Python
|
lgpl-3.0
| 14,404 | 0.00854 |
# -*- coding: utf-8 -*-
#
# Moonstone is platform for processing of medical images (DICOM).
# Copyright (C) 2009-2011 by Neppo Tecnologia da Informação LTDA
# and Aevum Softwares LTDA
#
# This file is part of Moonstone.
#
# Moonstone is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
from PySide import QtCore, QtGui
from mscreen import MScreen
from ....bloodstone.scenes.imageplane import VtkImagePlane
from ..rename import Rename
from ....bloodstone.scenes.cameracontroller2d import CameraController2D
class MWindow(QtGui.QTabWidget):
def __init__(self, ilsa = None, parent=None, serie=None):
logging.debug("In MWindow::__init__()")
super(MWindow, self).__init__(parent)
self._serie = serie
self.createWidgets()
self.createContextMenu()
self.createActions()
self.updateWidgets()
self._ilsa = ilsa
self._mScreens = []
self._yamlPath = None
self._mainImageData = None
self._vtiPath = None
self._cameraController = CameraController2D(self)
def addTab(self, widget, title):
logging.debug("In MWindow::addTab()")
if isinstance(widget, MScreen):
super(MWindow, self).addTab(widget, title)
self._mScreens.append(widget)
else:
raise "Widget is not a instance of MScreen!"
def createWidgets(self):
logging.debug("In MWindow::createWidgets()")
self.rename = Rename(self)
def close(self):
self._mainImageData = None
for mscreen in self._mScreens:
mscreen.close(force=True)
#mscreen.destroy()
#mscreen.setParent(None)
#mscreen = None
#del mscreen
#self._mScreens = None
#self.mouseReleaseEvent = None
super(MWindow, self).close()
def createContextMenu(self):
logging.debug("In AxialPlane::createContextMenu()")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/static/default/icon/22x22/im-status-message-edit.png"))
self.renameAction = QtGui.QAction(self)
self.renameAction.setText(QtGui.QApplication.translate("MWindow",
"Rename",
None,
QtGui.QApplication.UnicodeUTF8))
self.renameAction.setIconVisibleInMenu(True)
self.renameAction.setObjectName("renameAction")
self.renameAction.setIcon(icon1)
iconDuplicate = QtGui.QIcon()
iconDuplicate.addPixmap(QtGui.QPixmap(":/static/default/icon/22x22/document-new.png"))
self.duplicateAction = QtGui.QAction(self)
self.duplicateAction.setText(QtGui.QApplication.translate("MWindow",
"Duplicate",
None,
QtGui.QApplication.UnicodeUTF8))
self.duplicateAction.setIconVisibleInMenu(True)
self.duplicateAction.setObjectName("duplicateAction")
self.duplicateAction.setIcon(iconDuplicate)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/static/default/icon/22x22/view-refresh.png"))
self.resetAction = QtGui.QAction(self)
self.resetAction.setText(QtGui.QApplication.translate("MWindow",
"Reset",
None,
QtGui.QApplication.UnicodeUTF8))
self.resetAction.setIconVisibleInMenu(True)
self.resetAction.setObjectName("resetAction")
self.resetAction.setIcon(icon2)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/static/default/icon/22x22/dialog-close.png"))
self.closeAction = QtGui.QAction(self)
self.closeAction.setText(QtGui.QApplication.translate("MWindow",
"Close",
None,
QtGui.QApplication.UnicodeUTF8))
self.closeAction.setIconVisibleInMenu(True)
self.closeAction.setObjectName("closeAction")
self.closeAction.setIcon(icon2)
self.addAxialAction = QtGui.QAction(self)
self.addAxialAction.setText("Axial")
self.addAxialAction.setIconVisibleInMenu(True)
self.addAxialAction.setObjectName("addAxialAction")
self.addCoronalAction = QtGui.QAction(self)
self.addCoronalAction.setText(QtGui.QApplication.translate("MWindow",
"Coronal",
None,
QtGui.QApplication.UnicodeUTF8))
self.addCoronalAction.setIconVisibleInMenu(True)
self.addCoronalAction.setObjectName("addCoronalAction")
self.addSagittalAction = QtGui.QAction(self)
self.addSagittalAction.setText(QtGui.QApplication.translate("MWindow",
"Sagittal",
|
None,
QtGui.QApplication.UnicodeUTF8))
self.addSagittalAction.setIconVisibleInMenu(True)
self.addSagittalAction.setObjectName("addSagittalAction")
self.addVolumeAction = QtGui.QAction(self)
self.addVolumeAction.setText(QtGui.QApplication.translate("MWindow",
"Volume",
None,
QtGui.QApplication.UnicodeUTF8))
self.addVolumeAction.setIconVisibleInMenu(True)
self.addVolumeAction.setObjectName("addVolumeAction")
self.contextMenu = QtGui.QMenu(self)
self.contextMenu.addAction(self.renameAction)
self.contextMenu.addAction(self.resetAction)
self.contextMenu.addAction(self.duplicateAction)
self.contextMenu.addAction(self.closeAction)
self.contextMenu.setIcon(icon1)
windowMenu = QtGui.QMenu(self.contextMenu)
windowMenu.addAction(self.addAxialAction)
windowMenu.addAction(self.addCoronalAction)
windowMenu.addAction(self.addSagittalAction)
windowMenu.addAction(self.addVolumeAction)
windowMenu.setTitle(QtGui.QApplication.translate("MWindow",
"Add Scene",
None,
QtGui.QApplication.UnicodeUTF8))
self.contextMenu.addAction(windowMenu.menuAction())
def createActions(self):
logging.debug("In MWindow::createActions()")
self.connect(self, QtCore.SIGNAL("tabCloseRequested(int)"),
self.slotTabCloseRequested)
self.connect(self, QtCore.SIGNAL("currentChanged(int)"),
self.slotTabChanged)
self.mouseReleaseEvent = self.rightClickAction
self.connect(self.rename.Ok, QtCore.SIGNAL("clicked()"),
self.slotRenameOkButtonClicked)
self.connect(self.rename.Cancel, QtCore.SIGNAL("clicked()"),
self.slotRenameCancelBu
|
|
Akrog/cinder
|
cinder/api/contrib/consistencygroups.py
|
Python
|
apache-2.0
| 14,556 | 0 |
# Copyright (C) 2012 - 2014 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The consistencygroups api."""
import webob
from webob import exc
from cinder.api import common
from cinder.api import extensions
from cinder.api.openstack import wsgi
from cinder.api.views import consistencygrou
|
ps as co
|
nsistencygroup_views
from cinder.api import xmlutil
from cinder import consistencygroup as consistencygroupAPI
from cinder import exception
from cinder.i18n import _, _LI
from cinder.openstack.common import log as logging
from cinder import utils
LOG = logging.getLogger(__name__)
def make_consistencygroup(elem):
elem.set('id')
elem.set('status')
elem.set('availability_zone')
elem.set('created_at')
elem.set('name')
elem.set('description')
def make_consistencygroup_from_src(elem):
elem.set('id')
elem.set('status')
elem.set('created_at')
elem.set('name')
elem.set('description')
elem.set('cgsnapshot_id')
class ConsistencyGroupTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('consistencygroup',
selector='consistencygroup')
make_consistencygroup(root)
alias = Consistencygroups.alias
namespace = Consistencygroups.namespace
return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})
class ConsistencyGroupsTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('consistencygroups')
elem = xmlutil.SubTemplateElement(root, 'consistencygroup',
selector='consistencygroups')
make_consistencygroup(elem)
alias = Consistencygroups.alias
namespace = Consistencygroups.namespace
return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})
class ConsistencyGroupFromSrcTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('consistencygroup-from-src',
selector='consistencygroup-from-src')
make_consistencygroup_from_src(root)
alias = Consistencygroups.alias
namespace = Consistencygroups.namespace
return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})
class CreateDeserializer(wsgi.MetadataXMLDeserializer):
def default(self, string):
dom = utils.safe_minidom_parse_string(string)
consistencygroup = self._extract_consistencygroup(dom)
return {'body': {'consistencygroup': consistencygroup}}
def _extract_consistencygroup(self, node):
consistencygroup = {}
consistencygroup_node = self.find_first_child_named(
node,
'consistencygroup')
attributes = ['name',
'description']
for attr in attributes:
if consistencygroup_node.getAttribute(attr):
consistencygroup[attr] = consistencygroup_node.\
getAttribute(attr)
return consistencygroup
class CreateFromSrcDeserializer(wsgi.MetadataXMLDeserializer):
def default(self, string):
dom = utils.safe_minidom_parse_string(string)
consistencygroup = self._extract_consistencygroup(dom)
retval = {'body': {'consistencygroup-from-src': consistencygroup}}
return retval
def _extract_consistencygroup(self, node):
consistencygroup = {}
consistencygroup_node = self.find_first_child_named(
node, 'consistencygroup-from-src')
attributes = ['cgsnapshot', 'name', 'description']
for attr in attributes:
if consistencygroup_node.getAttribute(attr):
consistencygroup[attr] = (
consistencygroup_node.getAttribute(attr))
return consistencygroup
class ConsistencyGroupsController(wsgi.Controller):
"""The ConsistencyGroups API controller for the OpenStack API."""
_view_builder_class = consistencygroup_views.ViewBuilder
def __init__(self):
self.consistencygroup_api = consistencygroupAPI.API()
super(ConsistencyGroupsController, self).__init__()
@wsgi.serializers(xml=ConsistencyGroupTemplate)
def show(self, req, id):
"""Return data about the given consistency group."""
LOG.debug('show called for member %s', id)
context = req.environ['cinder.context']
try:
consistencygroup = self.consistencygroup_api.get(
context,
group_id=id)
except exception.ConsistencyGroupNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
return self._view_builder.detail(req, consistencygroup)
def delete(self, req, id, body):
"""Delete a consistency group."""
LOG.debug('delete called for member %s', id)
context = req.environ['cinder.context']
force = False
if body:
cg_body = body['consistencygroup']
force = cg_body.get('force', False)
LOG.info(_LI('Delete consistency group with id: %s'), id,
context=context)
try:
group = self.consistencygroup_api.get(context, id)
self.consistencygroup_api.delete(context, group, force)
except exception.ConsistencyGroupNotFound:
msg = _("Consistency group %s could not be found.") % id
raise exc.HTTPNotFound(explanation=msg)
except exception.InvalidConsistencyGroup as error:
raise exc.HTTPBadRequest(explanation=error.msg)
return webob.Response(status_int=202)
@wsgi.serializers(xml=ConsistencyGroupsTemplate)
def index(self, req):
"""Returns a summary list of consistency groups."""
return self._get_consistencygroups(req, is_detail=False)
@wsgi.serializers(xml=ConsistencyGroupsTemplate)
def detail(self, req):
"""Returns a detailed list of consistency groups."""
return self._get_consistencygroups(req, is_detail=True)
def _get_consistencygroups(self, req, is_detail):
"""Returns a list of consistency groups through view builder."""
context = req.environ['cinder.context']
consistencygroups = self.consistencygroup_api.get_all(context)
limited_list = common.limited(consistencygroups, req)
if is_detail:
consistencygroups = self._view_builder.detail_list(req,
limited_list)
else:
consistencygroups = self._view_builder.summary_list(req,
limited_list)
return consistencygroups
@wsgi.response(202)
@wsgi.serializers(xml=ConsistencyGroupTemplate)
@wsgi.deserializers(xml=CreateDeserializer)
def create(self, req, body):
"""Create a new consistency group."""
LOG.debug('Creating new consistency group %s', body)
if not self.is_valid_body(body, 'consistencygroup'):
raise exc.HTTPBadRequest()
context = req.environ['cinder.context']
try:
consistencygroup = body['consistencygroup']
except KeyError:
msg = _("Incorrect request body format")
raise exc.HTTPBadRequest(explanation=msg)
name = consistencygroup.get('name', None)
description = consistencygroup.get('description', None)
volume_types = consistencygroup.get('volume_types', None)
if not volume_types:
msg = _("volume_types must be provided to create "
"consi
|
CraveFood/restkiss
|
tests/test_preparers.py
|
Python
|
bsd-3-clause
| 2,628 | 0.003805 |
import unittest
from restkiss.preparers import Preparer, FieldsPreparer
class InstaObj(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class LookupDataTestCase(unittest.TestCase):
def setUp(self):
super(LookupDataTestCase, self).setUp()
self.preparer = FieldsPreparer(fields=None)
self.obj_data = InstaObj(
say='what',
count=453,
moof={
'buried': {
'id': 7,
'data': InstaObj(yes='no')
}
},
parent=None
)
self.dict_data = {
'hello': 'world',
'abc': 123,
'more': {
'things': 'here',
'nested': InstaObj(
awesome=True,
depth=3
),
},
'parent': None,
}
def test_dict_simple(self):
self.assertEqual(self.preparer.lookup_data('hello', self.dict_data), 'world')
self.assertEqual(self.preparer.lookup_data('abc', self.dict_data), 123)
def test_obj_simple(self):
self.assertEqual(self.preparer.lookup_data('say', self.obj_data), 'what')
self.assertEqual(self.preparer.lookup_data('count', self.obj_data), 453)
def test_dict_nested(self):
self.assertEqual(self.preparer.lookup_data('more.things', self.dict_data), 'here')
self.assertEqual(self.preparer.lookup_data('more.nested.depth', self.dict_data), 3)
def test_obj_nested(self):
self.assertEqual(self.preparer.lookup_data('moof.buried.id', self.obj_data), 7)
self.assertEqual(self.preparer.lookup_data('moof.buried.data.yes', self.obj_data), 'no')
def test_dict_miss(self):
with self.assertRaises(KeyError):
self.preparer.lookup_data('another', self.dict_data)
def test_obj_miss(self):
with self.assertRaises(AttributeError):
self.preparer.lookup_data('whee', self.obj_data)
def test_dict_nullable_fk(self):
self.assertEqual(self.preparer.lookup_data('parent.id', self.dict_data), None)
def test_obj_nullable_fk(self):
self.ass
|
ertEqual(self.preparer.lookup_data('parent.id', self.obj_data), None)
def test_empty_lookup(self):
# We could possibly get here in the recursion.
self.assertEqual(self.preparer.lookup_da
|
ta('', 'Last value'), 'Last value')
def test_complex_miss(self):
with self.assertRaises(AttributeError):
self.preparer.lookup_data('more.nested.nope', self.dict_data)
|
coopie/huzzer
|
test/test_function_generator.py
|
Python
|
mit
| 3,071 | 0.001954 |
from huzzer.function_generator import generate_expression, generate_unary_expr
from huzzer.expressions import VariableExpression, FunctionExpression, BRANCH_EXPRESSIONS
from huzzer.namers import DefaultNamer
from huzzer import INT, BOOL
empty_variables = {
INT: [],
BOOL: []
}
def test_generate_unary_expr():
ints = [generate_unary_expr(INT, empty_variables, 0) for i in range(50)]
assert all([
x.type_signiature == (INT, INT) and len(x.args) == 1 and type(x.args[0]) == int
for x in ints
])
bools = [generate_unary_expr(BOOL, empty_variables, 0) for i in range(10)]
assert all([
x.type_signiature == (BOOL, BOOL) and len(x.args) == 1 and type(x.args[0]) == bool
for x in bools
])
bool_variable = VariableExpression(BOOL, 1)
just_bools = {
INT: [],
BOOL: [bool_variable]
}
var_expr = generate_unary_expr(BOOL, just_bools, 1)
assert var_expr is bool_variable
int_expr = generate_unary_expr(INT, just_bools, 1)
assert int_expr is not bool_variable
# haskell_typ
|
e,
# variables,
# functions,
# branch_expressions,
# tree_depth,
# branching_probability=0.4,
# variable_probability=0.7,
# function_call_probability=0.5
def test_generate_expression():
int_function = FunctionExpression([BOOL, INT, INT], 1)
bool_function = FunctionExpres
|
sion([BOOL, BOOL, BOOL, BOOL], 2)
functions = {
INT: [int_function],
BOOL: [bool_function]
}
# this should definitely start with the bool func, as the probabilities are one
bool_expr = generate_expression(
BOOL,
empty_variables,
functions,
BRANCH_EXPRESSIONS,
2,
branching_probability=1.0,
function_call_probability=1.0
)
assert type(bool_expr) == type(bool_function) and bool_expr.function_id == 2
expr = generate_expression(
BOOL,
empty_variables,
functions,
BRANCH_EXPRESSIONS,
1,
branching_probability=1.0,
function_call_probability=1.0
)
assert expr.type_signiature == (BOOL, BOOL)
assert type(expr) != type(bool_function)
bool_variable = VariableExpression(BOOL, 1)
int_variable = VariableExpression(INT, 2)
variables = {
INT: [int_variable],
BOOL: [bool_variable]
}
var_expr = generate_expression(
BOOL,
variables,
functions,
BRANCH_EXPRESSIONS,
1,
branching_probability=1.0,
function_call_probability=1.0,
variable_probability=1.0
)
assert type(var_expr) is type(bool_variable) and var_expr.var_id == bool_variable.var_id
func_expr_with_only_vars = generate_expression(
BOOL,
variables,
functions,
BRANCH_EXPRESSIONS,
2,
branching_probability=1.0,
function_call_probability=1.0,
variable_probability=1.0
)
assert type(func_expr_with_only_vars) == type(bool_function) and \
all([arg is bool_variable for arg in func_expr_with_only_vars.args])
|
karamanolev/persephone
|
persephone/persephone/celery.py
|
Python
|
mit
| 231 | 0 |
import os
from celery imp
|
ort Celery
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'persephone.settings')
app = Celery('persephone')
app.config_from_object('django.conf:settings',
|
namespace='CELERY')
app.autodiscover_tasks()
|
SNoiraud/gramps
|
gramps/gen/filters/rules/_regexpidbase.py
|
Python
|
gpl-2.0
| 1,899 | 0.005793 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 o
|
f the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not
|
, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
import re
from ...const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from . import Rule
#-------------------------------------------------------------------------
#
# HasIdOf
#
#-------------------------------------------------------------------------
class RegExpIdBase(Rule):
"""
Objects with a Gramps ID that contains a substring or matches a
regular expression.
"""
labels = [ _('Text:') ]
name = 'Objects with <Id>'
description = "Matches objects whose Gramps ID contains a substring " \
"or matches a regular expression"
category = _('General filters')
allow_regex = True
def apply(self, db, obj):
return self.match_substring(0, obj.gramps_id)
|
almarklein/scikit-image
|
skimage/measure/_marching_cubes.py
|
Python
|
bsd-3-clause
| 6,374 | 0.000157 |
import numpy as np
from . import _marching_cubes_cy
def marching_cubes(volume, level, spacing=(1., 1., 1.)):
"""
Marching cubes algorithm to find iso-valued surfaces in 3d volumetric data
Parameters
----------
volume : (M, N, P) array of doubles
Input data volume to find isosurfaces. Will be cast to `np.float64`.
level : float
Contour val
|
ue to search for isosurfaces in `volume`.
spacing : length-3 tuple of floats
Voxel spacing in spatial dimensions corresponding to numpy array
indexing dimensions (M, N, P) as in `volume`.
Returns
-------
verts : (V, 3) array
Spatial coordinates for V unique mesh vertices. Coordinate order
matches input `volume` (M, N, P).
faces : (F, 3) array
Define triangular faces via referencing vertex indices from ``v
|
erts``.
This algorithm specifically outputs triangles, so each face has
exactly three indices.
Notes
-----
The marching cubes algorithm is implemented as described in [1]_.
A simple explanation is available here::
http://www.essi.fr/~lingrand/MarchingCubes/algo.html
There are several known ambiguous cases in the marching cubes algorithm.
Using point labeling as in [1]_, Figure 4, as shown::
v8 ------ v7
/ | / | y
/ | / | ^ z
v4 ------ v3 | | /
| v5 ----|- v6 |/ (note: NOT right handed!)
| / | / ----> x
| / | /
v1 ------ v2
Most notably, if v4, v8, v2, and v6 are all >= `level` (or any
generalization of this case) two parallel planes are generated by this
algorithm, separating v4 and v8 from v2 and v6. An equally valid
interpretation would be a single connected thin surface enclosing all
four points. This is the best known ambiguity, though there are others.
This algorithm does not attempt to resolve such ambiguities; it is a naive
implementation of marching cubes as in [1]_, but may be a good beginning
for work with more recent techniques (Dual Marching Cubes, Extended
Marching Cubes, Cubic Marching Squares, etc.).
Because of interactions between neighboring cubes, the isosurface(s)
generated by this algorithm are NOT guaranteed to be closed, particularly
for complicated contours. Furthermore, this algorithm does not guarantee
a single contour will be returned. Indeed, ALL isosurfaces which cross
`level` will be found, regardless of connectivity.
The output is a triangular mesh consisting of a set of unique vertices and
connecting triangles. The order of these vertices and triangles in the
output list is determined by the position of the smallest ``x,y,z`` (in
lexicographical order) coordinate in the contour. This is a side-effect
of how the input array is traversed, but can be relied upon.
To quantify the area of an isosurface generated by this algorithm, pass
the outputs directly into `skimage.measure.mesh_surface_area`.
Regarding visualization of algorithm output, the ``mayavi`` package
is recommended. To contour a volume named `myvolume` about the level 0.0::
>>> from mayavi import mlab # doctest: +SKIP
>>> verts, tris = marching_cubes(myvolume, 0.0, (1., 1., 2.)) # doctest: +SKIP
>>> mlab.triangular_mesh([vert[0] for vert in verts],
... [vert[1] for vert in verts],
... [vert[2] for vert in verts],
... tris) # doctest: +SKIP
>>> mlab.show() # doctest: +SKIP
References
----------
.. [1] Lorensen, William and Harvey E. Cline. Marching Cubes: A High
Resolution 3D Surface Construction Algorithm. Computer Graphics
(SIGGRAPH 87 Proceedings) 21(4) July 1987, p. 163-170).
See Also
--------
skimage.measure.mesh_surface_area
"""
# Check inputs and ensure `volume` is C-contiguous for memoryviews
if volume.ndim != 3:
raise ValueError("Input volume must have 3 dimensions.")
if level < volume.min() or level > volume.max():
raise ValueError("Contour level must be within volume data range.")
volume = np.array(volume, dtype=np.float64, order="C")
# Extract raw triangles using marching cubes in Cython
# Returns a list of length-3 lists, each sub-list containing three
# tuples. The tuples hold (x, y, z) coordinates for triangle vertices.
# Note: this algorithm is fast, but returns degenerate "triangles" which
# have repeated vertices - and equivalent vertices are redundantly
# placed in every triangle they connect with.
raw_tris = _marching_cubes_cy.iterate_and_store_3d(volume, float(level),
spacing)
# Find and collect unique vertices, storing triangle verts as indices.
# Returns a true mesh with no degenerate faces.
verts, faces = _marching_cubes_cy.unpack_unique_verts(raw_tris)
return np.asarray(verts), np.asarray(faces)
def mesh_surface_area(verts, tris):
"""
Compute surface area, given vertices & triangular faces
Parameters
----------
verts : (V, 3) array of floats
Array containing (x, y, z) coordinates for V unique mesh vertices.
faces : (F, 3) array of ints
List of length-3 lists of integers, referencing vertex coordinates as
provided in `verts`
Returns
-------
area : float
Surface area of mesh. Units now [coordinate units] ** 2.
Notes
-----
The arguments expected by this function are the exact outputs from
`skimage.measure.marching_cubes`. For unit correct output, ensure correct
`spacing` was passed to `skimage.measure.marching_cubes`.
This algorithm works properly only if the ``faces`` provided are all
triangles.
See Also
--------
skimage.measure.marching_cubes
"""
# Fancy indexing to define two vector arrays from triangle vertices
actual_verts = verts[tris]
a = actual_verts[:, 0, :] - actual_verts[:, 1, :]
b = actual_verts[:, 0, :] - actual_verts[:, 2, :]
del actual_verts
# Area of triangle in 3D = 1/2 * Euclidean norm of cross product
return ((np.cross(a, b) ** 2).sum(axis=1) ** 0.5).sum() / 2.
|
pavpanchekha/oranj
|
oranj/core/builtin.py
|
Python
|
gpl-3.0
| 1,879 | 0.010112 |
#!/bin/false
# -*- coding: utf-8 -*-
from objects.orobject import OrObject
from objects.function import Function
from objects.number import Number
from objects.file import File
from objects.inheritdict import InheritDict
from objects.ordict import OrDict
from objects.orddict import ODict
import objects.console as console
import objects.exception as exception
import objects.orstring as orstring
import types
import libbuiltin
def expose(r, n=""):
v = OrObject.from_py(r)
if n:
v.name = n
return v
builtin = InheritDict()
builtin.update({
"int": expose(libbuiltin.toint),
"num": expose(Number),
"dict": expose(OrDict),
"odict": expose(ODict),
"set": expose(set),
"io": expose(console.io),
"file": expose(File),
"input": expose(console.input),
"output": expose(console.output),
"error": expose(c
|
onsole.error),
"endl": expose("\n"),
"repr": expose(repr),
"join": expose(libbuiltin.join),
"range": expose(range),
"type": expose(libbuiltin.typeof, "type"),
"dir": expose(libbuiltin.dirof, "dir"),
"attrs": expose(libbuiltin.attrsof, "attrs"),
"reverse": expose(reversed),
"sort": expose(sorted),
"chr": expose(unichr),
"Exception": expose(Exception),
"hasattr": expose(OrObject.ha
|
s, "hasattr"),
"getattr": expose(OrObject.get, "getattr"),
"setattr": expose(OrObject.set, "setattr"),
})
stolen_builtins = [
'abs', 'all', 'any', 'bool', 'callable', #buffer
'cmp', #chr (not as unichr)
'dict', 'divmod', 'enumerate', #delattr
'exit', 'filter', # frozenset
'hash', 'id', #get/hasattr
'iter', 'len', 'list',
'map', 'max', 'min', 'ord', # object
'range', 'repr', #property
'round', 'set', 'slice', #setattr
'str', 'sum', 'unicode', #super
'zip'
]
for i in stolen_builtins:
builtin[i] = expose(__builtins__[i])
|
bewallyt/Classy
|
authentication/serializers.py
|
Python
|
mit
| 1,481 | 0.000675 |
from django.contrib.auth import update_session_auth_hash
from rest_framework
|
import serializers
from authentication.models import Account
class AccountSerializer(serializers.ModelSerializer):
password = serializers.CharField(write_only=True, required=False)
confirm_password = serializers.CharField(
|
write_only=True, required=False)
class Meta:
model = Account
fields = ('id', 'email', 'username', 'created_at', 'updated_at',
'first_name', 'last_name', 'tagline', 'password',
'confirm_password', 'userType')
read_only_fields = ('created_at', 'updated_at',)
def create(self, validated_data):
return Account.objects.create(**validated_data)
def update(self, instance, validated_data):
instance.username = validated_data.get('username', instance.username)
instance.tagline = validated_data.get('tagline', instance.tagline)
instance.save()
password = validated_data.get('password', None)
confirm_password = validated_data.get('confirm_password', None)
if password and confirm_password and password == confirm_password:
instance.set_password(password)
instance.save()
update_session_auth_hash(self.context.get('request'), instance)
return instance
class SimpleAccountSerializer(serializers.ModelSerializer):
class Meta:
model = Account
fields = ('id', 'email', 'username',)
|
xvitaly/stmbot
|
stmbot/checker.py
|
Python
|
gpl-3.0
| 5,895 | 0.002969 |
#!/usr/bin/python
# coding=utf-8
# Simple Steam profile checker Telegram bot
# Copyright (c) 2017 EasyCoding Team
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from html import unescape
from re import sub
from urllib.request import Request as request, urlopen
from xml.dom import minidom
class SteamChecker:
@staticmethod
def striptags(gtstr, gtrep=''):
"""
Strip HTML tags from string.
:param gtstr: String to strip tags
:param gtrep: Replacement for tags
:return: String without HTML tags
"""
return sub('<[^<]+?>', gtrep, unescape(gtstr))
def __fetchxml(self):
"""
Format query to API, fetch results and return them as string.
|
:return: API check results
"""
apiuri = 'https://check.team-fortress.su/api.php?action=check&token=%s&id=%s' % (self.__token, self.__id)
req = request(apiuri, data=None, headers={'User-Agent': 'Mozilla/5.0 (Windows NT
|
10.0; rv:52.0.0)'
'Gecko/20100101 Firefox/52.0.0'})
with urlopen(req) as xmlres:
return xmlres.read().decode('utf-8')
@property
def sitestatus(self):
"""
TEAM-FORTRESS.SU user friendly status of checked user profile.
:return: TEAM-FORTRESS.SU check results
"""
# Set dictionary with API return codes...
stv = {
'1': 'гарант',
'2': 'в белом списке',
'3': 'в чёрном списке',
'4': 'нет в базе',
'5': 'в чёрном списке аукциона',
'6': 'сотрудник сайта',
'7': 'донатер',
'8': 'ненадёжный'
}
# Return result using dictionary...
return stv[self.__sitestatus]
@property
def vacstatus(self):
"""
VAC status of checked user profile.
:return: VAC status
"""
stv = {
'0': 'чист',
'1': 'забанен'
}
return stv[self.__vacstatus]
@property
def f2pstatus(self):
"""
Free-to-Play status (has no purchased games) of checked user profile.
:return: Free-to-Play status
"""
stv = {
'0': 'нет',
'1': 'да'
}
return stv[self.__f2pstatus]
@property
def tradestatus(self):
"""
Current trade status of checked user profile.
:return: Trade status
"""
stv = {
'0': 'нет ограничений',
'1': 'заблокирована',
'2': 'испытательный срок'
}
return stv[self.__tradestatus]
@property
def gamebanstatus(self):
"""
Current game bans on checked user profile.
:return: Game bans status and their count
"""
return 'нет' if self.__gamebans == '0' else 'есть (%s)' % self.__gamebans
@property
def description(self):
"""
Formatted custom description of checked user profile.
:return: Custom description with markup
"""
return '`%s`' % self.striptags(self.__description, ' ') if self.__description else '*отсутствует.*'
def __init__(self, tid, token):
"""
Main SteamChecker constructor.
:param tid: Profile link, username or SteamID
:param token: API token
"""
# Setting token and unique identifier to pseudo-private properties...
self.__id = tid
self.__token = token
# Fetching XML from API...
rxml = self.__fetchxml()
# Parsing received XML...
xmlp = minidom.parseString(rxml)
# Checking API result...
if xmlp.getElementsByTagName('qstatus')[0].firstChild.data != 'OK':
raise Exception('Incorrect API return code')
# Setting public fields...
self.steamid32 = xmlp.getElementsByTagName('steamID')[0].firstChild.data
self.steamid64 = xmlp.getElementsByTagName('steamID64')[0].firstChild.data
self.steamidv3 = xmlp.getElementsByTagName('steamIDv3')[0].firstChild.data
self.nickname = xmlp.getElementsByTagName('nickname')[0].firstChild.data
self.avatar = xmlp.getElementsByTagName('avatar')[0].firstChild.data
self.permalink = xmlp.getElementsByTagName('permalink')[0].firstChild.data
self.srstatus = self.striptags(xmlp.getElementsByTagName('steamrep')[0].firstChild.data)
# Setting private fields...
self.__sitestatus = xmlp.getElementsByTagName('sitestatus')[0].firstChild.data
self.__vacstatus = xmlp.getElementsByTagName('isbanned')[0].firstChild.data
self.__f2pstatus = xmlp.getElementsByTagName('isf2p')[0].firstChild.data
self.__tradestatus = xmlp.getElementsByTagName('istrbanned')[0].firstChild.data
self.__premium = xmlp.getElementsByTagName('ispremium')[0].firstChild.data
self.__gamebans = xmlp.getElementsByTagName('gamebans')[0].firstChild.data
# Fetching custom description...
dcs = xmlp.getElementsByTagName('customdescr')[0].firstChild
self.__description = dcs.data if dcs else ''
|
justinpotts/mozillians
|
vendor-local/lib/python/tablib/formats/_json.py
|
Python
|
bsd-3-clause
| 991 | 0 |
# -*- coding: utf-8 -*-
""" Tablib - JSON Support
"""
import tablib
import sys
from tablib.packages import omnijson
|
as json
title = 'json'
extentions = ('json', 'jsn')
def export_set(dataset):
"""Returns JSON representation of Dataset."""
return json.dumps(dataset.dict)
def export_book(databook):
"""Returns JSON representation of Databook."""
return json.dumps(databook._package())
def import_set(dset, in_stream):
"""Returns dataset from JSON stream."""
dset.wipe()
dset.dict = json.loads(in_stream)
def import_book(dbook, in_stream):
"""Returns databook from
|
JSON stream."""
dbook.wipe()
for sheet in json.loads(in_stream):
data = tablib.Dataset()
data.title = sheet['title']
data.dict = sheet['data']
dbook.add_sheet(data)
def detect(stream):
"""Returns True if given stream is valid JSON."""
try:
json.loads(stream)
return True
except ValueError:
return False
|
leppa/home-assistant
|
homeassistant/components/nest/__init__.py
|
Python
|
apache-2.0
| 14,138 | 0.000637 |
"""Support for Nest devices."""
from datetime import datetime, timedelta
import logging
import socket
import threading
from nest import Nest
from nest.nest import APIError, AuthorizationError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_BINARY_SENSORS,
CONF_FILENAME,
CONF_MONITORED_CONDITIONS,
CONF_SENSORS,
CONF_STRUCTURE,
|
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
from homeassistant.helpers.entity import Entity
from . import local_auth
from .const import DOMAIN
_CONFIGURING
|
= {}
_LOGGER = logging.getLogger(__name__)
SERVICE_CANCEL_ETA = "cancel_eta"
SERVICE_SET_ETA = "set_eta"
DATA_NEST = "nest"
DATA_NEST_CONFIG = "nest_config"
SIGNAL_NEST_UPDATE = "nest_update"
NEST_CONFIG_FILE = "nest.conf"
CONF_CLIENT_ID = "client_id"
CONF_CLIENT_SECRET = "client_secret"
ATTR_ETA = "eta"
ATTR_ETA_WINDOW = "eta_window"
ATTR_STRUCTURE = "structure"
ATTR_TRIP_ID = "trip_id"
AWAY_MODE_AWAY = "away"
AWAY_MODE_HOME = "home"
ATTR_AWAY_MODE = "away_mode"
SERVICE_SET_AWAY_MODE = "set_away_mode"
SENSOR_SCHEMA = vol.Schema(
{vol.Optional(CONF_MONITORED_CONDITIONS): vol.All(cv.ensure_list)}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
vol.Optional(CONF_STRUCTURE): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_SENSORS): SENSOR_SCHEMA,
vol.Optional(CONF_BINARY_SENSORS): SENSOR_SCHEMA,
}
)
},
extra=vol.ALLOW_EXTRA,
)
SET_AWAY_MODE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_AWAY_MODE): vol.In([AWAY_MODE_AWAY, AWAY_MODE_HOME]),
vol.Optional(ATTR_STRUCTURE): vol.All(cv.ensure_list, [cv.string]),
}
)
SET_ETA_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ETA): cv.time_period,
vol.Optional(ATTR_TRIP_ID): cv.string,
vol.Optional(ATTR_ETA_WINDOW): cv.time_period,
vol.Optional(ATTR_STRUCTURE): vol.All(cv.ensure_list, [cv.string]),
}
)
CANCEL_ETA_SCHEMA = vol.Schema(
{
vol.Required(ATTR_TRIP_ID): cv.string,
vol.Optional(ATTR_STRUCTURE): vol.All(cv.ensure_list, [cv.string]),
}
)
def nest_update_event_broker(hass, nest):
"""
Dispatch SIGNAL_NEST_UPDATE to devices when nest stream API received data.
Runs in its own thread.
"""
_LOGGER.debug("Listening for nest.update_event")
while hass.is_running:
nest.update_event.wait()
if not hass.is_running:
break
nest.update_event.clear()
_LOGGER.debug("Dispatching nest data update")
dispatcher_send(hass, SIGNAL_NEST_UPDATE)
_LOGGER.debug("Stop listening for nest.update_event")
async def async_setup(hass, config):
"""Set up Nest components."""
if DOMAIN not in config:
return True
conf = config[DOMAIN]
local_auth.initialize(hass, conf[CONF_CLIENT_ID], conf[CONF_CLIENT_SECRET])
filename = config.get(CONF_FILENAME, NEST_CONFIG_FILE)
access_token_cache_file = hass.config.path(filename)
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"nest_conf_path": access_token_cache_file},
)
)
# Store config to be used during entry setup
hass.data[DATA_NEST_CONFIG] = conf
return True
async def async_setup_entry(hass, entry):
"""Set up Nest from a config entry."""
nest = Nest(access_token=entry.data["tokens"]["access_token"])
_LOGGER.debug("proceeding with setup")
conf = hass.data.get(DATA_NEST_CONFIG, {})
hass.data[DATA_NEST] = NestDevice(hass, conf, nest)
if not await hass.async_add_job(hass.data[DATA_NEST].initialize):
return False
for component in "climate", "camera", "sensor", "binary_sensor":
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
def validate_structures(target_structures):
all_structures = [structure.name for structure in nest.structures]
for target in target_structures:
if target not in all_structures:
_LOGGER.info("Invalid structure: %s", target)
def set_away_mode(service):
"""Set the away mode for a Nest structure."""
if ATTR_STRUCTURE in service.data:
target_structures = service.data[ATTR_STRUCTURE]
validate_structures(target_structures)
else:
target_structures = hass.data[DATA_NEST].local_structure
for structure in nest.structures:
if structure.name in target_structures:
_LOGGER.info(
"Setting away mode for: %s to: %s",
structure.name,
service.data[ATTR_AWAY_MODE],
)
structure.away = service.data[ATTR_AWAY_MODE]
def set_eta(service):
"""Set away mode to away and include ETA for a Nest structure."""
if ATTR_STRUCTURE in service.data:
target_structures = service.data[ATTR_STRUCTURE]
validate_structures(target_structures)
else:
target_structures = hass.data[DATA_NEST].local_structure
for structure in nest.structures:
if structure.name in target_structures:
if structure.thermostats:
_LOGGER.info(
"Setting away mode for: %s to: %s",
structure.name,
AWAY_MODE_AWAY,
)
structure.away = AWAY_MODE_AWAY
now = datetime.utcnow()
trip_id = service.data.get(
ATTR_TRIP_ID, "trip_{}".format(int(now.timestamp()))
)
eta_begin = now + service.data[ATTR_ETA]
eta_window = service.data.get(ATTR_ETA_WINDOW, timedelta(minutes=1))
eta_end = eta_begin + eta_window
_LOGGER.info(
"Setting ETA for trip: %s, "
"ETA window starts at: %s and ends at: %s",
trip_id,
eta_begin,
eta_end,
)
structure.set_eta(trip_id, eta_begin, eta_end)
else:
_LOGGER.info(
"No thermostats found in structure: %s, " "unable to set ETA",
structure.name,
)
def cancel_eta(service):
"""Cancel ETA for a Nest structure."""
if ATTR_STRUCTURE in service.data:
target_structures = service.data[ATTR_STRUCTURE]
validate_structures(target_structures)
else:
target_structures = hass.data[DATA_NEST].local_structure
for structure in nest.structures:
if structure.name in target_structures:
if structure.thermostats:
trip_id = service.data[ATTR_TRIP_ID]
_LOGGER.info("Cancelling ETA for trip: %s", trip_id)
structure.cancel_eta(trip_id)
else:
_LOGGER.info(
"No thermostats found in structure: %s, "
"unable to cancel ETA",
structure.name,
)
hass.services.async_register(
DOMAIN, SERVICE_SET_AWAY_MODE, set_away_mode, schema=SET_AWAY_MODE_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_SET_ETA, set_eta, schema=SET_ETA_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_CANCEL_ETA, cancel_eta, schema=CANCEL_ETA_SCHEMA
)
@callback
def start_up(event):
"""Start Nest upda
|
pythonindia/junction
|
tests/conftest.py
|
Python
|
mit
| 212 | 0 |
# -*- coding: utf-8 -*-
import os
import django
from .fixtures import * # noqa
|
# import pytest
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
def p
|
ytest_configure(config):
django.setup()
|
vsfs/vsfs-bench
|
ec2/fabfile.py
|
Python
|
apache-2.0
| 3,913 | 0 |
#!/usr/bin/env python
#
# Copyright 2014 (c) Lei Xu <eddyxu@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fabric.api import task
import yaml
import logging
import vsfs_ec2 as ec2
vsfs = ec2.VsfsEC2()
logging.basicConfig(format='[%(asctime)s](%(levelname)s) %(message)s',
level=logging.INFO)
@task
def help(name=''):
"""Print full information of the function. (name='task')
"""
if not name:
print("Use: 'fab help:func_name' for detailed help for each task.")
else:
print(globals()[name].__doc__)
@task
def price_history(instance='m1.small', n=10):
"""Print out the recent price history (instance='m1.small',n=10).
"""
vsfs.get_spot_price_history(instance_type=instance, num_prices=n)
@task
def spot_requests():
"""Prints all active spot instance requests.
"""
vsfs.get_all_spot_requests()
@task
def instances(state='running'):
"""Prints the information of instance.
"""
vsfs.get_all_instances(state)
@task
def image_list():
"""Prints all images.
"""
vsfs.print_all_images()
@task
def image_create(price=0.01, spot='yes', revision='HEAD', branch='master'):
"""Creates an VSFS image using Spot Instance (price=0.01,spot=yes/no).
Options:
@param spot set to 'yes' to use spot instance, set to 'no' to use on-demand
instance. default: 'yes'
@param price the bid price for spot instance. default: 0.01
@param branch git branch of the vsfs source.
@param revision the git revsion of the vsfs source.
"""
if spot == 'yes':
vsfs.create_image_spot(price)
else:
vsfs.create_image()
@task
def image_delete(image_id):
"""Deleted a stored image with the given ID.
"""
vsfs.delete_image(image_id)
@task
def security_group_list():
"""List out all security groups.
"""
vsfs.print_security_groups()
@task
def
|
cluster_start(ami, nmaster, nindexd, nclient, yaml='example.yaml'):
"""Starts a cluster (ami='', nmaster=0, nindexd=0, nclient=0, \
yaml='example.yaml')
|
Configuration of cluster is defined in 'example.yaml'
"""
num_masters = int(nmaster)
num_indexd = int(nindexd)
num_client = int(nclient)
vsfs.start_cluster(ami, num_masters, num_indexd, num_client,
conf_yaml=yaml)
@task
def vpc_list():
"""Prints all available VPC and its detailed information.
"""
vsfs.print_vpcs()
@task
def vpc_create():
"""Creates a 10.0.0.0/22 virtual private cluster (VPC).
"""
vsfs.create_vpc()
@task
def vpc_clear():
"""Removes all virtual private clusters.
"""
vsfs.remove_vpcs()
@task
def list_x509_certifications():
print vsfs.locate_x509_certifications()
@task
def s3_space():
"""Calculate s3 space consumption.
"""
vsfs.print_s3_space()
@task
def volume_list():
"""List all volumes
"""
vsfs.print_volumes()
@task
def volume_create(ami, price, volsize):
"""Creates a new EBS volume and format it (param: ami, price, volsize)
"""
vsfs.create_volume_spot(ami, price, volsize)
@task
def elastic_ip_list():
"""List all elastic ips.
"""
vsfs.print_elastic_ips()
@task
def test_run():
"""Start cluster on active instances.
"""
confs = {}
with open('test.yaml') as fobj:
confs = yaml.load(fobj.read())
vsfs.start_test_cluster(confs)
|
JazzeYoung/VeryDeepAutoEncoder
|
pylearn2/pylearn2/datasets/tests/test_mnistplus.py
|
Python
|
bsd-3-clause
| 1,978 | 0 |
"""
This file tests the MNISTPlus class. majorly concerning the X and y member
of the dataset and their corresponding sizes, data scales and topological
views.
"""
from pylearn2.datasets.mnistplus import MNISTPlus
from pylearn2.space impo
|
rt IndexSpace, VectorSpace
import unittest
from pylearn2.testing.skip import skip_if_no_data
import numpy as np
def test_MNISTPlus():
"""
Test the MNISTPlus warper.
Tests the scale of data, the splitting of train, valid, test sets.
Tests that a topological batch has 4 dimensions.
Tests that it work well with selected type of augmentation.
"""
sk
|
ip_if_no_data()
for subset in ['train', 'valid', 'test']:
ids = MNISTPlus(which_set=subset)
assert 0.01 >= ids.X.min() >= 0.0
assert 0.99 <= ids.X.max() <= 1.0
topo = ids.get_batch_topo(1)
assert topo.ndim == 4
del ids
train_y = MNISTPlus(which_set='train', label_type='label')
assert 0.99 <= train_y.X.max() <= 1.0
assert 0.0 <= train_y.X.min() <= 0.01
assert train_y.y.max() == 9
assert train_y.y.min() == 0
assert train_y.y.shape == (train_y.X.shape[0], 1)
train_y = MNISTPlus(which_set='train', label_type='azimuth')
assert 0.99 <= train_y.X.max() <= 1.0
assert 0.0 <= train_y.X.min() <= 0.01
assert 0.0 <= train_y.y.max() <= 1.0
assert 0.0 <= train_y.y.min() <= 1.0
assert train_y.y.shape == (train_y.X.shape[0], 1)
train_y = MNISTPlus(which_set='train', label_type='rotation')
assert 0.99 <= train_y.X.max() <= 1.0
assert 0.0 <= train_y.X.min() <= 0.01
assert train_y.y.max() == 9
assert train_y.y.min() == 0
assert train_y.y.shape == (train_y.X.shape[0], 1)
train_y = MNISTPlus(which_set='train', label_type='texture_id')
assert 0.99 <= train_y.X.max() <= 1.0
assert 0.0 <= train_y.X.min() <= 0.01
assert train_y.y.max() == 9
assert train_y.y.min() == 0
assert train_y.y.shape == (train_y.X.shape[0], 1)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.