repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars)
---|---|---|---|---|---|---|---|---
sofianehaddad/ot-svn | python/test/t_ClaytonCopulaFactory_std.py | Python | mit | 1,214 | 0.000824
#! /usr/bin/env python
from openturns import *
TESTPREAMBLE()
RandomGenerator.SetSeed(0)
try:
distribution = ClaytonCopula(1.5)
size = 1000
sample = distribution.getSample(size)
factory = ClaytonCopulaFactory()
estimatedDistribution = factory.build(sample)
print "distribution=", repr(distribution)
print "Estimated distribution=", repr(estimatedDistribution)
estimatedDistribution = factory.build()
print "Default distribution=", estimatedDistribution
estimatedDistribution = factory.build(
distribution.getParametersCollection())
print "Distribution from parameters=", estimatedDistribution
estimatedClaytonCopula = factory.buildAsClaytonCopula(sample)
print "ClaytonCopula =", d
|
istribution
print "Estimated claytonCopula=", estimatedClaytonCopula
estimatedClaytonCopula = factory.buildAsClaytonCopula()
print "Default claytonCopula=", estimatedClaytonCopula
estimatedClaytonCopula = factory.buildAsClaytonCopula(
distribution.getParametersCollection())
print "ClaytonCopula from parameters=", estimatedClaytonCopula
except:
import sys
print "t_ClaytonCopulaFactory_std.py", sys.exc_type, sys.exc_value
Unitech/Skytoop | controllers/widget.py | Python | mit | 3,369 | 0.006827
# -*- coding: utf-8 -*-
#
# Copyright 2011, Alexandre Strzelewicz
# Licensed under the MIT Version
#
#########################################################
#
# Widget generic controller
#
#########################################################
xss = local_import('xss')
def can_modify():
if session.can_modify == False:
raise HTTP(404)
def get_widget():
session.forget(response)
widgets = db((db.widgets.desktop_link==session.desktop_id)).select()
return dict(widgets=widgets)
def new_widget():
can_modify()
# Xss prevention
for req in request.vars:
request.vars[req] = xss.xssescape(request.vars[req])
ids = db.widgets.insert(x=request.vars.x,
y=request.vars.y,
width=request.vars.width,
height=request.vars.height,
type=request.vars.type,
data1=request.vars.data1,
data2=request.vars.data2,
data3=request.vars.data3,
title=request.vars.title,
user_link=auth.user.id,
desktop_link=session.desktop_id)
return response.json({'success':'true', 'id':ids})
def remove_widget():
can_modify()
row = db((db.widgets.user_link==auth.user.id)
& (db.widgets.desktop_link==session.desktop_id)
& (db.widgets.id==request.vars.id)).delete()
return response.json({'success':'true'})
#@security.xssremove
def update_widget():
can_modify()
# Xss prevention
for req in request.vars:
request.vars[req] = xss.xssescape(request.vars[req])
db((db.widgets.user_link==auth.user.id)
& (db.widgets.desktop_link==session.desktop_id)
& (db.widgets.id==request.vars.id)) \
.update(x=request.vars.x,
y=request.vars.y,
width=request.vars.width,
height=request.vars.height,
type=request.vars.type,
data1=request.vars.data1,
data2=request.vars.data2,
data3=request.vars.data3,
title=request.vars.title)
return response.json({'success':'true'})
#
# entr widgets to share widget (to put in desk.py)
#
# widgets = db((db.desktop.id==db.entr_desktop_widgets.desktop_link)
# & (db.widgets_entr.id==db.entr_desktop_widgets.widget_link)
# & (db.desktop.id==desktop.id))\
# .select(db.widgets_entr.ALL)
# logger.debug(widgets)
#
#
#
# def new_widget_entr():
# widget = db.widgets_entr.insert(x=request.vars.x,
# y=request.vars.y,
# width=request.vars.width,
# height=request.vars.height,
# type=request.vars.type,
# data1=request.vars.data1,
# data2=request.vars.data2,
# data3=request.vars.data3,
# title=request.vars.title)
# db.entr_desktop_widgets.insert(desktop_link=session.desktop_id,
# widget_link=widget.id)
# return response.json({'success':'true', 'id':widget.id})
danielsunzhongyuan/my_leetcode_in_python | convert_a_number_to_hexadecimal_405.py | Python | apache-2.0 | 1,152 | 0.005208
"""
Given an integer, write an algorithm to convert it to hexadecimal. For a negative integer, the two's complement method is used.
Note:
All letters in hexadecimal (a-f) must be in lowercase.
The hexadecimal string must not contain extra leading 0s.
If the number is zero, it is represented by a single zero character '0'; otherwise,
the first character in the hexadecimal string will not be the zero character.
The given number is guaranteed to fit within the range of a 32-bit signed integer.
You must not use any method provided by the library which converts/formats the number to hex directly.
Example 1:
Input:
26
Output:
"1a"
Example 2:
Input:
-1
Output:
"ffffffff"
"""
class Solution(object):
def toHex(self, num):
"""
:type num: int
:rtype: str
"""
if 0 == num:
return "0"
# both OK
mapping = dict(zip(range(0, 16), "0123456789abcdef"))
mapping = "0123456789abcdef"
if num < 0:
num += 2 ** 32
remains = []
while num:
remains.append(mapping[num % 16])
num //= 16  # floor division; also correct if run under Python 3
return "".join(remains[::-1])
bardin-lab/readtagger | tests/test_pysamtools_view.py | Python | mit | 404 | 0
import pysam
from readtagger.pysamtools_view import view
INPUT = 'tagged_dm6.bam'
def test_pysamtoolsview(datadir_copy, tmpdir): # noqa: D103
input_bam = str(datadir_copy[INPUT])
output_bam = tmpdir.join('out.bam').strpath
region = '3R:8121625-8121731'
view(input_bam=input_bam, output_bam=output_bam, region=region)
assert len(pysam.AlignmentFile(output_bam).header['SQ']) == 1
CFDEMproject/LIGGGHTS-PUBLIC | python/examples/plot.py | Python | gpl-2.0 | 1,885 | 0.015385
#!/usr/bin/env python -i
# preceding line should have path for Python on your machine
# plot.py
# Purpose: plot Temp of running LIGGGHTS simulation via GnuPlot in Pizza.py
# Syntax: plot.py in.liggghts Nfreq Nsteps compute-ID
# in.liggghts = LIGGGHTS input script
# Nfreq = plot data point every this many steps
# Nsteps = run for this many steps
# compute-ID = ID of compute that calculates temperature
# (or any other scalar quantity)
import sys
sys.path.append("./pizza")
from gnu import gnu
# parse command line
argv = sys.argv
if len(argv) != 5:
print "Syntax: plot.py in.liggghts Nfreq Nsteps compute-ID"
sys.exit()
infile = sys.argv[1]
nfreq = int(sys.argv[2])
nsteps = int(sys.argv[3])
compute = sys.argv[4]
me = 0
# uncomment if running in parallel via Pypar
#import pypar
#me = pypar.rank()
#nprocs = pypar.size()
from liggghts import liggghts
lmp = liggghts()
# run infile all at once
# assumed to have no run command in it
lmp.file(infile)
lmp.command("thermo %d" % nfreq)
# initial 0-step run to generate initial 1-point plot
lmp.command("run 0 pre yes post no")
value = lmp.extract_compute(compute,0,0)
ntimestep = 0
xaxis = [ntimestep]
yaxis = [value]
# wrapper on GnuPlot via Pizza.py gnu tool
# just proc 0 handles plotting
if me == 0:
gn = gnu()
gn.plot(xaxis,yaxis)
gn.xrange(0,nsteps)
gn.title(compute,"Timestep","Temperature")
# run nfreq steps at a time w/out pre/post, query compute, refresh plot
while ntimestep < nsteps:
lmp.command("run %d pre no post no" % nfreq)
ntimestep += nfreq
value = lmp.extract_compute(compute,0,0)
xaxis.append(ntimestep)
yaxis.append(value)
if me == 0: gn.plot(xaxis,yaxis)
lmp.command("run 0 pre no post yes")
# uncomment if running in parallel via Pypar
#print "Proc %d out of %d procs has" % (me,nprocs), lmp
#pypar.finalize()
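Per the syntax comment at the top of the script, a hypothetical invocation that plots a compute named myTemp every 100 steps over a 10000-step run (the input file and compute ID are illustrative only):

python plot.py in.liggghts 100 10000 myTemp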
Mapotempo/mapotempo-qgis-plugin | urllib3/filepost.py | Python | gpl-2.0 | 2,256 | 0.000887
import codecs
from uuid import uuid4
from io import BytesIO
import six
from six import b
from .fields import RequestField
writer = codecs.lookup('utf-8')[3]
def choose_boundary():
"""
Our embarrassingly-simple replacement for mimetools.choose_boundary.
"""
return uuid4().hex
def iter_field_objects(fields):
"""
Iterate over fields.
Supports list of (k, v) tuples and dicts, and lists of
:class:`~urllib3.fields.RequestField`.
"""
if isinstance(fields, dict):
i = six.iteritems(fields)
else:
i = iter(fields)
for field in i:
if isinstance(field, RequestField):
yield field
else:
yield RequestField.from_tuples(*field)
def iter_fields(fields):
"""
.. deprecated:: 1.6
Iterate over fields.
The addition of :class:`~urllib3.fields.RequestField` makes this function
obsolete. Instead, use
:func:`iter_field_objects`, which returns
:class:`~urllib3.fields.RequestField` objects.
Supports list of (k, v) tuples and dicts.
"""
if isinstance(fields, dict):
return ((k, v) for k, v in six.iteritems(fields))
return ((k, v) for k, v in fields)
def encode_multipart_formdata(fields, boundary=None):
"""
Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
:param fields:
Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
:param boundary:
If not specified, then a random boundary will be generated using
:func:`choose_boundary` (this module's uuid4-based replacement for
``mimetools.choose_boundary``).
"""
body = BytesIO()
if boundary is None:
boundary = choose_boundary()
for field in iter_field_objects(fields):
body.write(b('--%s\r\n' % (boundary)))
writer(body).write(field.render_headers())
data = field.data
if isinstance(data, int):
data = str(data) # Backwards compatibility
if isinstance(data, six.text_type):
writer(body).write(data)
else:
body.write(data)
body.write(b'\r\n')
body.write(b('--%s--\r\n' % (boundary)))
content_type = str('multipart/form-data; boundary=%s' % boundary)
return body.getvalue(), content_type
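A minimal usage sketch of encode_multipart_formdata as defined above; the field name and value are made up for illustration:

body, content_type = encode_multipart_formdata({'field': 'some value'})
# content_type looks like 'multipart/form-data; boundary=<uuid4 hex>'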
ONSdigital/eq-survey-runner | app/helpers/schema_helpers.py | Python | mit | 4,296 | 0.002793
from functools import wraps
from uuid import uuid4
from app.globals import get_session_store
from app.utilities.schema import load_schema_from_session_data
def with_schema(function):
"""Adds the survey schema as the first argument to the function being wrapped.
Use on flask request handlers or methods called by flask request handlers.
May error unless there is a `current_user`, so should be used as follows e.g.
```python
@login_required
@with_schema
@full_routing_path_required
def get_block(routing_path, schema, *args):
...
```
"""
@wraps(function)
def wrapped_function(*args, **kwargs):
session_data = get_session_store().session_data
schema = load_schema_from_session_data(session_data)
return function(schema, *args, **kwargs)
return wrapped_function
def get_group_instance_id(schema, answer_store, location, answer_instance=0):
"""Return a group instance_id if required, or None if not"""
if not schema.location_requires_group_instance(location):
return None
dependent_drivers = schema.get_group_dependencies(location.group_id)
if dependent_drivers:
return _get_dependent_group_instance(schema, dependent_drivers, answer_store, location.group_instance)
existing_answers = []
if location.group_id in schema.get_group_dependencies_group_drivers() or \
location.block_id in schema.get_group_dependencies_group_drivers():
group_answer_ids = schema.get_answer_ids_for_group(location.group_id)
existing_answers = answer_store.filter(answer_ids=group_answer_ids, group_instance=location.group_instance)
if location.block_id in schema.get_group_dependencies_block_drivers():
block_answer_ids = schema.get_answer_ids_for_block(location.block_id)
existing_answers = answer_store.filter(answer_ids=block_answer_ids, answer_instance=answer_instance)
# If there are existing answers with a group_instance_id
existing_answers_with_group_instance_id = [answer for answer in existing_answers if answer.get('group_instance_id')]
if existing_answers_with_group_instance_id:
return existing_answers_with_group_instance_id[0]['group_instance_id']
return str(uuid4())
def _get_dependent_group_instance(schema, dependent_drivers, answer_store, group_instance):
group_instance_ids = []
for driver_id in dependent_drivers:
if driver_id in schema.get_group_dependencies_group_drivers():
if schema.get_group(driver_id):
driver_answer_ids = schema.get_answer_ids_for_group(driver_id)
group_instance_ids.extend(_get_group_instance_ids_for_group(answer_store, driver_answer_ids))
else:
driver_answer_ids = schema.get_answer_ids_for_block(driver_id)
group_instance_ids.extend(_get_group_instance_ids_for_group(answer_store, driver_answer_ids))
if driver_id in schema.get_group_dependencies_block_drivers():
driver_answer_ids = schema.get_answer_ids_for_block(driver_id)
group_instance_ids.extend(_get_group_instance_ids_for_block(answer_store, driver_answer_ids))
return group_instance_ids[group_instance]
def _get_group_instance_ids_for_group(answer_store, group_answer_ids):
group_instance_ids = []
group_instances = 0
for answer in list(answer_store.filter(answer_ids=group_answer_ids)):
group_instances = max(group_instances, answer['group_instance'])
for i in range(group_instances + 1):
answers = list(answer_store.filter(answer_ids=group_answer_ids, group_instance=i))
if answers:
group_instance_ids.append(answers[0]['group_instance_id'])
return group_instance_ids
def _get_group_instance_ids_for_block(answer_store, block_answer_ids):
group_instance_ids = []
answer_instances = 0
for answer in list(answer_store.filter(answer_ids=block_answer_ids)):
answer_instances = max(answer_instances, answer['answer_instance'])
for i in range(answer_instances + 1):
answers = list(answer_store.filter(answer_ids=block_answer_ids, answer_instance=i))
if answers:
group_instance_ids.append(answers[0]['group_instance_id'])
return group_instance_ids
moertle/_.py | _/web/auth/__init__.py | Python | mit | 318 | 0.003145
# login methods are dynamically imported if auth is enabled
import logging
from .logout import Logout
import tornado.web
import _
@_.components.Register('auth')
class Authentication(tornado.web.RequestHandler):
@classmethod
def _pyConfig(cls, config):
cls.URL = config.pop('login_page', '/login')
kastnerkyle/crikey | ishaan_model/ishaan_baseline.py | Python | bsd-3-clause | 13,070 | 0.000995
from __future__ import print_function
import numpy as np
import theano
from theano import tensor
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from scipy.io import wavfile
import os
import sys
from kdllib import audio_file_iterator
from kdllib import numpy_one_hot, apply_quantize_preproc
from kdllib import numpy_softmax, numpy_sample_softmax
from kdllib import param, param_search, print_param_info
from kdllib import LearnedInitHidden
from kdllib import Linear
from kdllib import Embedding
from kdllib import Igor
from kdllib import load_checkpoint, theano_one_hot, concatenate
from kdllib import fetch_fruitspeech, list_iterator
from kdllib import np_zeros, GRU, GRUFork
from kdllib import make_weights, make_biases, relu, run_loop
from kdllib import as_shared, adam, gradient_clipping
from kdllib import get_values_from_function, set_shared_variables_in_function
from kdllib import soundsc, categorical_crossentropy
from kdllib import relu, softmax, sample_softmax
if __name__ == "__main__":
import argparse
fs = 16000
minibatch_size = 128
cut_len = 64
n_epochs = 1000 # Used way at the bottom in the training loop!
checkpoint_every_n_epochs = 1
checkpoint_every_n_updates = 1000
checkpoint_every_n_seconds = 60 * 60
random_state = np.random.RandomState(1999)
filepath = "/Tmp/kastner/blizzard_wav_files/*flac"
train_itr = audio_file_iterator(filepath, minibatch_size=minibatch_size,
stop_index=.9, preprocess="quantize")
valid_itr = audio_file_iterator(filepath, minibatch_size=minibatch_size,
start_index=.9, preprocess="quantize")
X_mb, X_mb_mask = next(train_itr)
train_itr.reset()
input_dim = 256
n_embed = 256
n_hid = 512
n_bins = 256
desc = "Speech generation"
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-s', '--sample',
help='Sample from a checkpoint file',
default=None,
required=False)
def restricted_int(x):
if x is None:
# None makes it "auto" sample
return x
x = int(x)
if x < 1:
raise argparse.ArgumentTypeError("%r not range [1, inf]" % (x,))
return x
parser.add_argument('-sl', '--sample_length',
help='Number of steps to sample, default is automatic',
type=restricted_int,
default=None,
required=False)
def restricted_float(x):
if x is None:
# None makes it "auto" temperature
return x
x = float(x)
if x <= 0:
raise argparse.ArgumentTypeError("%r not range (0, inf]" % (x,))
return x
parser.add_argument('-t', '--temperature',
help='Sampling temperature for softmax',
type=restricted_float,
default=None,
required=False)
parser.add_argument('-c', '--continue', dest="cont",
help='Continue training from another saved model',
default=None,
required=False)
args = parser.parse_args()
if args.sample is not None:
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
checkpoint_file = args.sample
if not os.path.exists(checkpoint_file):
raise ValueError("Checkpoint file path %s" % checkpoint_file,
" does not exist!")
print(checkpoint_file)
checkpoint_dict = load_checkpoint(checkpoint_file)
X_mb, X_mb_mask = next(train_itr)
train_itr.reset()
prev_h1, prev_h2, prev_h3 = [np_zeros((minibatch_size, n_hid))
for i in range(3)]
sample_function = checkpoint_dict["sample_function"]
if args.temperature is None:
args.temperature = 1.
if args.sample_length is None:
raise ValueError("NYI - use -sl or --sample_length ")
else:
fixed_steps = args.sample_length
temperature = args.temperature
completed = []
# 0 is in the middle
# CANNOT BE 1 timestep - will get floating point exception!
# 2 may still be buggy because X_sym gets sliced and scan gets mad with 1 timestep usually...
init_x = 127 + np_zeros((3, minibatch_size, 1)).astype(theano.config.floatX)
for i in range(fixed_steps):
if i % 100 == 0:
print("Sampling step %i" % i)
rvals = sample_function(init_x, prev_h1, prev_h2,
prev_h3)
sampled, h1_s, h2_s, h3_s = rvals
pred_s = numpy_softmax(sampled, temperature=temperature)
# debug=True gives argmax
# use 0 since it is a moving window
choice = numpy_sample_softmax(pred_s[0], random_state)
choice = choice[None]
completed.append(choice)
# use 3 since scan is throwing exceptions
init_x = np.concatenate((choice[..., None], choice[..., None], choice[..., None]),
axis=0)
init_x = init_x.astype(theano.config.floatX)
# use next step
prev_h1 = h1_s[0]
prev_h2 = h2_s[0]
prev_h3 = h3_s[0]
print("Completed sampling after %i steps" % fixed_steps)
# mb, length
completed = np.array(completed)[:, 0, :]
completed = completed.transpose(1, 0)
# all samples would be range(len(completed))
for i in range(10):
ex = completed[i].ravel()
s = "gen_%i.wav" % (i)
"""
ex = ex.astype("float32")
ex -= ex.min()
ex /= ex.max()
ex -= 0.5
ex *= 0.95
wavfile.write(s, fs, ex)
"""
wavfile.write(s, fs, soundsc(ex))
print("Sampling complete, exiting...")
sys.exit()
else:
print("No plotting arguments, starting training mode!")
X_sym = tensor.tensor3("X_sym")
X_sym.tag.test_value = X_mb[:cut_len]
X_mask_sym = tensor.matrix("X_mask_sym")
X_mask_sym.tag.test_value = X_mb_mask[:cut_len]
init_h1_i = tensor.matrix("init_h1")
init_h1_i.tag.test_value = np_zeros((minibatch_size, n_hid))
init_h2_i = tensor.matrix("init_h2")
init_h2_i.tag.test_value = np_zeros((minibatch_size, n_hid))
init_h3_i = tensor.matrix("init_h3")
init_h3_i.tag.test_value = np_zeros((minibatch_size, n_hid))
init_h1, init_h2, init_h3 = LearnedInitHidden(
[init_h1_i, init_h2_i, init_h3_i], 3 * [(minibatch_size, n_hid)])
inpt = X_sym[:-1]
target = X_sym[1:]
mask = X_mask_sym[:-1]
embed_dim = 256
embed1 = Embedding(inpt, 256, embed_dim, random_state)
in_h1, ingate_h1 = GRUFork([embed1], [embed_dim], n_hid, random_state)
in_h2, ingate_h2 = GRUFork([embed1], [embed_dim], n_hid, random_state)
in_h3, ingate_h3 = GRUFork([embed1], [embed_dim], n_hid, random_state)
def step(in_h1_t, ingate_h1_t,
in_h2_t, ingate_h2_t,
in_h3_t, ingate_h3_t,
h1_tm1, h2_tm1, h3_tm1):
h1_t = GRU(in_h1_t, ingate_h1_t, h1_tm1, n_hid, n_hid, random_state)
h1_h2_t, h1gate_h2_t = GRUFork([h1_t], [n_hid], n_hid, random_state)
h1_h3_t, h1gate_h3_t = GRUFork([h1_t], [n_hid], n_hid, random_state)
h2_t = GRU(h1_h2_t + in_h2_t, h1gate_h2_t + ingate_h2_t, h2_tm1,
n_hid, n_hid, random_state)
h2_h3_t, h2gate_h3_t = GRUFork([h2_t], [n_hid], n_hid, random_state)
h3_t = GRU(h2_h3_t + in_h3_t + h1_h3_t,
h2gate_h3_t + ingate_h3_t + h1gate_h3_t, h3_tm1,
n_hid, n_hid, random_state)
return h1_t, h2_t, h3_t
bally12345/enigma2 | lib/python/Plugins/SystemPlugins/TempFanControl/plugin.py | Python | gpl-2.0 | 10,581 | 0.019941
from Components.ActionMap import ActionMap
from Components.Sensors import sensors
from Components.Sources.Sensor import SensorSource
from Components.Sources.StaticText import StaticText
from Components.ConfigList import ConfigListScreen
from Components.config import getConfigListEntry
from Screens.Screen import Screen
from Plugins.Plugin import PluginDescriptor
from Components.FanControl import fancontrol
class TempFanControl(Screen, ConfigListScreen):
skin = """
<screen position="center,center" size="570,420" title="Fan Control" >
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" alphatest="on" />
<widget source="red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget source="green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget source="yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
<widget source="blue" render="Label" position="420,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#18188b" transparent="1" />
<widget name="config" position="10,50" size="550,120" scrollbarMode="showOnDemand" />
<widget source="SensorTempText0" render="Label" position="10,150" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorTemp0" render="Label" position="100,150" zPosition="1" size="100,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorTempText1" render="Label" position="10,170" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorTemp1" render="Label" position="100,170" zPosition="1" size="100,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorTempText2" render="Label" position="10,190" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorTemp2" render="Label" position="100,190" zPosition="1" size="100,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorTempText3" render="Label" position="10,210" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorTemp3" render="Label" position="100,210" zPosition="1" size="100,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorTempText4" render="Label" position="10,230" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorTemp4" render="Label" position="100,230" zPosition="1" size="100,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorTempText5" render="Label" position="10,250" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorTemp5" render="Label" position="100,250" zPosition="1" size="100,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorTempText6" render="Label" position="10,270" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorTemp6" render="Label" position="100,270" zPosition="1" size="100,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorTempText7" render="Label" position="10,290" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorTemp7" render="Label" position="100,290" zPosition="1" size="100,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorFanText0" render="Label" position="290,150" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorFan0" render="Label" position="380,150" zPosition="1" size="150,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorFanText1" render="Label" position="290,170" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorFan1" render="Label" position="380,170" zPosition="1" size="150,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorFanText2" render="Label" position="290,190" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorFan2" render="Label" position="380,190" zPosition="1" size="150,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorFanText3" render="Label" position="290,210" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorFan3" render="Label" position="380,210" zPosition="1" size="150,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget sou
|
rce="SensorFanText4" render="Label" position="290,230" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget sour
|
ce="SensorFan4" render="Label" position="380,230" zPosition="1" size="150,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorFanText5" render="Label" position="290,250" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorFan5" render="Label" position="380,250" zPosition="1" size="150,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorFanText6" render="Label" position="290,270" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorFan6" render="Label" position="380,270" zPosition="1" size="150,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
<widget source="SensorFanText7" render="Label" position="290,290" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
<widget source="SensorFan7" render="Label" position="380,290" zPosition="1" size="150,20" font="Regular;19" halign="right">
<convert type="SensorToText"></convert>
</widget>
</screen>"""
def __init__(self, session, args = None):
Screen.__init__(self, session)
templist = sensors.getSensorsList(sensors.TYPE_TEMPERATURE)
tempcount = len(templist)
fanlist = sensors.getSensorsList(sensors.TYPE_FAN_RPM)
fancount = len(fanlist)
self["red"] = StaticText(_("Cancel"))
self["green"] = StaticText(_("OK"))
self["yellow"] = StaticText("")
self["blue"] = StaticText("")
for count in range(8):
if count < tempcount:
id = templist[count]
self[
KellyChan/python-examples | python/aldebaran/hana/hana/motion/cartesian/motion_hulaHoop.py | Python | mit | 2,824 | 0.009561
# -*- encoding: UTF-8 -*-
import sys
import motion
import almath
from naoqi import ALProxy
def StiffnessOn(proxy):
# We use the "Body" name to signify the collection of all joints
pNames = "Body"
pStiffnessLists = 1.0
pTimeLists = 1.0
proxy.stiffnessInterpolation(pNames, pStiffnessLists, pTimeLists)
def main(robotIP):
'''
Example showing a Hula Hoop Motion
with the NAO cartesian control of torso
'''
# Init proxies.
try:
motionProxy = ALProxy("ALMotion", robotIP, 9559)
except Exception, e:
print "Could not create proxy to ALMotion"
print "Error was: ", e
try:
postureProxy = ALProxy("ALRobotPosture", robotIP, 9559)
except Exception, e:
print "Could not create proxy to ALRobotPosture"
print "Error was: ", e
# Set NAO in Stiffness On
StiffnessOn(motionProxy)
# Send NAO to Pose Init
postureProxy.goToPosture("StandInit", 0.5)
# Define the changes relative to the current position
dx = 0.07 # translation axis X (meter)
dy = 0.07 # translation axis Y (meter)
dwx = 0.15 # rotation axis X (rad)
dwy = 0.15 # rotation axis Y (rad)
# Define a path of two hula hoop loops
path = [ [+dx, 0.0, 0.0, 0.0, -dwy, 0.0], # point 01 : forward / bend backward
[0.0, -dy, 0.0, -dwx, 0.0, 0.0], # point 02 : right / bend left
[-dx, 0.0, 0.0, 0.0, dwy, 0.0], # point 03 : backward / bend forward
[0.0, +dy, 0.0, dwx, 0.0, 0.0], # point 04 : left / bend right
[+dx, 0.0, 0.0, 0.0, -dwy, 0.0], # point 01 : forward / bend backward
[0.0, -dy, 0.0, -dwx, 0.0, 0.0], # point 02 : right / bend left
[-dx, 0.0, 0.0, 0.0, dwy, 0.0], # point 03 : backward / bend forward
[0.0, +dy, 0.0, dwx, 0.0, 0.0], # point 04 : left / bend right
[+dx, 0.0, 0.0, 0.0, -dwy, 0.0], # point 05 : forward / bend backward
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0] ] # point 06 : Back to init pose
timeOneMove = 0.4 #seconds
times = []
for i in range(len(path)):
times.append( (i+1)*timeOneMove )
# call the cartesian control API
effector = "Torso"
space = motion.FRAME_ROBOT
axisMask = almath.AXIS_MASK_ALL
isAbsolute = False
motionProxy.positionInterpolation(effector, space, path,
axisMask, times, isAbsolute)
if __name__ == "__main__":
robotIp = "127.0.0.1"
if len(sys.argv) <= 1:
print "Usage python motion_hulaHoop.py robotIP (optional default: 127.0.0.1)"
else:
robotIp = sys.argv[1]
main(robotIp)
saurabhkumar1989/programming_question_python | my_question/all-valid-bracket-permutation.py | Python | apache-2.0 | 545 | 0.029358
'''
Input : n=1
Output: {}
Input : n=2
Output:
{}{}
{{}}
https://www.geeksforgeeks.org/print-all-combinations-of-balanced-parentheses/
'''
def printParenthesis(string, openP, closeP):
if(openP==0 and closeP==0):
# all opening and closing are done
print string
else:
if(openP>closeP):
return
if(closeP>0):
printParenthesis(string+'}',openP,closeP-1)
if(openP>0):
printParenthesis(string+'{',openP-1,closeP)
n = 3
printParenthesis("", n, n)
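For n = 3 the recursion above prints the five balanced arrangements in this order (the close-brace branch is tried first, but pruned whenever remaining opens exceed remaining closes):

{}{}{}
{}{{}}
{{}}{}
{{}{}}
{{{}}}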
MinnowBoard/minnow-maker | setup.py | Python | mit | 1,963 | 0.028018
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from distutils import core
from distutils.command.install import install
import sys, os, subprocess
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def git(*args):
return subprocess.check_call(['git'] + list(args))
class low_speed_spidev(install):
def run(self):
spidev_directory = subprocess.Popen(["pwd"],stdout=subprocess.PIPE)
spidev_directory, err = spidev_directory.communicate()
spidev_directory = spidev_directory.rstrip() + "/low_speed_spidev"
os.chdir("/usr/src/kernel/")
subprocess.call(["make", "scripts"])
subprocess.call("make")
os.chdir(spidev_directory)
subprocess.call(["insmod", "low-speed-spidev.ko"])
os.chdir("..")
class install_all(install):
def run(self):
current_directory = subprocess.Popen(["pwd"],stdout=subprocess.PIPE)
current_directory, err = current_directory.communicate()
subprocess.call(["sh","depends.sh"])
subprocess.call(["pip install -r requirements.txt --no-clean"], shell=True)
install.run(self)
setup(name = 'Maker project package',
version = '0.4',
author = 'Adafruit Industries, Intel Corporation',
author_email = 'tdicola@adafruit.com, evan.steele@intel.com',
description = 'Library to provide a cross-platform GPIO interface on the Raspberry Pi and Beaglebone Black using the RPi.GPIO and Adafruit_BBIO libraries. Python code to run the hardware needed for the Minnowboard maker projects found at wiki.minnowboard.org',
license = 'MIT',
packages = ['pyDrivers' , 'Adafruit_Python_GPIO/Adafruit_GPIO'],
long_description = read('README.md'),
cmdclass={'low_speed_spidev':low_speed_spidev, 'install_all':install_all},
install_requires=['PIL', 'numpy'],
)
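The two custom commands registered via cmdclass are run like any other setup.py command; hypothetical invocations, assuming the kernel sources and the low_speed_spidev directory described above are present:

python setup.py install_all
python setup.py low_speed_spidev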
yugangw-msft/azure-cli | src/azure-cli/azure/cli/command_modules/eventhubs/tests/latest/test_eventhub_commands_namespace_premium_test.py | Python | mit | 4,228 | 0.004257
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# AZURE CLI EventHub - NAMESPACE TEST DEFINITIONS
import time
from azure.cli.testsdk import (ScenarioTest, ResourceGroupPreparer, KeyVaultPreparer)
# pylint: disable=line-too-long
# pylint: disable=too-many-lines
class EHNamespaceBYOKCURDScenarioTest(ScenarioTest):
from azure.cli.testsdk.scenario_tests import AllowLargeResponse
@AllowLargeResponse()
@ResourceGroupPreparer(name_prefix='cli_test_eh_namespace')
@KeyVaultPreparer(name_prefix='cli', name_len=15, additional_params='--enable-soft-delete --enable-purge-protection')
def test_eh_namespace_premium(self, resource_group):
self.kwargs.update({
'loc': 'eastus',
'rg': resource_group,
'namespacename': self.create_random_name(prefix='eventhubs-nscli', length=20),
'namespacename1': self.create_random_name(prefix='eventhubs-nscli', length=20),
'namespacename2': self.create_random_name(prefix='eventhubs-nscli', length=20),
'namespacenamekafka': self.create_random_name(prefix='eventhubs-nscli1', length=20),
'tags': {'tag1=value1'},
'tags2': {'tag2=value2'},
'sku': 'Standard',
'skupremium': 'Premium',
'authoname': self.create_random_name(prefix='cliAutho', length=20),
'defaultauthorizationrule': 'RootManageSharedAccessKey',
'accessrights': 'Send',
'accessrights1': 'Listen',
'primary': 'PrimaryKey',
'secondary': 'SecondaryKey',
'istrue': 'True',
'isfalse': 'False',
'enableidentity': 'True',
'maximumthroughputunits': 40,
'maximumthroughputunits_update': 5
})
kv_name = self.kwargs['kv']
key_name = self.create_random_name(prefix='cli', length=15)
key_uri = "https://{}.vault.azure.net/".format(kv_name)
self.kwargs.update({
'kv_name': kv_name,
'key_name': key_name,
'key_uri': key_uri
})
# Check for the NameSpace name Availability
self.cmd('eventhubs namespace exists --name {namespacename}', checks=[self.check('nameAvailable', True)])
# Create Namespace
self.cmd('eventhubs namespace create --resource-group {rg} --name {namespacename} --location {loc} --tags {tags}'
' --sku {sku} --maximum-throughput-units {maximumthroughputunits} --disable-local-auth {istrue} --enable-auto-inflate {istrue}',
checks=[self.check('maximumThroughputUnits', '{maximumthroughputunits}'),
self.check('disableLocalAuth', '{istrue}')])
self.kwargs.update({
'maximumthroughputunits': 35})
# Update Namespace
self.cmd('eventhubs namespace update --resource-group {rg} --name {namespacename} '
'--tags {tags2} --maximum-throughput-units {maximumthroughputunits}',
checks=[self.check('maximumThroughputUnits', '{maximumthroughputunits}')])
self.kwargs.update({
'maximumthroughputunits': 16})
# Create Namespace - premium
self.cmd(
'eventhubs namespace create --resource-group {rg} --name {namespacename1} --location {loc} --tags {tags}'
' --sku {skupremium} --disable-local-auth {isfalse}',
checks=[self.check('disableLocalAuth', '{isfalse}'),
self.check('sku.name', '{skupremium}')])
# Update Namespace
self.cmd('eventhubs namespace update --resource-group {rg} --name {namespacename1} --disable-local-auth {istrue} '
'--tags {tags2}')
# Delete Namespace list by ResourceGroup
self.cmd('eventhubs namespace delete --resource-group {rg} --name {namespacename}')
self.cmd('eventhubs namespace delete --resource-group {rg} --name {namespacename1}')
paninetworks/neutron | neutron/db/db_base_plugin_common.py | Python | apache-2.0 | 12,074 | 0.000166
# Copyright (c) 2015 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
from oslo_config import cfg
from oslo_log import log as logging
from sqlalchemy.orm import exc
from neutron.api.v2 import attributes
from neutron.common import constants
from neutron.common import exceptions as n_exc
from neutron.common import utils
from neutron.db import common_db_mixin
from neutron.db import models_v2
# Imports assumed for _get_ipam_subnetpool_driver below; module paths as in
# neutron trees of this era.
from neutron.ipam import driver as ipam_base
from neutron.ipam import subnet_alloc
LOG = logging.getLogger(__name__)
class DbBasePluginCommon(common_db_mixin.CommonDbMixin):
"""Stores getters and helper methods for db_base_plugin_v2
All private getters and simple helpers like _make_*_dict were moved from
db_base_plugin_v2.
More complicated logic and public methods left in db_base_plugin_v2.
Main purpose of this class is to make getters accessible for Ipam
backends.
"""
@staticmethod
def _generate_mac():
return utils.get_random_mac(cfg.CONF.base_mac.split(':'))
@staticmethod
def _delete_ip_allocation(context, network_id, subnet_id, ip_address):
# Delete the IP address from the IPAllocate table
LOG.debug("Delete allocated IP %(ip_address)s "
"(%(network_id)s/%(subnet_id)s)",
{'ip_address': ip_address,
'network_id': network_id,
'subnet_id': subnet_id})
context.session.query(models_v2.IPAllocation).filter_by(
network_id=network_id,
ip_address=ip_address,
subnet_id=subnet_id).delete()
@staticmethod
def _store_ip_allocation(context, ip_address, network_id, subnet_id,
port_id):
LOG.debug("Allocated IP %(ip_address)s "
"(%(network_id)s/%(subnet_id)s/%(port_id)s)",
{'ip_address': ip_address,
'network_id': network_id,
'subnet_id': subnet_id,
'port_id': port_id})
allocated = models_v2.IPAllocation(
network_id=network_id,
port_id=port_id,
ip_address=ip_address,
subnet_id=subnet_id
)
context.session.add(allocated)
def _make_subnet_dict(self, subnet, fields=None, context=None):
res = {'id': subnet['id'],
'name': subnet['name'],
'tenant_id': subnet['tenant_id'],
'network_id': subnet['network_id'],
'ip_version': subnet['ip_version'],
'cidr': subnet['cidr'],
'subnetpool_id': subnet.get('subnetpool_id'),
'allocation_pools': [{'start': pool['first_ip'],
'end': pool['last_ip']}
for pool in subnet['allocation_pools']],
'gateway_ip': subnet['gateway_ip'],
'enable_dhcp': subnet['enable_dhcp'],
'ipv6_ra_mode': subnet['ipv6_ra_mode'],
'ipv6_address_mode': subnet['ipv6_address_mode'],
'dns_nameservers': [dns['address']
for dns in subnet['dns_nameservers']],
'host_routes':
[{'destination': route['destination'],
'nexthop': route['nexthop']}
for route in subnet['routes']],
}
# The shared attribute for a subnet is the same as its parent network
res['shared'] = self._make_network_dict(subnet.networks,
context=context)['shared']
# Call auxiliary extend functions, if any
self._apply_dict_extend_functions(attributes.SUBNETS, res, subnet)
return self._fields(res, fields)
def _make_subnetpool_dict(self, subnetpool, fields=None):
default_prefixlen = str(subnetpool['default_prefixlen'])
min_prefixlen = str(subnetpool['min_prefixlen'])
max_prefixlen = str(subnetpool['max_prefixlen'])
res = {'id': subnetpool['id'],
'name': subnetpool['name'],
'tenant_id': subnetpool['tenant_id'],
'default_prefixlen': default_prefixlen,
'min_prefixlen': min_prefixlen,
'max_prefixlen': max_prefixlen,
'shared': subnetpool['shared'],
'prefixes': [prefix['cidr']
for prefix in subnetpool['prefixes']],
'ip_version': subnetpool['ip_version'],
'default_quota': subnetpool['default_quota']}
return self._fields(res, fields)
def _make_port_dict(self, port, fields=None,
process_extensions=True):
res = {"id": port["id"],
'name': port['name'],
"network_id": port["network_id"],
'tenant_id': port['tenant_id'],
"mac_address": port["mac_address"],
"admin_state_up": port["admin_state_up"],
"status": port["status"],
"fixed_ips": [{'subnet_id': ip["subnet_id"],
'ip_address': ip["ip_address"]}
for ip in port["fixed_ips"]],
"device_id": port["device_id"],
"device_owner": port["device_owner"]}
# Call auxiliary extend functions, if any
if process_extensions:
self._apply_dict_extend_functions(
attributes.PORTS, res, port)
return self._fields(res, fields)
def _get_ipam_subnetpool_driver(self, context, subnetpool=None):
if cfg.CONF.ipam_driver:
return ipam_base.Pool.get_instance(subnetpool, context)
else:
return subnet_alloc.SubnetAllocator(subnetpool, context)
def _get_network(self, context, id):
try:
network = self._get_by_id(context, models_v2.Network, id)
except exc.NoResultFound:
raise n_exc.NetworkNotFound(net_id=id)
return network
def _get_subnet(self, context, id):
try:
subnet = self._get_by_id(context, models_v2.Subnet, id)
except exc.NoResultFound:
raise n_exc.SubnetNotFound(subnet_id=id)
return subnet
def _get_subnetpool(self, context, id):
try:
return self._get_by_id(context, models_v2.SubnetPool, id)
except exc.NoResultFound:
raise n_exc.SubnetPoolNotFound(subnetpool_id=id)
def _get_all_subnetpools(self, context):
# NOTE(tidwellr): see note in _get_all_subnets()
return context.session.query(models_v2.SubnetPool).all()
def _get_port(self, context, id):
try:
port = self._get_by_id(context, models_v2.Port, id)
except exc.NoResultFound:
raise n_exc.PortNotFound(port_id=id)
return port
def _get_dns_by_subnet(self, context, subnet_id):
dns_qry = context.session.query(models_v2.DNSNameServer)
return dns_qry.filter_by(subnet_id=subnet_id).all()
def _get_route_by_subnet(self, context, subnet_id):
route_qry = context.session.query(models_v2.SubnetRoute)
return route_qry.filter_by(subnet_id=subnet_id).all()
def _get_router_gw_ports_by_network(self, context, network_id):
port_qry = context.session.query(models_v2.Port)
return port_qry.filter_by(network_id=network_id,
device_owner=constants.DEVICE_OWNER_ROUTER_GW).all()
def _get_subnets_by_network(self, context, network_id):
subnet_qry = context.session.query(models_v2.Subnet)
return subnet_qry.filter_by(network_id=network_id).all()
def _get_subnets_by_subnetpo
santiagolopezg/MODS_ConvNet | test_lillabcrossval_network.py | Python | mit | 4,398 | 0.033197
import keras
from keras.optimizers import SGD, adadelta, rmsprop, adam
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import np_utils
from keras.metrics import matthews_correlation, precision, recall
import keras.backend as K
import cPickle
import numpy as np
import getpass
username = getpass.getuser()
from little_foo3 import foo
def sens(y_true, y_pred):
y_pred_pos = K.round(K.clip(y_pred, 0, 1))
y_pred_neg = 1 - y_pred_pos
y_pos = K.round(K.clip(y_true, 0, 1))
y_neg = 1 - y_pos
tp = K.sum(y_pos * y_pred_pos)
tn = K.sum(y_neg * y_pred_neg)
fp = K.sum(y_neg * y_pred_pos)
fn = K.sum(y_pos * y_pred_neg)
se = tp / (tp + fn)
return se
def spec(y_true, y_pred):
y_pred_pos = K.round(K.clip(y_pred, 0, 1))
y_pred_neg = 1 - y_pred_pos
y_pos = K.round(K.clip(y_true, 0, 1))
y_neg = 1 - y_pos
tp = K.sum(y_pos * y_pred_pos)
tn = K.sum(y_neg * y_pred_neg)
fp = K.sum(y_neg * y_pred_pos)
fn = K.sum(y_pos * y_pred_neg)
sp = tn / (fp + tn)
return sp
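A small NumPy restatement of the same confusion-matrix arithmetic, useful as a sanity check of sens and spec above (toy labels, not from the source):

import numpy as np
y_true = np.array([1., 1., 0., 0.])
y_pred = np.array([0.9, 0.2, 0.1, 0.8])  # rounds to [1, 0, 0, 1]: tp=1, fn=1, tn=1, fp=1
y_hat = np.round(y_pred)
tp = np.sum(y_true * y_hat)
fn = np.sum(y_true * (1 - y_hat))
tn = np.sum((1 - y_true) * (1 - y_hat))
fp = np.sum((1 - y_true) * y_hat)
assert tp / (tp + fn) == 0.5  # sensitivity, as in sens()
assert tn / (fp + tn) == 0.5  # specificity, as in spec()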
def get_weights(n_dataset, username):
weights='best_weights_v5_labcrossval_{0}_train_3_{1}.h5'.format(n_dataset, username)  # format with the n_dataset argument (was the global loop index i)
model = foo()
model.load_weights(weights)
print ('weights loaded')
return model
def get_data(n_dataset):
f = file('MODS_224_224_{0}_test_3.pkl'.format(n_dataset),'rb')
data = cPickle.load(f)
f.close()
validation_data = data[0]
training_data = data[1]
t_data = training_data[0]
t_label = training_data[1]
test_data = validation_data[0]
test_label = validation_data[1]
t_data = np.array(t_data)
t_label = np.array(t_label)
test_data = np.array(test_data)
test_label = np.array(test_label)
t_data = t_data.reshape(t_data.shape[0], 1, 224, 224)
test_data = test_data.reshape(test_data.shape[0], 1, 224, 224)
#less precision means less memory needed: 64 -> 32 (half the memory used)
t_data = t_data.astype('float32')
test_data = test_data.astype('float32')
return (t_data, t_label), (test_data, test_label)
def test_net(i):
model = get_weights(i, username)
print 'using weights from net trained on dataset {0}'.format(i)
history = LossAccHistory()
(X_train, y_train), (X_test, y_test) = get_data(i)
Y_test = np_utils.to_categorical(y_test, nb_classes)
X_test /= 255
print(X_test.shape[0], 'test samples')
model.compile(loss='binary_crossentropy',
optimizer= rmsprop(lr=0.001), #adadelta
metrics=['accuracy', 'matthews_correlation', 'precision', 'recall', sens, spec])
score = model.evaluate(X_test, Y_test, verbose=1)
print (model.metrics_names, score)
if (len(cvscores[0])==0): #if metric names haven't been saved, do so
cvscores[0].append(model.metrics_names)
else:
counter = 1
for k in score: #for each test metric, append it to the cvscores list
cvscores[counter].append(k)
counter +=1
model.reset_states()
def cv_calc():
#calculate mean and stdev for each metric, and append them to test_metrics file
test_metrics.append(cvscores[0])
other_counter = 0
for metric in cvscores[1:]:
v = 'test {0}: {1:.4f} +/- {2:.4f}%'.format(cvscores[0][0][other_counter], np.mean(metric), np.std(metric))
print v
test_metrics.append(v)
other_counter +=1
if other_counter == 7:
other_counter=0
return cvscores, test_metrics
def save_metrics(cvscores, test_metrics):
#save test metrics to txt file
file = open('MODS_test_metrics_labscrossval.txt', 'w')
for j in cvscores:
file.write('\n%s\n' % j)
for i in test_metrics:
file.write('\n%s\n' % i)
file.close()
print test_metrics
class LossAccHistory(keras.callbacks.Callback):
def on_train_begin(self, logs={}):
self.losses = []
self.accu = []
def on_batch_end(self, batch, logs={}):
self.losses.append(logs.get('loss'))
self.accu.append(logs.get('acc'))
nb_classes = 2
nb_epoch = 100
n_dataset = 5
dropout = 0.5
batch_size = 72
optimizer = 'rmsprop'
test_metrics = []
cvscores = [[],[],[],[],[],[], [], []]
#cvscores = [[metrics],[loss],[acc],[mcc],[precision],[recall], [sens], [spec]]
for i in xrange(n_dataset):
test_net(i)
cvscores, test_metrics = cv_calc()
print cvscores, test_metrics
save_metrics(cvscores, test_metrics)
dvt32/cpp-journey | Python/CodingBat/big_diff.py | Python | mit | 220 | 0.027273
# http://codingbat.com/prob/p184853
def big_diff(nums):
max_num = nums[0]
min_num = nums[0]
for num in nums:
max_num = max(num, max_num)
min_num = min(num, min_num)
return abs(max_num - min_num)
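Hypothetical checks matching the behaviour of big_diff (largest value minus smallest value):

assert big_diff([10, 3, 5, 6]) == 7
assert big_diff([7, 2, 10, 9]) == 8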
Edraak/edx-ora2 | openassessment/assessment/api/peer.py | Python | agpl-3.0 | 37,163 | 0.001453
"""Public interface managing the workflow for peer assessments.
The Peer Assessment Workflow API exposes all public actions required to complete
the workflow for a given submission.
"""
import logging
from django.db import DatabaseError, IntegrityError, transaction
from django.utils import timezone
from dogapi import dog_stats_api
from openassessment.assessment.errors import (PeerAssessmentInternalError, PeerAssessmentRequestError,
PeerAssessmentWorkflowError)
from openassessment.assessment.models import (Assessment, AssessmentFeedback, AssessmentPart, InvalidRubricSelection,
PeerWorkflow, PeerWorkflowItem)
from openassessment.assessment.serializers import (AssessmentFeedbackSerializer, InvalidRubric, RubricSerializer,
full_assessment_dict, rubric_from_dict, serialize_assessments)
from submissions import api as sub_api
logger = logging.getLogger("openassessment.assessment.api.peer")
PEER_TYPE = "PE"
def submitter_is_finished(submission_uuid, peer_requirements):
"""
Check whether the submitter has made the required number of assessments.
If the requirements dict is None (because we're being updated
asynchronously or when the workflow is first created),
then automatically return False.
Args:
submission_uuid (str): The UUID of the submission being tracked.
peer_requirements (dict): Dictionary with the key "must_grade" indicating
the required number of submissions the student must grade.
Returns:
bool
"""
if peer_requirements is None:
return False
try:
workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
if workflow.completed_at is not None:
return True
elif workflow.num_peers_graded() >= peer_requirements["must_grade"]:
workflow.completed_at = timezone.now()
workflow.save()
return True
return False
except PeerWorkflow.DoesNotExist:
return False
except KeyError:
raise PeerAssessmentRequestError(u'Requirements dict must contain "must_grade" key')
def assessment_is_finished(submission_uuid, peer_requirements):
"""
Check whether the submitter has received enough assessments
to get a score.
If the requirements dict is None (because we're being updated
asynchronously or when the workflow is first created),
then automatically return False.
Args:
submission_uuid (str): The UUID of the submission being tracked.
peer_requirements (dict): Dictionary with the key "must_be_graded_by"
indicating the required number of assessments the student
must receive to get a score.
Returns:
bool
"""
if not peer_requirements:
return False
workflow = PeerWorkflow.get_by_submission_uuid(submission_uuid)
if workflow is None:
return False
scored_items = workflow.graded_by.filter(
assessment__submission_uuid=submission_uuid,
assessment__score_type=PEER_TYPE
)
return scored_items.count() >= peer_requirements["must_be_graded_by"]
def on_start(submission_uuid):
"""Create a new peer workflow for a student item and submission.
Creates a unique peer workflow for a student item, associated with a
submission.
Args:
submission_uuid (str): The submission associated with this workflow.
Returns:
None
Raises:
SubmissionError: There was an error retrieving the submission.
PeerAssessmentInternalError: Raised when there is an internal error
creating the Workflow.
"""
try:
with transaction.atomic():
submission = sub_api.get_submission_and_student(submission_uuid)
workflow, __ = PeerWorkflow.objects.get_or_create(
student_id=submission['student_item']['student_id'],
course_id=submission['student_item']['course_id'],
item_id=submission['student_item']['item_id'],
submission_uuid=submission_uuid
)
workflow.save()
except IntegrityError:
# If we get an integrity error, it means someone else has already
# created a workflow for this submission, so we don't need to do anything.
pass
except DatabaseError:
error_message = (
u"An internal error occurred while creating a new peer "
u"workflow for submission {}"
.format(submission_uuid)
)
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
def get_score(submission_uuid, peer_requirements):
"""
Retrieve a score for a submission if requirements have been satisfied.
Args:
submission_uuid (str): The UUID of the submission.
requirements (dict): Dictionary with the key "must_be_graded_by"
indicating the required number of assessments the student
must receive to get a score.
Returns:
A dictionary with the points earned, points possible, and
contributing_assessments information, along with a None staff_id.
"""
if peer_requirements is None:
return None
# User hasn't completed their own submission yet
if not submitter_is_finished(submission_uuid, peer_requirements):
return None
workflow = PeerWorkflow.get_by_submission_uuid(submission_uuid)
if workflow is None:
return None
# Retrieve the assessments in ascending order by score date,
# because we want to use the *first* one(s) for the score.
items = workflow.graded_by.filter(
assessment__submission_uuid=submission_uuid,
assessment__score_type=PEER_TYPE
).order_by('-assessment')
submission_finished = items.count() >= peer_requirements["must_be_graded_by"]
if not submission_finished:
return None
# Unfortunately, we cannot use update() after taking a slice,
# so we need to update and save the items individually.
# One might be tempted to first query for the first n assessments,
# then select items that have those assessments.
# However, this generates a SQL query with a LIMIT in a subquery,
# which is not supported by some versions of MySQL.
# Although this approach generates more database queries, the number is likely to
# be relatively small (at least 1 and very likely less than 5).
for scored_item in items[:peer_requirements["must_be_graded_by"]]:
scored_item.scored = True
scored_item.save()
assessments = [item.assessment for item in items]
return {
"points_earned": sum(
get_assessment_median_scores(submission_uuid).values()
),
"points_possible": assessments[0].points_possible,
"contributing_assessments": [assessment.id for assessment in assessments],
"staff_id": None,
}
def create_assessment(
scorer_submission_uuid,
scorer_id,
options_selected,
criterion_feedback,
overall_feedback,
rubric_dict,
num_required_grades,
scored_at=None
):
"""Creates an assessment on the given submission.
Assessments are created based on feedback associated with a particular
rubric.
Args:
scorer_submission_uuid (str): The submission uuid for the Scorer's
workflow. The submission being assessed can be determined via the
peer workflow of the grading student.
scorer_id (str): The user ID for the user giving this assessment. This
is required to create an assessment on a submission.
options_selected (dict): Dictionary mapping criterion names to the
option names the user selected for that criterion.
criterion_feedback (dict): Dictionary mapping criterion names to the
free-form text feedback the user gave for the criterion.
Since criterion feedback is optional, some criteria may not appear
in the dictionary.
overall_feedback (unicode): F
Chirayu-sopho/Hindi-DateTime-Parser | functions.py | Python | mit | 4,957 | 0.047004
import datetime
from dateutil.relativedelta import *
## give final date and time after parsing by changing current date-time
def change_datetime ( c="0", y=0, mt=0, w=0, d=0, h=0, m=0, s=0):
#mt = mt + 12*y
#d = d + 30*mt
now = datetime.datetime.now()
change = relativedelta( years =+ y, months =+ mt, weeks =+ w, days =+ d, hours =+ h, minutes =+ m, seconds =+ s)
#print (now + change)
if c == "date":
return (now + change).date()
elif c == "time":
return (now + change).time()
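A hypothetical call: the offsets are added to the current datetime via relativedelta, and c selects which component comes back:

print change_datetime(c="date", w=1, d=2)  # the date nine days from now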
## make separate date and time functions
#def change_date (y=0, m=0, w=0, d=0):
#def change_time (h=0, m=0, s=0):
## make separate functions for setting date and time and print -- if not provided the data
## give final date and time after parsing by setting date-time
def set_datetime (y=0, mt=0, d=0, h=0, m=0, s=0, c="0"):
a = ""
if d!=0:
a = a + str(d) + "/"
if mt!=0:
a = a + str(mt) + "/"
if y!=0:
a = a + str(y)
#a = a + " "
if h!=0:
a = a + str(h) + ":"
if m!=0:
a = a + str(m) + ":"
if s!=0:
a = a + str(s)
if c!="0":
a = a + " "
a = a + str(c)
#print (a, "a")
return a
## make function for am/pm
def get_disease (string):
with open("dataset.txt") as f:
content = f.readlines()
names = []
definitions = []
values = []
check = 1
## TODO
## remove the common words from defintion (or input) (or use replace) like a, the,disease, etc. while splitting definition in words
## Also do stemming
## Go through dataset once manually to get these words
for word in content:
if word[0] == 'n':
## TODO think better way in which pop is not required, directly append only if required
if check == 1:
names.append(word)
check = 0
if check == 0:
names.pop()
names.append(word)
if word[0] == 'd':
definitions.append(word)
check = 1
values.append(0)
#string = input("Give Text:")
words = string.split(" ")
for word in words:
for defintion in definitions:
defintion.replace('. ',' ')
defintion.replace(', ',' ')
definition_words = defintion.split(" ")
if word in definition_words:
values[definitions.index(defintion)] += 1
#print (word)
highest = 0
index_of_highest = 0
answer = []
## TODO if there are more than one highest
for value in values:
if value > highest:
highest = value
index_of_highest = values.index(value)
answer.append(names[index_of_highest])
answer.append(highest)
answer.append(definitions[index_of_highest])
    for word in words:
        newd = definitions[index_of_highest].replace('. ', ' ')
        newda = newd.replace(', ', ' ')
        definition_words = newda.split(" ")
        ## cannot pass with or in split, find better way
        #print (definition_words)
        if word in definition_words:
            # collect the words that matched the winning definition
            answer.append(word)
        # print (definitions[index_of_highest][definition.index(word)])
## make definition sort only usable things
## find a way like , and parameters for passing more than value in relplace
return answer
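# Minimal sketch of the same word-overlap scoring on in-memory strings
# (no dataset.txt required; the sample definition is made up):
def _overlap_score(text, definition):
    words = set(text.lower().split())
    # count how many definition words also appear in the input text
    return sum(1 for w in definition.lower().split() if w in words)
# _overlap_score("fever and cough", "a disease causing fever") -> 1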
def get_sentences(str): ## NB: the parameter name shadows the built-in str
import re
## use of regular expressions
## str cannot be changed further, always make a new object
words = str.split(" ")
Abbrs = ['Mr.', 'mr.', 'Mrs.', 'mrs.', 'Dr.', 'dr.' , 'Er.', 'er.', 'Prof.', 'prof.', 'Br.', 'br.', 'Fr.', 'fr.', 'Sr.', 'sr.', 'Jr.', 'jr.']
SentenceType = []
for abbr in Abbrs:
if abbr in words:
new_wo
|
rd = abbr.replace(abbr[len(abbr)-1], "")
str = str.replace(abbr, new_word)
#print (new_str)
## str.replace(abbr[len(abbr)-1], " ")
## Do directly in string without using words
for word in words:
if re.findall(r'\.(.)+\.', word):
|
new_word = word.replace('.','')
str = str.replace(word, new_word)
#print (word)
#print (new_word)
#print (new_str2)
if '.' in word[0:len(word)-2]:
new_word = word.replace('.', '[dot]')
str = str.replace(word, new_word)
for letter in str:
if letter == '.':
SentenceType.append("Assertive")
if letter == '?':
SentenceType.append("Interrogative")
if letter == '!' or letter == '!!':
SentenceType.append('Exclamatory')
sentences = re.split("[ ]*[.|?|!|!!]+[ ]*", str)
if (str[len(str)-1] == '.') or (str[len(str)-1] == '?') or (str[len(str)-1] == '!'):
sentences.pop()
return dict(zip(sentences, SentenceType))
## TODOs
## Extend Abbrs list
## Dots back in sentences
## If abbr of acronyms with dots at end of a sentence?
## what if sentence doesn't end with !!? Get the expression from this word.
## If already a new line exist.
## Also implement through machine learning to obtain results without help of punctuation.
## Sentence Type : What about Imperative, compound, complex etc. Exclamatory Sentence or Word
## ensure sentences are returned sequentially
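# Usage sketch (made-up input; relies on get_sentences above):
def _demo_get_sentences():
    # expected: {'Dr Smith arrived': 'Assertive', 'Did he call': 'Interrogative',
    #            'Great': 'Exclamatory'}
    return get_sentences("Dr. Smith arrived. Did he call? Great!")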
def get_tokens(str):
words = str.split(" ")
return words
## Make an algorithm for different kind of words for forming effective tokens before returning
|
jeremyflores/cocosCairo
|
oldTests/maze.py
|
Python
|
mit
| 9,600 | 0.030417 |
from cocosCairo.cocosCairo import * # Convenience module to import all other modules
from splash import *
BACKGROUND_COLOR = Color(0.1, 0.3, 0.7)
MAZE_PATHS = ["maze01.maze", "maze02.maze", "maze03.maze"] # an ordered list of the maze files
PATH_INDEX = 0 # the index of the next maze file to load
class MazeScene(Scene):
def __init__(self, modelPath):
Scene.__init__(self)
self._modelPath = modelPath
def setup(self):
self.setBackgroundColor(BACKGROUND_COLOR)
def onEnterFromFinishedTransition(self):
Scene.onEnterFromFinishedTransition(self)
self._mazePathController = MazePathController(self._modelPath)
self.addController(self._mazePathController)
x = self.getSize().width/2
y = self.getSize().height/2
self._mazePathController.getNode().setPosition(Point(x,y))
self._mazePathController.getNode().setOpacity(0.0)
action = EaseSineInOut(FadeIn(1.0))
cbAction = CallbackInstantAction(self._onFadeInCompletion)
sequence = Sequence(action, cbAction)
self._mazePathController.getNode().runAction(sequence)
def _onFadeInCompletion(self):
self._mazePathController.getNode().showPieces()
class MazePathModel(AbstractModel):
def __init__(self, filepath):
AbstractModel.__init__(self)
self._modelArray = []
self._playerLocation = [0,0]
self._goalLocation = [0,0]
self._moveCount = 0
f = open(filepath)
# populate the model array
for line in f:
line = line.strip()
            if len(line) < 1 or line[0] == "#" or line[:2] == "//": # if the line is a comment or empty (== rather than `is` for string comparison)
continue # then move on to the next line
row = line.split(',')
row = [int(x[:1]) for x in row if (len(x) > 0 and x != '\n')] # trim and convert to int
self._modelArray.append(row)
# look for special characters
for i in range(0, len(self._modelArray[0])):
for j in range(0, len(self._modelArray)):
                if self._modelArray[j][i] == 2: # == rather than `is`: identity checks on ints are unreliable
                    self._playerLocation = [i, j]
                    self._modelArray[j][i] = 1
                elif self._modelArray[j][i] == 3:
                    self._goalLocation = [i, j]
                    self._modelArray[j][i] = 1
f.close()
self.didChange()
def getModelArray(self):
return self._modelArray
def getPlayerLocation(self):
return self._playerLocation
def getGoalLocation(self):
return self._goalLocation
def getMoveCount(self):
return self._moveCount
def movePlayerLocation(self, direction):
self._moveCount += 1
row = self._playerLocation[1]
col = self._playerLocation[0]
if directio
|
n == "left":
if col-1 < 0 or self._modelArray[row][col-1] != 1:
return
else:
self._playerLocation = [col-1, row]
self.didChange()
elif direction == "right":
if col+1 >= len(self._modelArray[0]) or self._modelArray[row][col+1] != 1:
return
else:
self._playerLocation = [col+1, row]
|
self.didChange()
elif direction == "up":
if row-1 < 0 or self._modelArray[row-1][col] != 1:
return
else:
self._playerLocation = [col, row-1]
self.didChange()
elif direction == "down":
if row+1 >= len(self._modelArray) or self._modelArray[row+1][col] != 1:
return
else:
self._playerLocation = [col, row+1]
self.didChange()
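# Collision sketch (standalone; the grid below is hand-written, 1 = walkable):
def _can_step_left(grid, row, col):
    # the same bounds-plus-wall test the model applies before each move
    return col - 1 >= 0 and grid[row][col - 1] == 1
# _can_step_left([[1, 1, 0]], 0, 1) -> True; _can_step_left([[1, 1, 0]], 0, 0) -> False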
class MazePathNode(Node):
def __init__(self, rect=None):
Node.__init__(self, rect)
self._hasRenderedTiles = False
self._hasFinishedActions = False
self._player = None
self._goal = None
self._tileSize = 50
self.setAnchorPoint(Point(0.5, 0.5))
def setOpacity(self, opacity):
Node.setOpacity(self, opacity)
for child in self.getChildren():
child.setOpacity(opacity)
def onModelChange(self, model):
if not model:
return
# render the tiles
if not self._hasRenderedTiles:
self._hasRenderedTiles = True
modelArray = model.getModelArray()
width = self._tileSize * len(modelArray[0])
height = self._tileSize * len(modelArray)
self.setSize(Size(width, height))
for i in range(0, len(modelArray[0])):
for j in range(0, len(modelArray)):
x = i*self._tileSize
y = j*self._tileSize
w = self._tileSize
h = self._tileSize
rect = MakeRect(x, y, w, h)
                    if modelArray[j][i] == 0: # 'matrix' lookup is [row,col], but that's equivalent to (y,x) instead of (x,y), so switch the i,j indices (== rather than `is` for the int comparison)
continue
else:
color = WhiteColor()
rectangle = RectangleNode(rect, color)
self.addChild(rectangle, 1)
# set up the player's sprite
x = model.getPlayerLocation()[0] * self._tileSize
y = model.getPlayerLocation()[1] * self._tileSize
if not self._player:
self._player = Sprite("images/character.png", Point(x,y))
self.addChild(self._player,3)
self._player.setScale(0.01)
self._player.setAnchorPoint(Point(0.5,0.5))
size = self._player.getSize().width
self._player.setPosition(pointAdd(self._player.getPosition(), Point(size/2, size/2)))
else:
self._hasFinishedActions = False
action = EaseSineInOut(MoveTo(0.05, Point(x,y)))
cbAction = CallbackInstantAction(self.onPlayerMotionCompletion)
sequence = Sequence(action, cbAction)
self._player.runAction(sequence)
# set up the goal sprite
x = model.getGoalLocation()[0] * self._tileSize
y = model.getGoalLocation()[1] * self._tileSize
if not self._goal:
self._goal = Sprite("images/goal.png", Point(x,y))
self.addChild(self._goal,2)
self._goal.setScale(0.01)
self._goal.setAnchorPoint(Point(0.5,0.5))
size = self._goal.getSize().width
self._goal.setPosition(pointAdd(self._goal.getPosition(), Point(size/2, size/2)))
else:
self._goal.setPosition(Point(x,y))
def showPieces(self):
if self._goal:
action = EaseBounceOut(ScaleTo(0.75, 1.0))
sequence = Sequence(action, CallbackInstantAction(self.onGoalScaleCompletion))
self._goal.runAction(sequence)
def onGoalScaleCompletion(self):
self._goal.setAnchorPoint(PointZero())
size = self._goal.getSize().width
self._goal.setPosition(pointSub(self._goal.getPosition(), Point(size/2, size/2)))
if self._player:
action = EaseBounceOut(ScaleTo(0.75, 1.0))
sequence = Sequence(action, CallbackInstantAction(self.onPlayerScaleCompletion))
self._player.runAction(sequence)
def onPlayerScaleCompletion(self):
self._player.setAnchorPoint(PointZero())
size = self._player.getSize().width
self._player.setPosition(pointSub(self._player.getPosition(), Point(size/2, size/2)))
self._hasFinishedActions = True
def onPlayerMotionCompletion(self):
self._hasFinishedActions = True
def reset(self):
self._hasRenderedTiles = False
self._hasFinishedActions = False
self.removeAllChildren()
self._player = None
self._goal = None
def hasFinishedActions(self):
return self._hasFinishedActions
class MazePathController(AbstractController):
def __init__(self, modelPath):
AbstractController.__init__(self, MazePathNode(RectZero()), MazePathModel(modelPath))
def onKeyPress(self, event):
if not self.getNode().hasFinishedActions():
return
key = event.key
if key == "Left":
self.getModel().movePlayerLocation("left")
elif key == "Right":
self.getModel().movePlayerLocation("right")
elif key == "Up":
self.getModel().movePlayerLocation("up")
elif key == "Down":
self.getModel().movePlayerLocation("down")
if self.getModel().getPlayerLocation() == self.getModel().getGoalLocation():
winScene = WinScene(self.getModel().getMoveCount())
transition = MoveInTopTransition(.5, winScene)
self.getDirector().replaceScene(transition)
return True
class WinScene(Scene, GestureListener):
def __init__(self, moveCount):
Scene.__init__(self)
self._currentCount = 0
self._moveCount = moveCount
def setup(self):
self.setBackgroundColor(WhiteColor())
self._label = PangoLabel()
self.setMarkupText(0)
self._label.setAnchorPoint(Point(0.5, 0.5))
self._label.setAlignment("center")
self._label.setFontSize(48)
self.addChild(self._label)
def onEnter(self):
Scene.onEnter(self)
self.getDirector().getGestureDispatch().addListener(self)
x = self.getSize().width/2
y = self.getSize().height/2
self._label.setPosition(Point(x,y))
def onEnterFromFinishedTransition(self):
Scene.onEnterFromFinishedTransition(self)
self.scheduleCallback(self._updateCount, 0.005)
def onExit(self):
Scene.onExit(self)
self.getDi
|
endlessm/chromium-browser
|
net/data/path_builder_unittest/validity_date_prioritization/generate-certs.py
|
Python
|
bsd-3-clause
| 1,833 | 0.001091 |
#!/usr/bin/python
# Copyright (c) 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
A chain with four possible intermediates with different notBefore and notAfter
dates, for testing path bulding prioritization.
"""
import sys
sys.path += ['../..']
import gencerts
DATE_A = '150101120000Z'
DATE_B = '150102120000Z'
DATE_C = '180101120000Z'
DATE_D = '180102120000Z'
root = gencerts.create_self_signed_root_certificate('Root')
root.set_validity_range(DATE_A, DATE_D)
int_ac = gencerts.create_intermediate_certificate('Intermediate', root)
int_ac.set_validity_range(DATE_A, DATE_C)
int_ad = gencerts.create_intermediate_certificate('Intermediate', root)
int_ad.set_validity_range(DATE_A, DATE_D)
int_ad.set_key(int_ac.get_key())
int_bc = gencerts.create_intermediate_certificate('Intermediate', root)
int_bc.set_validity_range(DATE_B, DATE_C)
int_bc.set_key(int_ac.get_key())
int_bd = gencerts.create_intermediate_certificate('Intermediate', root)
int_bd.set_validity_range(DATE_B, DATE_D)
int_bd.set_key(int_ac.get_key())
|
target = gencerts.create_end_entity_certificate('Target', int_ac)
target.set_validity_range(DATE_A, DATE_D)
gencerts.write_chain('The root', [root], out_pem='root.pem')
gencerts.write_chain('Intermediate with validity range A..C',
[int_ac], out_pem='int_ac.pem')
gencerts.write_chain('Intermediate with validity range A..D',
[int_ad],
|
out_pem='int_ad.pem')
gencerts.write_chain('Intermediate with validity range B..C',
[int_bc], out_pem='int_bc.pem')
gencerts.write_chain('Intermediate with validity range B..D',
[int_bd], out_pem='int_bd.pem')
gencerts.write_chain('The target', [target], out_pem='target.pem')
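# Coverage sketch (informational only; does not affect the generated files):
# the four intermediates enumerate every pairing of {DATE_A, DATE_B} with
# {DATE_C, DATE_D} -- (A,C), (A,D), (B,C), (B,D) -- so path building can be
# checked to prefer the intermediate with the best validity window.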
|
twilio/twilio-python
|
twilio/rest/api/v2010/account/sip/domain/ip_access_control_list_mapping.py
|
Python
|
mit
| 16,185 | 0.00451 |
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class IpAccessControlListMappingList(ListResource):
def __init__(self, version, account_sid, domain_sid):
"""
Initialize the IpAccessControlListMappingList
:param Version version: Version that contains the resource
:param account_sid: The unique id of the Account that is responsible for this resource.
:param domain_sid: The unique string that identifies the SipDomain resource.
:returns: twili
|
o.rest.api.v2
|
010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingList
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingList
"""
super(IpAccessControlListMappingList, self).__init__(version)
# Path Solution
self._solution = {'account_sid': account_sid, 'domain_sid': domain_sid, }
self._uri = '/Accounts/{account_sid}/SIP/Domains/{domain_sid}/IpAccessControlListMappings.json'.format(**self._solution)
def create(self, ip_access_control_list_sid):
"""
Create the IpAccessControlListMappingInstance
:param unicode ip_access_control_list_sid: The unique id of the IP access control list to map to the SIP domain
:returns: The created IpAccessControlListMappingInstance
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingInstance
"""
data = values.of({'IpAccessControlListSid': ip_access_control_list_sid, })
payload = self._version.create(method='POST', uri=self._uri, data=data, )
return IpAccessControlListMappingInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
domain_sid=self._solution['domain_sid'],
)
def stream(self, limit=None, page_size=None):
"""
Streams IpAccessControlListMappingInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'])
def list(self, limit=None, page_size=None):
"""
Lists IpAccessControlListMappingInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
        :returns: List of up to limit results
:rtype: list[twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
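    # Usage sketch (hypothetical resource handle obtained from a configured
    # client; `stream` pages lazily while `list` loads everything up front):
    #   for mapping in mappings.stream(limit=20):  # fetches pages on demand
    #       print(mapping)
    #   all_mappings = mappings.list(page_size=50)  # eager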
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of IpAccessControlListMappingInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of IpAccessControlListMappingInstance
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingPage
"""
data = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(method='GET', uri=self._uri, params=data, )
return IpAccessControlListMappingPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of IpAccessControlListMappingInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of IpAccessControlListMappingInstance
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return IpAccessControlListMappingPage(self._version, response, self._solution)
def get(self, sid):
"""
Constructs a IpAccessControlListMappingContext
:param sid: A 34 character string that uniquely identifies the resource to fetch.
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
"""
return IpAccessControlListMappingContext(
self._version,
account_sid=self._solution['account_sid'],
domain_sid=self._solution['domain_sid'],
sid=sid,
)
def __call__(self, sid):
"""
Constructs a IpAccessControlListMappingContext
:param sid: A 34 character string that uniquely identifies the resource to fetch.
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
"""
return IpAccessControlListMappingContext(
self._version,
account_sid=self._solution['account_sid'],
domain_sid=self._solution['domain_sid'],
sid=sid,
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.IpAccessControlListMappingList>'
class IpAccessControlListMappingPage(Page):
def __init__(self, version, response, solution):
"""
Initialize the IpAccessControlListMappingPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param account_sid: The unique id of the Account that is responsible for this resource.
:param domain_sid: The unique string that identifies the SipDomain resource.
:returns: twilio
|
yakupc/Artificial-Intelligence
|
Algorithms/SolveTSPSimulatedAnnealing/SolveTSPSimulatedAnnealing.py
|
Python
|
mit
| 4,196 | 0.023832 |
#==============================================================================
#description : Solves travelling salesman problem by using Hill Climbing.
#author : Yakup Cengiz
#date : 20151121
#version : 0.1
#notes :
#python_version : 3.5.0
#Reference : http://www.psychicorigami.com/category/tsp/
#==============================================================================
import math
import sys
import os
import random
CommonPath = os.path.abspath(os.path.join('..', 'Common'))
sys.path.append(CommonPath)
import tsp
def GenerateInitialPath(tour_length):
tour=list(range(tour_length))
random.shuffle(tour)
return tour
MAX_ITERATION = 50000
def reversed_sections(tour):
'''generator to return all possible variations where the section between two cities are swapped'''
for i,j in tsp.AllEdges
|
(len(tour)):
if i != j:
|
copy=tour[:]
if i < j:
copy[i:j+1]=reversed(tour[i:j+1])
else:
copy[i+1:]=reversed(tour[:j])
copy[:j]=reversed(tour[i+1:])
if copy != tour: # no point returning the same tour
yield copy
def kirkpatrick_cooling(start_temp, alpha):
T = start_temp
while True:
yield T
T = alpha * T
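# Cooling sketch (the start temperature and alpha here are illustrative):
def _demo_cooling():
    schedule = kirkpatrick_cooling(100, 0.5)
    return [next(schedule) for _ in range(4)]  # [100, 50.0, 25.0, 12.5]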
def P(prev_score,next_score,temperature):
if next_score > prev_score:
return 1.0
else:
return math.exp( -abs(next_score-prev_score)/temperature )
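# Acceptance sketch (scores and temperature are made up): uphill moves are
# always taken; downhill moves survive with probability exp(-|delta|/T).
def _demo_acceptance():
    assert P(10, 12, 5.0) == 1.0  # better score: always accept
    return P(10, 8, 5.0)          # exp(-2/5) ~= 0.67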
class ObjectiveFunction:
'''class to wrap an objective function and
keep track of the best solution evaluated'''
def __init__(self,objective_function):
self.objective_function=objective_function
self.best=None
self.best_score=None
def __call__(self,solution):
score=self.objective_function(solution)
if self.best is None or score > self.best_score:
self.best_score=score
self.best=solution
return score
def ApplySimulatedAnnealing(init_function,move_operator,objective_function,max_evaluations,start_temp,alpha):
# wrap the objective function (so we record the best)
objective_function=ObjectiveFunction(objective_function)
current = init_function()
current_score = objective_function(current)
iterationCount = 1
cooling_schedule = kirkpatrick_cooling(start_temp, alpha)
for temperature in cooling_schedule:
done = False
# examine moves around our current position
for next in move_operator(current):
if iterationCount >= max_evaluations:
done=True
break
next_score=objective_function(next)
iterationCount+=1
# probablistically accept this solution always accepting better solutions
p = P(current_score, next_score, temperature)
# random.random() basic function random() generates a random float uniformly in the range [0.0, 1.0).
# p function returns data in range [0.0, 1.0]
if random.random() < p:
current = next
current_score= next_score
break
# see if completely finished
if done: break
best_score = objective_function.best_score
best = objective_function.best
return (iterationCount,best_score,best)
def SolveTSP():
    print("Starting to solve the travelling salesman problem")
    coordinates = tsp.ReadCoordinatesFromFile(".\cityCoordinates.csv")
    distance_matrix = tsp.ComputeDistanceMatrix(coordinates)
    init_function = lambda: GenerateInitialPath(len(coordinates))
    objective_function = lambda tour: -tsp.ComputeTourLength(distance_matrix, tour)
    start_temp, alpha = 100, 0.995
    iterationCount, best_score, shortestPath = ApplySimulatedAnnealing(init_function, reversed_sections, objective_function, MAX_ITERATION, start_temp, alpha)
    print(iterationCount, best_score, shortestPath)
    tsp.DrawPath(coordinates, shortestPath, "TSP.png")
if __name__ == "__main__":
    SolveTSP()
|
cortesi/mitmproxy
|
mitmproxy/tools/console/flowview.py
|
Python
|
mit
| 8,005 | 0.001749 |
import math
import sys
from functools import lru_cache
from typing import Optional, Union # noqa
import urwid
from mitmproxy import contentviews
from mitmproxy import http
from mitmproxy.tools.console import common
from mitmproxy.tools.console import layoutwidget
from mitmproxy.tools.console import flowdetailview
from mitmproxy.tools.console import searchable
from mitmproxy.tools.console import tabs
import mitmproxy.tools.console.master # noqa
from mitmproxy.utils import strutils
class SearchError(Exception):
pass
class FlowViewHeader(urwid.WidgetWrap):
def __init__(
self,
master: "mitmproxy.tools.console.master.ConsoleMaster",
) -> None:
self.master = master
self.focus_changed()
def focus_
|
changed(self):
cols, _ = self.master.ui.get_cols_rows()
if self.master.view.focus.flow:
self._w = common.format_flow(
self.master.view.focus.flow,
False,
extended=True,
hos
|
theader=self.master.options.showhost,
max_url_len=cols,
)
else:
self._w = urwid.Pile([])
class FlowDetails(tabs.Tabs):
def __init__(self, master):
self.master = master
super().__init__([])
self.show()
self.last_displayed_body = None
def focus_changed(self):
if self.master.view.focus.flow:
self.tabs = [
(self.tab_request, self.view_request),
(self.tab_response, self.view_response),
(self.tab_details, self.view_details),
]
self.show()
else:
self.master.window.pop()
@property
def view(self):
return self.master.view
@property
def flow(self):
return self.master.view.focus.flow
def tab_request(self):
if self.flow.intercepted and not self.flow.response:
return "Request intercepted"
else:
return "Request"
def tab_response(self):
if self.flow.intercepted and self.flow.response:
return "Response intercepted"
else:
return "Response"
def tab_details(self):
return "Detail"
def view_request(self):
return self.conn_text(self.flow.request)
def view_response(self):
return self.conn_text(self.flow.response)
def view_details(self):
return flowdetailview.flowdetails(self.view, self.flow)
def content_view(self, viewmode, message):
if message.raw_content is None:
msg, body = "", [urwid.Text([("error", "[content missing]")])]
return msg, body
else:
full = self.master.commands.execute("view.getval @focus fullcontents false")
if full == "true":
limit = sys.maxsize
else:
limit = contentviews.VIEW_CUTOFF
flow_modify_cache_invalidation = hash((
message.raw_content,
message.headers.fields,
getattr(message, "path", None),
))
# we need to pass the message off-band because it's not hashable
self._get_content_view_message = message
return self._get_content_view(viewmode, limit, flow_modify_cache_invalidation)
@lru_cache(maxsize=200)
def _get_content_view(self, viewmode, max_lines, _):
message = self._get_content_view_message
self._get_content_view_message = None
description, lines, error = contentviews.get_message_content_view(
viewmode, message
)
if error:
self.master.log.debug(error)
# Give hint that you have to tab for the response.
if description == "No content" and isinstance(message, http.HTTPRequest):
description = "No request content (press tab to view response)"
        # If the user has a wide terminal, they get fewer lines; this should not be an issue.
chars_per_line = 80
max_chars = max_lines * chars_per_line
total_chars = 0
text_objects = []
for line in lines:
txt = []
for (style, text) in line:
if total_chars + len(text) > max_chars:
text = text[:max_chars - total_chars]
txt.append((style, text))
total_chars += len(text)
if total_chars == max_chars:
break
# round up to the next line.
total_chars = int(math.ceil(total_chars / chars_per_line) * chars_per_line)
text_objects.append(urwid.Text(txt))
if total_chars == max_chars:
text_objects.append(urwid.Text([
("highlight", "Stopped displaying data after %d lines. Press " % max_lines),
("key", "f"),
("highlight", " to load all data.")
]))
break
return description, text_objects
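    # Rounding sketch (uses the same 80 chars-per-line assumption as above):
    # after consuming 95 chars, int(math.ceil(95 / 80) * 80) == 160, i.e. a
    # partially filled line is billed as a whole line toward the cutoff.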
def conn_text(self, conn):
if conn:
hdrs = []
for k, v in conn.headers.fields:
# This will always force an ascii representation of headers. For example, if the server sends a
#
# X-Authors: Made with ❤ in Hamburg
#
# header, mitmproxy will display the following:
#
# X-Authors: Made with \xe2\x9d\xa4 in Hamburg.
#
# The alternative would be to just use the header's UTF-8 representation and maybe
# do `str.replace("\t", "\\t")` to exempt tabs from urwid's special characters escaping [1].
# That would in some terminals allow rendering UTF-8 characters, but the mapping
# wouldn't be bijective, i.e. a user couldn't distinguish "\\t" and "\t".
# Also, from a security perspective, a mitmproxy user couldn't be fooled by homoglyphs.
#
# 1) https://github.com/mitmproxy/mitmproxy/issues/1833
# https://github.com/urwid/urwid/blob/6608ee2c9932d264abd1171468d833b7a4082e13/urwid/display_common.py#L35-L36,
k = strutils.bytes_to_escaped_str(k) + ":"
v = strutils.bytes_to_escaped_str(v)
hdrs.append((k, v))
txt = common.format_keyvals(
hdrs,
key_format="header"
)
viewmode = self.master.commands.call("console.flowview.mode")
msg, body = self.content_view(viewmode, conn)
cols = [
urwid.Text(
[
("heading", msg),
]
),
urwid.Text(
[
" ",
('heading', "["),
('heading_key', "m"),
('heading', (":%s]" % viewmode)),
],
align="right"
)
]
title = urwid.AttrWrap(urwid.Columns(cols), "heading")
txt.append(title)
txt.extend(body)
else:
txt = [
urwid.Text(""),
urwid.Text(
[
("highlight", "No response. Press "),
("key", "e"),
("highlight", " and edit any aspect to add one."),
]
)
]
return searchable.Searchable(txt)
class FlowView(urwid.Frame, layoutwidget.LayoutWidget):
keyctx = "flowview"
title = "Flow Details"
def __init__(self, master):
super().__init__(
FlowDetails(master),
header = FlowViewHeader(master),
)
self.master = master
def focus_changed(self, *args, **kwargs):
self.body.focus_changed()
self.header.focus_changed()
|
MSeifert04/numpy
|
numpy/distutils/cpuinfo.py
|
Python
|
bsd-3-clause
| 23,013 | 0.00491 |
#!/usr/bin/env python
"""
cpuinfo
Copyright 2002 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@cens.ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy (BSD style) license. See LICENSE.txt that came with
this distribution for specifics.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
Pearu Peterson
"""
from __future__ import division, absolute_import, print_function
__all__ = ['cpu']
import sys, re, types
import os
if sys.version_info[0] >= 3:
from subprocess import getstatusoutput
else:
from commands import getstatusoutput
import warnings
import platform
from numpy.distutils.compat import get_exception
def getoutput(cmd, successful_status=(0,), stacklevel=1):
try:
status, output = getstatusoutput(cmd)
except EnvironmentError:
e = get_exception()
warnings.warn(str(e), UserWarning, stacklevel=stacklevel)
return False, ""
if os.WIFEXITED(status) and os.WEXITSTATUS(status) in successful_status:
return True, output
return False, output
def command_info(successful_status=(0,), stacklevel=1, **kw):
info = {}
for key in kw:
ok, output = getoutput(kw[key], successful_status=successful_status,
stacklevel=stacklevel+1)
if ok:
info[key] = output.strip()
return info
def command_by_line(cmd, successful_status=(0,), stacklevel=1):
ok, output = getoutput(cmd, successful_status=successful_status,
stacklevel=stacklevel+1)
if not ok:
return
for line in output.splitlines():
yield line.strip()
def key_value_from_command(cmd, sep, successful_status=(0,),
stacklevel=1):
d = {}
for line in command_by_line(cmd, successful_status=successful_status,
stacklevel=stacklevel+1):
l = [s.strip() for s in line.split(sep, 1)]
if len(l) == 2:
d[l[0]] = l[1]
return d
class CPUInfoBase(object):
"""Holds CPU information and provides methods for requiring
the availability of various CPU features.
"""
def _try_call(self, func):
try:
return func()
except Exception:
pass
def __getattr__(self, name):
if not name.startswith('_'):
if hasattr(self, '_'+name):
attr = getattr(self, '_'+name)
if isinstance(attr, types.MethodType):
return lambda func=self._try_call,attr=attr : func(attr)
else:
return lambda : None
raise AttributeError(name)
def _getNCPUs(self):
return 1
def __get_nbits(self):
abits = platform.architecture()[0]
nbits = re.compile(r'(\d+)bit').search(abits).group(1)
return nbits
def _is_32bit(self):
return self.__get_nbits() == '32'
def _is_64bit(self):
return self.__get_nbits() == '64'
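# Dispatch sketch (illustrative subclass): cpu.is_<Name>() resolves through
# __getattr__ to _is_<Name> wrapped in _try_call, so a failing probe yields
# None instead of raising.
class _DemoCPUInfo(CPUInfoBase):
    def _is_Demo(self):
        return True
    def _is_Broken(self):
        raise RuntimeError("probe failure")
# _DemoCPUInfo().is_Demo() -> True; _DemoCPUInfo().is_Broken() -> None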
class LinuxCPUInfo(CPUInfoBase):
info = None
def __init__(self):
if self.info is not None:
return
info = [ {} ]
ok, output = getoutput('uname -m')
if ok:
info[0]['uname_m'] = output.strip()
try:
fo = open('/proc/cpuinfo')
except EnvironmentError:
e = get_exception()
warnings.warn(str(e), UserWarning, stacklevel=2)
else:
for line in fo:
name_value = [s.strip() for s in line.split(':', 1)]
if len(name_value) != 2:
continue
name, value = name_value
if not info or name in info[-1]: # next processor
info.append({})
info[-1][name] = value
fo.close()
self.__class__.info = info
def _not_impl(self): pass
# Athlon
def _is_AMD(self):
return self.info[0]['vendor_id']=='AuthenticAMD'
def _is_AthlonK6_2(self):
return self._is_AMD() and self.info[0]['model'] == '2'
def _is_AthlonK6_3(self):
return self._is_AMD() and self.info[0]['model'] == '3'
def _is_AthlonK6(self):
return re.match(r'.*?AMD-K6', self.info[0]['model name']) is not None
def _is_AthlonK7(self):
return re.match(r'.*?AMD-K7', self.info[0]['model name']) is not None
def _is_AthlonMP(self):
return re.match(r'.*?Athlon\(tm\) MP\b',
self.info[0]['model name']) is not None
def _is_AMD64(self):
return self.is_AMD() and self.info[0]['family'] == '15'
def _is_Athlon64(self):
return re.match(r'.*?Athlon\(tm\) 64\b',
self.info[0]['model name']) is not None
def _is_AthlonHX(self):
return re.match(r'.*?Athlon HX\b',
self.info[0]['model name']) is not None
def _is_Opteron(self):
return re.match(r'.*?Opteron\b',
self.info[0]['model name']) is not None
def _is_Hammer(self):
return re.match(r'.*?Hammer\b',
self.info[0]['model name']) is not None
# Alpha
def _is_Alpha(self):
return self.info[0]['cpu']=='Alpha'
def _is_EV4(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV4'
def _is_EV5(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV5'
def _is_EV56(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV56'
def _is_PCA56(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'PCA56'
# Intel
#XXX
_is_i386 = _not_impl
def _is_Intel(self):
return self.info[0]['vendor_id']=='GenuineIntel'
def _is_i486(self):
return self.info[0]['cp
|
u']=='i486'
def _is_i586(self):
return self.is_Intel() and self.info[0]['cpu family'] == '5'
def _is_i686(self):
return self.is_Intel() and self.info[0]['cpu family'] == '6'
def _is_Celeron(self):
return re.match(r'.*?Celeron',
self.info
|
[0]['model name']) is not None
def _is_Pentium(self):
return re.match(r'.*?Pentium',
self.info[0]['model name']) is not None
def _is_PentiumII(self):
return re.match(r'.*?Pentium.*?II\b',
self.info[0]['model name']) is not None
def _is_PentiumPro(self):
return re.match(r'.*?PentiumPro\b',
self.info[0]['model name']) is not None
def _is_PentiumMMX(self):
return re.match(r'.*?Pentium.*?MMX\b',
self.info[0]['model name']) is not None
def _is_PentiumIII(self):
return re.match(r'.*?Pentium.*?III\b',
self.info[0]['model name']) is not None
def _is_PentiumIV(self):
return re.match(r'.*?Pentium.*?(IV|4)\b',
self.info[0]['model name']) is not None
def _is_PentiumM(self):
return re.match(r'.*?Pentium.*?M\b',
self.info[0]['model name']) is not None
def _is_Prescott(self):
return self.is_PentiumIV() and self.has_sse3()
def _is_Nocona(self):
return (self.is_Intel()
and (self.info[0]['cpu family'] == '6'
or self.info[0]['cpu family'] == '15')
and (self.has_sse3() and not self.has_ssse3())
and re.match(r'.*?\blm\b', self.info[0]['flags']) is not None)
def _is_Core2(self):
return (self.is_64bit() and self.is_Intel() and
re.match(r'.*?Core\(TM\)2\b',
self.info[0]['model name']) is not None)
def _is_Itanium(self):
return re.match(r'.*?Itanium\b',
self.info[0]['family']) is not None
def _is_XEON(self):
return re.match(r'.*?XEON\b',
self.info[0]['model name'], re.IGNORECASE) is not None
_is_Xeon = _is_XEON
# Varia
def _is_singleCPU(self):
return len(self.info) == 1
def _getNCPUs(self):
return len(self.info)
def _has_fdiv_bug(self):
|
mumuxme/vim-config
|
test/test.py
|
Python
|
gpl-3.0
| 2,612 | 0.005393 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Above the run-comment and file encoding comment.
# TODO FIXME XXX
# Keywords
with break continue del exec return pass print raise global assert lambda yield
for while if elif else import from as try except finally and in is not or
yield from
def functionname
class Classname
def 哈哈
class 哈哈
await
async def Test
async with
async for
# Builtin objects.
True False Ellipsis None NotImplemented
# Builtin function and types.
__import__ abs all any apply basestring bool buffer callable chr classmethod
cmp coerce compile complex delattr dict dir divmod enumerate eval execfile file
filter float frozenset getattr globals hasattr hash help hex id input int
intern isinstance issubclass iter len list locals long map max min object oct
open ord pow property range raw_input reduce reload repr reversed round set
setattr slice sorted sta
|
ticmethod str sum super tuple type unichr unicode vars
xrange zip
# Builtin exceptions and warnings.
Base
|
Exception Exception StandardError ArithmeticError LookupError
EnvironmentError
AssertionError AttributeError EOFError FloatingPointError GeneratorExit IOError
ImportError IndexError KeyError KeyboardInterrupt MemoryError NameError
NotImplementedError OSError OverflowError ReferenceError RuntimeError
StopIteration SyntaxError IndentationError TabError SystemError SystemExit
TypeError UnboundLocalError UnicodeError UnicodeEncodeError UnicodeDecodeError
UnicodeTranslateError ValueError WindowsError ZeroDivisionError
Warning UserWarning DeprecationWarning PendingDepricationWarning SyntaxWarning
RuntimeWarning FutureWarning ImportWarning UnicodeWarning
# Decorators.
@ decoratorname
@ object.__init__(arg1, arg2)
# Numbers
0 1 2 9 10 0x1f .3 12.34 0j 0j 34.2E-3 0b10 0o77 1023434 0x0
# Erroneous numbers
077 100L 0xfffffffL 0L 08 0xk 0x 0b102 0o78 0o123LaB
# Strings
" test " ' test '
"""
test
"""
'''
test
'''
" \a\b\c\"\'\n\r \x34\077 \08 \xag"
r" \" \' "
"testтест"
b"test"
b"test\r\n\xffff"
b"тестtest"
br"test"
br"\a\b\n\r"
# Formattings
" %f "
b" %f "
"{0.name!r:b} {0[n]} {name!s: } {{test}} {{}} {} {.__len__:s}"
b"{0.name!r:b} {0[n]} {name!s: } {{test}} {{}} {} {.__len__:s}"
"${test} ${test ${test}aname $$$ $test+nope"
b"${test} ${test ${test}aname $$$ $test+nope"
# Doctests.
"""
Test:
>>> a = 5
>>> a
5
Test
"""
'''
Test:
>>> a = 5
>>> a
5
Test
'''
# Erroneous symbols or bad variable names.
$ ? 6xav
&& || ===
# Indentation errors.
break
# Trailing space errors.
break
"""
test
"""
|
CMPUT404F16T06/CMPUT404Project
|
mysite/socialnet/migrations/0030_author_displayname.py
|
Python
|
apache-2.0
| 480 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-22
|
22:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('socialnet', '0029_auto_20161121_0543'),
]
opera
|
tions = [
migrations.AddField(
model_name='author',
name='displayname',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
|
themartorana/python-postmark
|
postmark/django_backend.py
|
Python
|
mit
| 6,739 | 0.00089 |
from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
from django.core.exceptions import ImproperlyConfigured
from django.core.mail import EmailMessage, EmailMultiAlternatives
import base64
from postmark.core import PMMail, PMBatchMail
class PMEmailMessage(EmailMessage):
def __init__(self, *args, **kwargs):
if 'tag' in kwargs:
self.tag = kwargs['tag']
del kwargs['tag']
else:
self.tag = None
if 'track_opens' in kwargs:
self.track_opens = kwargs['track_opens']
del kwargs['track_opens']
else:
self.track_opens = getattr(settings, 'POSTMARK_TRACK_OPENS', False)
if 'message_stream' in kwargs:
self.message_stream = kwargs['message_stream']
del kwargs['message_stream']
else:
self.message_stream = None
super(PMEmailMessage, self).__init__(*args, **kwargs)
class PMEmailMultiAlternatives(EmailMultiAlternatives):
def __init__(self, *args, **kwargs):
if 'tag' in kwargs:
self.tag = kwargs['tag']
del kwargs['tag']
else:
self.tag = None
if 'track_opens' in kwargs:
self.track_opens = kwargs['track_opens']
del kwargs['track_opens']
else:
self.track_opens = getattr(settings, 'POSTMARK_TRACK_OPENS', False)
if 'message_stream' in kwargs:
self.message_stream = kwargs['message_stream']
del kwargs['message_stream']
else:
self.message_stream = None
super(PMEmailMultiAlternatives, self).__init__(*args, **kwargs)
class EmailBackend(BaseEmailBackend):
def __init__(self, api_key=None, default_sender=None, **kwargs):
"""
Initialize the backend.
"""
super(EmailBackend, self).__init__(**kwargs)
self.api_key = api_key if api_key is not None else getattr(settings, 'POSTMARK_API_KEY', None)
if self.api_key is None:
raise ImproperlyConfigured('POSTMARK API key must be set in Django settings file or passed to backend constructor.')
self.default_sender = getattr(settings, 'POSTMARK_SENDER', default_sender)
self.test_mode = getattr(settings, 'POSTMARK_TEST_MODE', False)
self.return_message_id = getattr(settings, 'POSTMARK_RETURN_MESSAGE_ID', False)
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of email
messages sent.
"""
if not email_messages:
return
sent, instance = self._send(email_messages)
if sent and self.return_message_id:
return [m.message_id for m in instance.messages]
elif sent:
return len(email_messages)
return 0
def _build_message(self, message):
"""A helper method to convert a PMEmailMessage to a PMMail"""
if not message.recipients():
return False
recipients = ','.join(message.to)
recipients_cc = ','.join(message.cc)
recipients_bcc = ','.join(message.bcc)
text_body = message.body
html_body = None
if isinstance(message, EmailMultiAlternatives):
for alt in message.alternatives:
if alt[1] == "text/html":
html_body = alt[0]
break
if getattr(message, 'content_subtype', None) == 'html':
# Don't send html content as plain text
text_body = None
html_body = message.body
reply_to = ','.join(message.reply_to)
custom_headers = {}
if message.extra_headers and isinstance(message.extra_headers, dict):
if 'Reply-To' in message.extra_headers:
reply_to = message.extra_headers.pop('Reply-To')
if len(message.extra_headers):
custom_heade
|
rs = message.extra_headers
attachments = []
if message.attachments and isinstance(message.attachments, list):
if len(message.attachments):
|
for item in message.attachments:
if isinstance(item, tuple):
(f, content, m) = item
if isinstance(content, str):
content = content.encode()
content = base64.b64encode(content)
                        # b64encode returns bytes on Python 3. PMMail needs a
# str (for JSON serialization). Convert on Python 3
# only to avoid a useless performance hit on Python 2.
if not isinstance(content, str):
content = content.decode()
attachments.append((f, content, m))
else:
attachments.append(item)
message_stream = getattr(message, 'message_stream', None)
postmark_message = PMMail(api_key=self.api_key,
subject=message.subject,
sender=message.from_email,
to=recipients,
cc=recipients_cc,
bcc=recipients_bcc,
text_body=text_body,
html_body=html_body,
reply_to=reply_to,
custom_headers=custom_headers,
attachments=attachments,
message_stream=message_stream)
postmark_message.tag = getattr(message, 'tag', None)
postmark_message.track_opens = getattr(message, 'track_opens', False)
return postmark_message
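    # Attachment sketch (content is made up): a (filename, content, mimetype)
    # tuple has its bytes base64-encoded and then decoded to str, e.g.
    # base64.b64encode(b'hi').decode() == 'aGk='.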
def _send(self, messages):
"""A helper method that does the actual sending."""
if len(messages) == 1:
to_send = self._build_message(messages[0])
if to_send is False:
# The message was missing recipients.
# Bail.
return False, None
else:
pm_messages = list(map(self._build_message, messages))
pm_messages = [m for m in pm_messages if m]
if len(pm_messages) == 0:
# If after filtering, there aren't any messages
# to send, bail.
return False, None
to_send = PMBatchMail(messages=pm_messages)
try:
to_send.send(test=self.test_mode)
except:
if self.fail_silently:
return False, to_send
raise
return True, to_send
|
cogniteev/easy-upgrade
|
easy_upgrade/lib/stow.py
|
Python
|
apache-2.0
| 2,651 | 0 |
import os
import os.path as osp
import shutil
import subprocess
from .. api import Installer, parse_version
from .. toolbox import find_executable, pushd
class StowInstaller(Installer):
name = 'stow'
def __init__(self, provider, release, config):
super(StowInstaller, self).__init__(provider, release, config)
self.path = config['path']
self.pkg_path = osp.join(self.path, 'stow')
self.activate = config.get('activate', True)
self.executable = find_executable(
*config.get('stow', ('stow', 'xstow'))
)
def release_dir_name(self,
|
version=''):
return '{}-{}'.format(self.release.pkg_name, version)
def get_local_versions(self):
|
versions = []
if not osp.isdir(self.pkg_path):
return versions
for p in os.listdir(self.pkg_path):
fp = osp.join(self.pkg_path, p)
if osp.isdir(fp) and p.startswith(self.release_dir_name()):
versions.append(p[len(self.release_dir_name()):])
return versions
def installed_version(self):
"""
:return: most recent version available in stow packages directory.
:rtype: string
"""
        # pick the highest version by parse_version ordering (numeric-aware,
        # unlike plain string comparison); None when nothing is installed yet
        versions = self.get_local_versions()
        return max(versions, key=parse_version) if versions else None
def _stow(self, *args):
with pushd(self.pkg_path):
subprocess.check_call([self.executable] + list(args))
def disable_package(self, version):
self._stow('-D', self.release_dir_name(version))
def enable_package(self, version):
self._stow(self.release_dir_name(version))
def install(self, fetched_items_path, version):
rdir_name = self.release_dir_name(version)
release_path = osp.join(self.pkg_path, rdir_name)
if not osp.isdir(self.pkg_path):
os.makedirs(self.pkg_path)
elif osp.isdir(release_path):
raise Exception(
"Cannot install {}/{} in {}: directory exists".format(
self.provider.name,
self.release.name,
release_path
)
)
shutil.copytree(fetched_items_path, release_path)
if self.activate:
versions_to_disable = set(self.get_local_versions())
versions_to_disable.remove(version)
for v in versions_to_disable:
self.disable_package(v)
self.enable_package(version)
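# Ordering sketch (example versions are made up): parse_version compares
# numerically, which is the property installed_version relies on:
#   parse_version('1.10') > parse_version('1.9')  -> True
#   '1.10' > '1.9'                                -> False (lexicographic)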
|
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/opus_gui/general_manager/controllers/dependency_viewer.py
|
Python
|
gpl-2.0
| 1,509 | 0.003976 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 Univers
|
ity of Washington
# See opus_core/LICENSE
import os
from PyQt4 import QtGui, Qt, QtCore
from opus_gui.general_manager.views.ui_dependency_viewer import Ui_DependencyViewer
class DependencyViewer(QtGui.QDialog, Ui_DependencyViewer):
def __init__(self, parent_window):
flags = QtCore.Qt.WindowTitleHint | QtCore.Qt.WindowSystemMenuHint | QtCore.Qt.WindowMaximizeButtonHint
QtGui.QDialog.__init__(self, parent_window, flags)
self.setupUi(self
|
)
self.setModal(True) #TODO: this shouldn't be necessary, but without it the window is unresponsive
def show_error_message(self):
self.lbl_error.setVisible(True)
self.scrollArea.setVisible(False)
def show_graph(self, file_path, name):
self.lbl_error.setVisible(False)
self.scrollArea.setVisible(True)
self.setWindowTitle("Dependency graph of %s" % name)
self.image_file = file_path
pix = QtGui.QPixmap.fromImage(QtGui.QImage(file_path))
self.label.setPixmap(pix)
self.scrollAreaWidgetContents.setMinimumSize(pix.width(), pix.height())
self.label.setMinimumSize(pix.width(), pix.height())
rect = Qt.QApplication.desktop().screenGeometry(self)
self.resize(min(rect.width(), pix.width() + 35), min(rect.height(), pix.height() + 80))
self.update()
def on_closeWindow_released(self):
self.close()
os.remove(self.image_file)
|
networks-lab/metaknowledge
|
metaknowledge/medline/__init__.py
|
Python
|
gpl-2.0
| 458 | 0.004367 |
"""
These are the functions used to p
|
rocess medline (pubmed) f
|
iles at the backend. They are meant for internal use by metaknowledge.
"""
from .recordMedline import MedlineRecord, medlineRecordParser
from .medlineHandlers import isMedlineFile, medlineParser
from .tagProcessing.tagNames import tagNameDict, authorBasedTags, tagNameConverterDict
from .tagProcessing.specialFunctions import medlineSpecialTagToFunc
from .tagProcessing.tagFunctions import *
|
ladybug-analysis-tools/ladybug-core
|
ladybug/datatype/__init__.py
|
Python
|
gpl-3.0
| 901 | 0.00111 |
# coding=utf-8
"""Module of Data Types (eg. Temperature, Area, etc.)
Possesses capabilities for unit conversions and range checks.
It also includes descriptions of the data types and the units.
Properti
|
es:
TYPES: A tuple indicating all currently supported data types.
BASETYPES: A tuple indicating all base types. Base types are the
data types on which unit systems are defined.
UNITS: A dictionary containing all currently supported units. The
keys of this dictionary are the base type names (eg. 'Temperature').
TYPESDICT
|
: A dictionary containing pointers to the classes of each data type.
The keys of this dictionary are the data type names.
"""
from .base import _DataTypeEnumeration
_data_types = _DataTypeEnumeration(import_modules=True)
TYPES = _data_types.types
BASETYPES = _data_types.base_types
UNITS = _data_types.units
TYPESDICT = _data_types.types_dict
|
isard-vdi/isard
|
engine/engine/start.py
|
Python
|
agpl-3.0
| 2,127 | 0.007522 |
from engine.services.lib.debug import check_if_debugging
check_if_debugging()
import inspect
import logging
import os
import sys
import traceback
from logging.handlers import RotatingFileHandler
from subprocess import check_call, check_output
from flask import Flask
## Moved populate & upgrade from webapp
from initdb.populate import Populate
from initdb.upgrade import Upgrade
from pid import PidFile, PidFileAlreadyLockedError, PidFileAlreadyRunningError
check_output(("/isard/generate_certs.sh"), text=True).strip()
try:
p = Populate()
except Exception as e:
print(traceback.format_exc())
print("Error populating...")
exit(1)
try:
u = Upgrade()
except Exception as e:
print(traceback.format_exc())
print("Error Upgrading...")
exit(1)
## End
from engine.services.lib.functions import check_tables_populated
check_tables_populated()
from engine.models.engine import Engine
from engine.services import db
def run(app):
    # gevent is only needed if this (currently unused) helper is called
    from gevent.pywsgi import WSGIServer
    http_server = WSGIServer(("0.0.0.0", 5555), app)
    http_server.serve_forever()
# if app.debug:
# from werkzeug.debug import DebuggedApplication
# app.wsgi_app = DebuggedApplication( app.wsgi_app, True )
if __name__ == "__main__":
p = PidFile("engine")
try:
p.create()
except PidFileAlreadyLockedError:
import time
err_pid = PidFile(str(time.time()))
err_pid.create()
while True:
time.sleep(1)
app = Flask(__name__)
app.m = Engine(with_status_threads=False)
app.db = db
# remove default logging for get/post messages
werk = logging.getLogger("werkzeug")
werk.setLevel(logging.ERROR)
# add log handler
handler = RotatingFileHandler
|
("api.log", maxBytes=10000, backupCount=1)
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)
# register blueprints
from engine.api import api as api_blueprint
app.regist
|
er_blueprint(api_blueprint, url_prefix="") # url_prefix /api?
# run(app)
if os.environ.get("LOG_LEVEL") == "DEBUG":
app.run(debug=True, host="0.0.0.0")
else:
app.run(host="0.0.0.0")
|
liangtianyou/ST
|
stclient/menus.py
|
Python
|
gpl-3.0
| 3,290 | 0.024443 |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
impo
|
rt settings
#----------------------------------------
# globals
#----------------------------------------
web = settings.web
stow = web.utils.storage
#----------------------------------------
# Initialize menus / permission control
#----------------------------------------
d
|
ef init_menus():
_ = web.config.get('_gettext')
menus = [
stow({
'id':'base',
'title':_("menu_system_base"),
'menus':[
#stow({
            # 'id':'menusystemstatus', # system status
# 'icon_class':'icon-speedometer',
# 'title':_("menu_system_status"),
# 'content':[
# stow({'name':_('system_status'),'url':"javascript:load_page('systemstatus')",'id':'systemstatus'}),
# stow({'name':_('service_control'),'url':"javascript:load_page('servicecontrol')",'id':'servicecontrol'}),
# ]
#}),
stow({
                'id':'menunetwork', # network settings
'icon_class':'icon-network',
'title':_("menu_network"),
'content':[
stow({'name':_('iface_manage'),'url':"javascript:load_page('iface')",'id':'iface'}),
stow({'name':_('dns_manage'),'url':"javascript:load_page('dns')",'id':'dns'}),
stow({'name':_('route_manage'),'url':"javascript:load_page('route')",'id':'route'}),
]
}),
stow({
                'id':'menustorage', # storage settings
'icon_class':'icon-storage',
'title':_("menu_storage"),
'content':[
stow({'name':_('mdraid_manage'),'url':"javascript:load_page('mdraid')",'id':'mdraid'}),
stow({'name':_('lvm_manage'),'url':"javascript:load_page('lvm')",'id':'lvm'})
]
})
]
}),
#stow({
# 'id':'nas',
# 'title':_("menu_nas"),
# 'menus':[
#
# ]
#}),
stow({
'id':'san',
'title':_("menu_san"),
'menus':[
stow({
                'id':'menusanbase', # basic SAN configuration
'icon_class':'icon-speedometer',
'title':_("menu_san_base"),
'content':[
stow({'name':_('ipsan'),'url':"javascript:load_page('ipsan')",'id':'ipsan'}),
stow({'name':_('fcsan'),'url':"javascript:load_page('fcsan')",'id':'fcsan'}),
]
}),
#stow({
            # 'id':'menusanoptional', # advanced SAN configuration
# 'icon_class':'icon-speedometer',
# 'title':_("menu_san_optional"),
# 'content':[
# stow({'name':_('ipsan'),'url':"javascript:load_page('ipsan')",'id':'ipsan'}),
# stow({'name':_('fcsan'),'url':"javascript:load_page('fcsan')",'id':'fcsan'}),
# ]
#})
]
})
]
return menus
|
bytedance/fedlearner
|
fedlearner/fedavg/cluster/__init__.py
|
Python
|
apache-2.0
| 90 | 0 |
fro
|
m .cluster_pb2 import FLNodeDef, FLClusterDef
from .clust
|
er_spec import FLClusterSpec
|
woozyking/tidehunter
|
tidehunter/__init__.py
|
Python
|
mit
| 858 | 0.002331 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
HTTP streaming toolbox with flow control, written in Python.
:copyright: (c) 2014 Runzhou Li (Leo)
:license: The MIT License (MIT), see LICENSE for details.
"""
__title__ = 'tidehunter'
__version__ = '1.0.1'
VERSION = tuple(map(int, __version__.split('.')))
__author__ = 'Runzhou Li (Leo)'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Runzhou Li (Leo)'
from tidehunter.stream import (
Hunter, SimpleStateCounter
)
__all__ = [
'Hunter', 'SimpleStateCounter'
]
# Set
|
default logging handler to avoid "No handler found" warnings.
import logging
try: # Py
|
thon 2.7+
from logging import NullHandler
except ImportError: # pragma: no cover
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
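# Pattern sketch: with the NullHandler attached, library logging stays silent
# until the application opts in, e.g.:
#   import logging, tidehunter
#   logging.basicConfig(level=logging.INFO)  # application-level opt-in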
|
ylitormatech/terapialaskutus
|
therapyinvoicing/customers/forms.py
|
Python
|
bsd-3-clause
| 7,087 | 0.001978 |
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Customer, Session, CompanyProfile
class CustomerUpdateForm(forms.ModelForm):
"""
Update Customer form
Field enhancements:
* therapyCategory uses forms.TypedChoiceField.
"""
class Meta:
model = Customer
fields = ['status', 'firstName', 'lastName', 'additionalName', 'ssn', 'address', 'zipCode', 'city', 'telephone', 'email',
'therapyCategory', 'sessionprice', 'sessionpriceKelaRefund'
# , 'statementpriceKela'
]
labels = {
'firstName': _("Etunimi"),
'additionalName': _("Muut etunimet"),
'lastName': _("Sukunimi"),
'ssn': _("Sosiaaliturvatunnus"),
'address': _("Osoite"),
'zipCode': _("Postinumero"),
'city': _("Postitoimipaikka"),
'telephone': _("Puhelin"),
'email': _("Sähköposti"),
'status': _("Aktiivinen asiakas?"),
'therapyCategory': _("Terapialuokitus"),
'sessionprice': _("Tapaamisen perushinta"),
'sessionpriceKelaRefund': _("Kelan korvaus"),
# 'statementpriceKela': _("Kelakorvaus lausunnosta"),
}
therapyCategory = forms.TypedChoiceField(
label=Meta.labels['therapyCategory'],
choices=Customer.THERAPY_CATEGORY_CHOICES,
widget=forms.Select,
required=True
)
class CustomerCreateForm(forms.ModelForm):
"""
Create Customer form.
Field enhancements:
* therapyCategory uses forms.TypedChoiceField.
"""
class Meta:
model = Customer
fields = ['firstName', 'lastName', 'additionalName', 'ssn', 'address', 'zipCode', 'city', 'telephone', 'email',
'status',
'therapyCategory', 'sessionprice', 'sessionpriceKelaRefund']
labels = {
'firstName': _("Etunimi"),
'additionalName': _("Muut etunimet"),
'lastName': _("Sukunimi"),
'ssn': _("Sosiaaliturvatunnus"),
'address': _("Osoite"),
'zipCode': _("Postinumero"),
'city': _("Postitoimipaikka"),
'telephone': _("Puhelin"),
'email': _("Sähköposti"),
'status': _("Aktiivinen asiakas?"),
'therapyCategory': _("Terapialuokitus"),
'sessionprice': _("Tapaamisen perushinta"),
'sessionpriceKelaRefund': _("Kelan korvaus"),
}
therapyCategory = forms.TypedChoiceField(
label=Meta.labels['therapyCategory'],
choices=Customer.THERAPY_CATEGORY_CHOICES,
widget=forms.Select,
required=True
)
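# Usage sketch (field values are made up; assumes configured Django settings):
#   form = CustomerCreateForm(data={'firstName': 'Maija', 'lastName': 'M.',
#                                   'status': True, 'therapyCategory': '...'})
#   form.is_valid() then checks therapyCategory against
#   Customer.THERAPY_CATEGORY_CHOICES via the TypedChoiceField above.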
class SessionUpdateForm(forms.ModelForm):
"""
    Update Session form.
Field enhancements:
* time uses forms.TimeField with format '%H:%M'
* sessionInvoiceType uses forms.TypedChoiceField choices from Session.SESSION_INVOICE_CHOICES
* kelaInvoiceType uses forms.TypedChoiceField choices from Session.KELAINVOICABLE_CHOICES
* sessionType uses forms.TypedChoiceField choices from Session.SESSION_TYPE_CHOICES
"""
class Meta:
model = Session
fields = [
'date',
'time',
'sessionType',
'sessionInvoiceType',
'kelaInvoiceType',
'sessionprice',
'sessionpriceKelaRefund',
'sessionDone'
]
labels = {
'date': _("Tapaamispäivä"),
'time': _("Tapaamisaika"),
'sessionType': _("Tapaamisen tyyppi"),
'sessionInvoiceType': _("Tapaamisen laskutus"),
'kelaInvoiceType': _("Maksaako Kela osan asiakkaan kustannuksista?"),
'sessionprice': _("Tapaamisen hinta"),
'sessionpriceKelaRefund': _("Kelan maksama osuus tapaamisen hinnasta"),
'sessionDone': _("Onko tapaaminen pidetty?")
}
time = forms.TimeField(
label=Meta.labels['time'],
widget=forms.TimeInput(format='%H:%M'),
required=True
)
sessionInvoiceType = forms.TypedChoiceField(
label=Meta.labels['sessionInvoiceType'],
choices=Session.SESSION_INVOICE_CHOICES,
widget=forms.Select,
required=True
)
kelaInvoiceType = forms.TypedChoiceField(
label=Meta.labels['kelaInvoiceType'],
choices=Session.KELAINVOICABLE_CHOICES,
widget=forms.Select,
required=True
)
sessionType = forms.TypedChoiceField(
label=Meta.labels['sessionType'],
choices=Session.SESSION_TYPE_CHOICES,
widget=forms.Select,
required=True
)
class CompanyProfileUpdateForm(forms.ModelForm):
"""
    CompanyProfile update form.
Field enhancements:
    * serviceproviderType uses forms.TypedChoiceField choices from CompanyProfile.SERVICEPROVIDER_TYPE_CHOICES
    * invoiceRefType uses forms.TypedChoiceField choices from CompanyProfile.INVOICEREF_TYPE_CHOICES
    * taxAdvanceType uses forms.TypedChoiceField choices from CompanyProfile.TAX_ADVANCE_COLLECTION_TYPE_CHOICES
"""
class Meta:
model = CompanyProfile
fields = [
'companyName',
'firstName',
'additionalName',
'lastName',
'address',
'zipCode',
'city',
'country',
'telephone',
'email',
'vatId',
            'iban',
            'bic',
'serviceproviderType',
'invoiceRefType',
'taxAdvanceType'
]
labels = {
'companyName': _("Oman yrityksen nimi"),
'firstName': _("Etunimi"),
'additionalName': _("Muut etunimet"),
'lastName': _("Sukunimi"),
'address': _("Osoite"),
'zipCode': _("Postinumero"),
'city': _("Postitoimipaikka"),
'country': _("Maa"),
'telephone': _("Puhelin"),
'email': _("Sähköposti"),
'vatId': _("Y-tunnus/Henkilötunnus"),
'iban': _("Pankkitili (IBAN)"),
'bic': _("Pankkiyhteys (BIC)"),
'serviceproviderType': _("Palveluntarjoajatyyppi"),
'invoiceRefType': _("Kelan laskun viitetyyppi"),
            'taxAdvanceType': _("Ennakonpidätysperuste")
}
serviceproviderType = forms.TypedChoiceField(
label=Meta.labels['serviceproviderType'],
choices=CompanyProfile.SERVICEPROVIDER_TYPE_CHOICES,
widget=forms.Select,
required=True
)
invoiceRefType = forms.TypedChoiceField(
label=Meta.labels['invoiceRefType'],
choices=CompanyProfile.INVOICEREF_TYPE_CHOICES,
widget=forms.Select,
required=True
)
taxAdvanceType = forms.TypedChoiceField(
label=Meta.labels['taxAdvanceType'],
choices=CompanyProfile.TAX_ADVANCE_COLLECTION_TYPE_CHOICES,
widget=forms.Select,
required=True
)
|
pratikmallya/heat
|
heat/engine/resources/openstack/keystone/role_assignments.py
|
Python
|
apache-2.0
| 15,791 | 0 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common import exception
from heat.common.i18n import _
from heat.engine import constraints
from heat.engine import properties
from heat.engine import resource
from heat.engine import support
class KeystoneRoleAssignmentMixin(object):
"""Implements role assignments between user/groups and project/domain.
heat_template_version: 2013-05-23
parameters:
... Group or User parameters
group_role:
type: string
description: role
group_role_domain:
type: string
description: group role domain
group_role_project:
type: string
description: group role project
resources:
admin_group:
type: OS::Keystone::Group OR OS::Keystone::User
properties:
... Group or User properties
roles:
- role: {get_param: group_role}
domain: {get_param: group_role_domain}
- role: {get_param: group_role}
project: {get_param: group_role_project}
"""
PROPERTIES = (
ROLES
) = (
'roles'
)
_ROLES_MAPPING_PROPERTIES = (
ROLE, DOMAIN, PROJECT
) = (
'role', 'domain', 'project'
)
mixin_properties_schema = {
ROLES: properties.Schema(
properties.Schema.LIST,
_('List of role assignments.'),
schema=properties.Schema(
properties.Schema.MAP,
_('Map between role with either project or domain.'),
schema={
ROLE: properties.Schema(
properties.Schema.STRING,
_('Keystone role'),
required=True,
constraints=([constraints.
CustomConstraint('keystone.role')])
),
PROJECT: properties.Schema(
properties.Schema.STRING,
_('Keystone project'),
constraints=([constraints.
CustomConstraint('keystone.project')])
),
DOMAIN: properties.Schema(
properties.Schema.STRING,
_('Keystone domain'),
constraints=([constraints.
                                      CustomConstraint('keystone.domain')])
),
}
),
update_allowed=True
)
}
def _add_role_assignments_to_group(self, group_id, role_assignments):
for role_assignment in self._normalize_to_id(role_assignments):
if role_assignment.get(self.PROJECT) is not None:
self.client().client.roles.grant(
role=role_assignment.get(self.ROLE),
project=role_assignment.get(self.PROJECT),
group=group_id
)
elif role_assignment.get(self.DOMAIN) is not None:
self.client().client.roles.grant(
role=role_assignment.get(self.ROLE),
domain=role_assignment.get(self.DOMAIN),
group=group_id
)
def _add_role_assignments_to_user(self, user_id, role_assignments):
for role_assignment in self._normalize_to_id(role_assignments):
if role_assignment.get(self.PROJECT) is not None:
self.client().client.roles.grant(
role=role_assignment.get(self.ROLE),
project=role_assignment.get(self.PROJECT),
user=user_id
)
elif role_assignment.get(self.DOMAIN) is not None:
self.client().client.roles.grant(
role=role_assignment.get(self.ROLE),
domain=role_assignment.get(self.DOMAIN),
user=user_id
)
def _remove_role_assignments_from_group(self, group_id, role_assignments):
for role_assignment in self._normalize_to_id(role_assignments):
if role_assignment.get(self.PROJECT) is not None:
self.client().client.roles.revoke(
role=role_assignment.get(self.ROLE),
project=role_assignment.get(self.PROJECT),
group=group_id
)
elif role_assignment.get(self.DOMAIN) is not None:
self.client().client.roles.revoke(
role=role_assignment.get(self.ROLE),
domain=role_assignment.get(self.DOMAIN),
group=group_id
)
def _remove_role_assignments_from_user(self, user_id, role_assignments):
for role_assignment in self._normalize_to_id(role_assignments):
if role_assignment.get(self.PROJECT) is not None:
self.client().client.roles.revoke(
role=role_assignment.get(self.ROLE),
project=role_assignment.get(self.PROJECT),
user=user_id
)
elif role_assignment.get(self.DOMAIN) is not None:
self.client().client.roles.revoke(
role=role_assignment.get(self.ROLE),
domain=role_assignment.get(self.DOMAIN),
user=user_id
)
def _normalize_to_id(self, role_assignment_prps):
role_assignments = []
if role_assignment_prps is None:
return role_assignments
for role_assignment in role_assignment_prps:
role = role_assignment.get(self.ROLE)
project = role_assignment.get(self.PROJECT)
domain = role_assignment.get(self.DOMAIN)
role_assignments.append({
self.ROLE: self.client_plugin().get_role_id(role),
self.PROJECT: (self.client_plugin().
get_project_id(project)) if project else None,
self.DOMAIN: (self.client_plugin().
get_domain_id(domain)) if domain else None
})
return role_assignments
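    # Illustration (assumed names): _normalize_to_id maps name-based assignments
    # like [{'role': 'heat_stack_user', 'project': 'demo', 'domain': None}]
    # to id-based ones such as
    # [{'role': '<role-id>', 'project': '<project-id>', 'domain': None}].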
def _find_differences(self, updated_prps, stored_prps):
updated_role_project_assignments = []
updated_role_domain_assignments = []
        # Split the properties into two sets of role assignments
        # (project, domain) from updated properties
for role_assignment in updated_prps or []:
if role_assignment.get(self.PROJECT) is not None:
updated_role_project_assignments.append(
'%s:%s' % (
role_assignment[self.ROLE],
role_assignment[self.PROJECT]))
elif (role_assignment.get(self.DOMAIN)
is not None):
updated_role_domain_assignments.append(
'%s:%s' % (role_assignment[self.ROLE],
role_assignment[self.DOMAIN]))
stored_role_project_assignments = []
stored_role_domain_assignments = []
        # Split the properties into two sets of role assignments
        # (project, domain) from stored properties
for role_assignment in (stored_prps or []):
if role_assignment.get(self.PROJECT) is not None:
stored_role_project_assignments.append(
'%s:%s' % (
role_assignment[self.ROLE],
role_assignment[self.PROJECT]))
elif (role_assignment.get(self.DOMAIN)
is not None):
|
antoniodemora/git-cola
|
cola/qtutils.py
|
Python
|
gpl-2.0
| 30,765 | 0.000163 |
# Copyright (C) 2007-2018 David Aguilar and contributors
"""Miscellaneous Qt utility functions."""
from __future__ import division, absolute_import, unicode_literals
import os
from qtpy import compat
from qtpy import QtGui
from qtpy import QtCore
from qtpy import QtWidgets
from qtpy.QtCore import Qt
from qtpy.QtCore import Signal
from . import core
from . import hotkeys
from . import icons
from . import utils
from .i18n import N_
from .compat import int_types
from .compat import ustr
from .models import prefs
from .widgets import defs
STRETCH = object()
SKIPPED = object()
def active_window():
"""Return the active window for the current application"""
return QtWidgets.QApplication.activeWindow()
def connect_action(action, fn):
"""Connect an action to a function"""
action.triggered[bool].connect(lambda x: fn())
def connect_action_bool(action, fn):
"""Connect a triggered(bool) action to a function"""
action.triggered[bool].connect(fn)
def connect_button(button, fn):
"""Connect a button to a function"""
# Some versions of Qt send the `bool` argument to the clicked callback,
# and some do not. The lambda consumes all callback-provided arguments.
button.clicked.connect(lambda *args, **kwargs: fn())
def connect_checkbox(widget, fn):
"""Connect a checkbox to a function taking bool"""
    widget.clicked.connect(lambda *args, **kwargs: fn(get(widget)))
def connect_released(button, fn):
"""Connect a button to a function"""
button.released.connect(fn)
def button_action(button, action):
"""Make a button trigger an action"""
connect_button(button, action.trigger)
def connect_toggle(toggle, fn):
"""Connect a toggle button to a function"""
toggle.toggled.connect(fn)
def disconnect(signal):
"""Disconnect signal from all slots"""
try:
signal.disconnect()
except TypeError: # allow unconnected slots
pass
def get(widget):
"""Query a widget for its python value"""
if hasattr(widget, 'isChecked'):
value = widget.isChecked()
elif hasattr(widget, 'value'):
value = widget.value()
elif hasattr(widget, 'text'):
value = widget.text()
elif hasattr(widget, 'toPlainText'):
value = widget.toPlainText()
elif hasattr(widget, 'sizes'):
value = widget.sizes()
elif hasattr(widget, 'date'):
value = widget.date().toString(Qt.ISODate)
else:
value = None
return value
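# Examples: get() reads heterogeneous widgets uniformly, e.g.
#   get(QtWidgets.QCheckBox())  -> False  (isChecked)
#   get(QtWidgets.QSpinBox())   -> 0      (value)
#   get(QtWidgets.QLineEdit())  -> ''     (text)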
def hbox(margin, spacing, *items):
"""Create an HBoxLayout with the specified sizes and items"""
return box(QtWidgets.QHBoxLayout, margin, spacing, *items)
def vbox(margin, spacing, *items):
"""Create a VBoxLayout with the specified sizes and items"""
return box(QtWidgets.QVBoxLayout, margin, spacing, *items)
def buttongroup(*items):
"""Create a QButtonGroup for the specified items"""
group = QtWidgets.QButtonGroup()
for i in items:
group.addButton(i)
return group
def set_margin(layout, margin):
"""Set the content margins for a layout"""
layout.setContentsMargins(margin, margin, margin, margin)
def box(cls, margin, spacing, *items):
"""Create a QBoxLayout with the specified sizes and items"""
stretch = STRETCH
skipped = SKIPPED
layout = cls()
layout.setSpacing(spacing)
set_margin(layout, margin)
for i in items:
if isinstance(i, QtWidgets.QWidget):
layout.addWidget(i)
elif isinstance(i, (QtWidgets.QHBoxLayout, QtWidgets.QVBoxLayout,
QtWidgets.QFormLayout, QtWidgets.QLayout)):
layout.addLayout(i)
elif i is stretch:
layout.addStretch()
elif i is skipped:
continue
elif isinstance(i, int_types):
layout.addSpacing(i)
return layout
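# Usage sketch (the margin/spacing values are whatever the caller prefers):
#   layout = vbox(0, 4, some_label, STRETCH, ok_button)
# mixes widgets, nested layouts, int spacing, and the STRETCH sentinel.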
def form(margin, spacing, *widgets):
"""Create a QFormLayout with the specified sizes and items"""
layout = QtWidgets.QFormLayout()
layout.setSpacing(spacing)
layout.setFieldGrowthPolicy(QtWidgets.QFormLayout.ExpandingFieldsGrow)
set_margin(layout, margin)
for idx, (name, widget) in enumerate(widgets):
if isinstance(name, (str, ustr)):
layout.addRow(name, widget)
else:
layout.setWidget(idx, QtWidgets.QFormLayout.LabelRole, name)
layout.setWidget(idx, QtWidgets.QFormLayout.FieldRole, widget)
return layout
def grid(margin, spacing, *widgets):
"""Create a QGridLayout with the specified sizes and items"""
layout = QtWidgets.QGridLayout()
layout.setSpacing(spacing)
set_margin(layout, margin)
for row in widgets:
item = row[0]
if isinstance(item, QtWidgets.QWidget):
layout.addWidget(*row)
elif isinstance(item, QtWidgets.QLayoutItem):
layout.addItem(*row)
return layout
def splitter(orientation, *widgets):
"""Create a spliter over the specified widgets
:param orientation: Qt.Horizontal or Qt.Vertical
"""
layout = QtWidgets.QSplitter()
layout.setOrientation(orientation)
layout.setHandleWidth(defs.handle_width)
layout.setChildrenCollapsible(True)
for idx, widget in enumerate(widgets):
layout.addWidget(widget)
layout.setStretchFactor(idx, 1)
# Workaround for Qt not setting the WA_Hover property for QSplitter
    # Cf. https://bugreports.qt.io/browse/QTBUG-13768
layout.handle(1).setAttribute(Qt.WA_Hover)
return layout
def label(text=None, align=None, fmt=None, selectable=True):
"""Create a QLabel with the specified properties"""
widget = QtWidgets.QLabel()
if align is not None:
widget.setAlignment(align)
if fmt is not None:
widget.setTextFormat(fmt)
if selectable:
widget.setTextInteractionFlags(Qt.TextBrowserInteraction)
widget.setOpenExternalLinks(True)
if text:
widget.setText(text)
return widget
class ComboBox(QtWidgets.QComboBox):
"""Custom read-only combobox with a convenient API"""
def __init__(self, items=None, editable=False, parent=None, transform=None):
super(ComboBox, self).__init__(parent)
self.setEditable(editable)
self.transform = transform
self.item_data = []
if items:
self.addItems(items)
self.item_data.extend(items)
def set_index(self, idx):
idx = utils.clamp(idx, 0, self.count()-1)
self.setCurrentIndex(idx)
def add_item(self, text, data):
self.addItem(text)
self.item_data.append(data)
def current_data(self):
return self.item_data[self.currentIndex()]
def set_value(self, value):
if self.transform:
value = self.transform(value)
try:
index = self.item_data.index(value)
except ValueError:
index = 0
self.setCurrentIndex(index)
def combo(items, editable=False, parent=None):
"""Create a readonly (by default) combobox from a list of items"""
return ComboBox(editable=editable, items=items, parent=parent)
def combo_mapped(data, editable=False, transform=None, parent=None):
"""Create a readonly (by default) combobox from a list of items"""
widget = ComboBox(editable=editable, transform=transform, parent=parent)
for (k, v) in data:
widget.add_item(k, v)
return widget
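# Example: combo_mapped([('Staged', 'staged'), ('Modified', 'modified')])
# displays the first element of each pair while set_value()/current_data()
# operate on the second.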
def textbrowser(text=None):
"""Create a QTextBrowser for the specified text"""
widget = QtWidgets.QTextBrowser()
widget.setOpenExternalLinks(True)
if text:
widget.setText(text)
return widget
def add_completer(widget, items):
"""Add simple completion to a widget"""
completer = QtWidgets.QCompleter(items, widget)
completer.setCaseSensitivity(Qt.CaseInsensitive)
completer.setCompletionMode(QtWidgets.QCompleter.InlineCompletion)
widget.setCompleter(completer)
def prompt(msg, title=None, text='', parent=None):
"""Presents the user with an input widget and returns the input."""
if title is None:
title = msg
if parent is None:
parent = active_window()
result = QtWidgets.QInputDialog.getText(
|
victorkeophila/alien4cloud-cloudify3-provider
|
src/test/resources/outputs/blueprints/openstack/tomcat/wrapper/Tomcat/tosca.interfaces.node.lifecycle.Standard/create/artifacts/tomcat-war-types/scripts/_a4c_tomcat_install.py
|
Python
|
apache-2.0
| 14,972 | 0.004542 |
from cloudify import ctx
from cloudify.exceptions import NonRecoverableError
from cloudify.state import ctx_parameters as inputs
import subprocess
import os
import re
import sys
import time
import threading
import platform
from StringIO import StringIO
from cloudify_rest_client import CloudifyClient
from cloudify import utils
if 'MANAGER_REST_PROTOCOL' in os.environ and os.environ['MANAGER_REST_PROTOCOL'] == "https":
client = CloudifyClient(host=utils.get_manager_ip(), port=utils.get_manager_rest_service_port(), protocol='https', trust_all=True)
else:
client = CloudifyClient(host=utils.get_manager_ip(), port=utils.get_manager_rest_service_port())
def convert_env_value_to_string(envDict):
for key, value in envDict.items():
envDict[str(key)] = str(envDict.pop(key))
def get_host(entity):
if entity.instance.relationships:
for relationship in entity.instance.relationships:
if 'cloudify.relationships.contained_in' in relationship.type_hierarchy:
return relationship.target
return None
def has_attribute_mapping(entity, attribute_name):
    ctx.logger.info('Checking if a mapping exists for attribute {0} in {1}'.format(attribute_name, entity.node.properties))
mapping_configuration = entity.node.properties.get('_a4c_att_' + attribute_name, None)
if mapping_configuration is not None:
if mapping_configuration['parameters'][0] == 'SELF' and mapping_configuration['parameters'][1] == attribute_name:
return False
else:
return True
return False
def process_attribute_mapping(entity, attribute_name, data_retriever_function):
# This is where attribute mapping is defined in the cloudify type
mapping_configuration = entity.node.properties['_a4c_att_' + attribute_name]
ctx.logger.info('Mapping configuration found for attribute {0} is {1}'.format(attribute_name, mapping_configuration))
    # If the mapping configuration exists and concerns SELF, just get the attribute under the mapped name
    # Else if it concerns TARGET, follow the relationship and retrieve the mapped attribute name from the TARGET
if mapping_configuration['parameters'][0] == 'SELF':
return data_retriever_function(entity, mapping_configuration['parameters'][1])
elif mapping_configuration['parameters'][0] == 'TARGET' and entity.instance.relationships:
for relationship in entity.instance.relationships:
if mapping_configuration['parameters'][1] in relationship.type_hierarchy:
return data_retriever_function(relationship.target, mapping_configuration['parameters'][2])
return ""
def get_nested_attribute(entity, attribute_names):
deep_properties = get_attribute(entity, attribute_names[0])
attribute_names_iter = iter(attribute_names)
next(attribute_names_iter)
for attribute_name in attribute_names_iter:
if deep_properties is None:
return ""
else:
deep_properties = deep_properties.get(attribute_name, None)
return deep_properties
def _all_instances_get_nested_attribute(entity, attribute_names):
return None
def get_attribute(entity, attribute_name):
if has_attribute_mapping(entity, attribute_name):
# First check if any mapping exist for attribute
mapped_value = process_attribute_mapping(entity, attribute_name, get_attribute)
ctx.logger.info('Mapping exists for attribute {0} with value {1}'.format(attribute_name, mapped_value))
return mapped_value
    # No mapping exists; try to get the attribute directly from the entity
attribute_value = entity.instance.runtime_properties.get(attribute_name, None)
if attribute_value is not None:
ctx.logger.info('Found the attribute {0} with value {1} on the node {2}'.format(attribute_name, attribute_value, entity.node.id))
return attribute_value
# Attribute retrieval fails, fall back to property
property_value = entity.node.properties.get(attribute_name, None)
if property_value is not None:
return property_value
# Property retrieval fails, fall back to host instance
host = get_host(entity)
if host is not None:
ctx.logger.info('Attribute not found {0} go up to the parent node {1}'.format(attribute_name, host.node.id))
return get_attribute(host, attribute_name)
# Nothing is found
return ""
def _all_instances_get_attribute(entity, attribute_name):
result_map = {}
    # get all instances data using cfy rest client
# we have to get the node using the rest client with node_instance.node_id
# then we will have the relationships
node = client.nodes.get(ctx.deployment.id, entity.node.id)
all_node_instances = client.node_instances.list(ctx.deployment.id, entity.node.id)
for node_instance in all_node_instances:
prop_value = __recursively_get_instance_data(node, node_instance, attribute_name)
if prop_value is not None:
ctx.logger.info('Found the property/attribute {0} with value {1} on the node {2} instance {3}'.format(attribute_name, prop_value, entity.node.id,
node_instance.id))
result_map[node_instance.id + '_'] = prop_value
return result_map
def get_property(entity, property_name):
# Try to get the property value on the node
property_value = entity.node.properties.get(property_name, None)
if property_value is not None:
ctx.logger.info('Found the property {0} with value {1} on the node {2}'.format(property_name, property_value, entity.node.id))
return property_value
# No property found on the node, fall back to the host
host = get_host(entity)
if host is not None:
ctx.logger.info('Property not found {0} go up to the parent node {1}'.format(property_name, host.node.id))
return get_property(host, property_name)
return ""
def get_instance_list(node_id):
result = ''
all_node_instances = client.node_instances.list(ctx.deployment.id, node_id)
for node_instance in all_node_instances:
if len(result) > 0:
result += ','
result += node_instance.id
return result
def get_host_node_name(instance):
for relationship in instance.relationships:
if 'cloudify.relationships.contained_in' in relationship.type_hierarchy:
return relationship.target.node.id
return None
def __get_relationship(node, target_name, relationship_type):
for relationship in node.relationships:
if relationship.get('target_id') == target_name and relationship_type in relationship.get('type_hierarchy'):
return relationship
return None
def __has_attribute_mapping(node, attribute_name):
    ctx.logger.info('Checking if a mapping exists for attribute {0} in {1}'.format(attribute_name, node.properties))
mapping_configuration = node.properties.get('_a4c_att_' + attribute_name, None)
if mapping_configuration is not None:
if mapping_configuration['parameters'][0] == 'SELF' and mapping_configuration['parameters'][1] == attribute_name:
return False
else:
return True
return False
def __process_attribute_mapping(node, node_instance, attribute_name, data_retriever_function):
# This is where attribute mapping is defined in the cloudify type
mapping_configuration = node.properties['_a4c_att_' + attribute_name]
ctx.logger.info('Mapping configuration found for attribute {0} is {1}'.format(attribute_name, mapping_configuration))
    # If the mapping configuration exists and concerns SELF, just get the attribute under the mapped name
    # Else if it concerns TARGET, follow the relationship and retrieve the mapped attribute name from the TARGET
if mapping_configuration['parameters'][0] == 'SELF':
return data_retriever_function(node, node_instance, mapping_configuration['parameters'][1])
elif mapping_configuration['parameters'][0] == 'TARGET' and node_instance.relationships:
for rel in node_instance.relation
|
nlloyd/SubliminalCollaborator
|
libs/twisted/test/test_usage.py
|
Python
|
apache-2.0
| 19,879 | 0.001811 |
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.usage}, a command line option parsing library.
"""
from twisted.trial import unittest
from twisted.python import usage
class WellBehaved(usage.Options):
optParameters = [['long', 'w', 'default', 'and a docstring'],
['another', 'n', 'no docstring'],
['longonly', None, 'noshort'],
['shortless', None, 'except',
'this one got docstring'],
]
optFlags = [['aflag', 'f',
"""
flagallicious docstringness for this here
"""],
['flout', 'o'],
]
def opt_myflag(self):
self.opts['myflag'] = "PONY!"
def opt_myparam(self, value):
self.opts['myparam'] = "%s WITH A PONY!" % (value,)
class ParseCorrectnessTest(unittest.TestCase):
"""
Test Options.parseArgs for correct values under good conditions.
"""
def setUp(self):
"""
Instantiate and parseOptions a well-behaved Options class.
"""
self.niceArgV = ("--long Alpha -n Beta "
"--shortless Gamma -f --myflag "
"--myparam Tofu").split()
self.nice = WellBehaved()
self.nice.parseOptions(self.niceArgV)
def test_checkParameters(self):
"""
Checking that parameters have correct values.
"""
self.assertEqual(self.nice.opts['long'], "Alpha")
self.assertEqual(self.nice.opts['another'], "Beta")
self.assertEqual(self.nice.opts['longonly'], "noshort")
self.assertEqual(self.nice.opts['shortless'], "Gamma")
def test_checkFlags(self):
"""
Checking that flags have correct values.
"""
self.assertEqual(self.nice.opts['aflag'], 1)
self.assertEqual(self.nice.opts['flout'], 0)
def test_checkCustoms(self):
"""
Checking that custom flags and parameters have correct values.
"""
self.assertEqual(self.nice.opts['myflag'], "PONY!")
self.assertEqual(self.nice.opts['myparam'], "Tofu WITH A PONY!")
class TypedOptions(usage.Options):
optParameters = [
['fooint', None, 392, 'Foo int', int],
['foofloat', None, 4.23, 'Foo float', float],
['eggint', None, None, 'Egg int without default', int],
['eggfloat', None, None, 'Egg float without default', float],
]
    def opt_under_score(self, value):
"""
This option has an underscore in its name to exercise the _ to -
translation.
"""
self.underscoreValue = value
opt_u = opt_under_score
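# Usage sketch outside the test harness (argument values are examples):
#   opts = TypedOptions()
#   opts.parseOptions(['--fooint', '7', '--under-score', 'x'])
#   opts['fooint']        -> 7 (coerced to int)
#   opts.underscoreValue  -> 'x'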
class TypedTestCase(unittest.TestCase):
"""
Test Options.parseArgs for options with forced types.
"""
def setUp(self):
self.usage = TypedOptions()
def test_defaultValues(self):
"""
Test parsing of default values.
"""
argV = []
self.usage.parseOptions(argV)
self.assertEqual(self.usage.opts['fooint'], 392)
self.assert_(isinstance(self.usage.opts['fooint'], int))
self.assertEqual(self.usage.opts['foofloat'], 4.23)
self.assert_(isinstance(self.usage.opts['foofloat'], float))
self.assertEqual(self.usage.opts['eggint'], None)
self.assertEqual(self.usage.opts['eggfloat'], None)
def test_parsingValues(self):
"""
Test basic parsing of int and float values.
"""
argV = ("--fooint 912 --foofloat -823.1 "
"--eggint 32 --eggfloat 21").split()
self.usage.parseOptions(argV)
self.assertEqual(self.usage.opts['fooint'], 912)
self.assert_(isinstance(self.usage.opts['fooint'], int))
self.assertEqual(self.usage.opts['foofloat'], -823.1)
self.assert_(isinstance(self.usage.opts['foofloat'], float))
self.assertEqual(self.usage.opts['eggint'], 32)
self.assert_(isinstance(self.usage.opts['eggint'], int))
self.assertEqual(self.usage.opts['eggfloat'], 21.)
self.assert_(isinstance(self.usage.opts['eggfloat'], float))
def test_underscoreOption(self):
"""
A dash in an option name is translated to an underscore before being
dispatched to a handler.
"""
self.usage.parseOptions(['--under-score', 'foo'])
self.assertEqual(self.usage.underscoreValue, 'foo')
def test_underscoreOptionAlias(self):
"""
An option name with a dash in it can have an alias.
"""
self.usage.parseOptions(['-u', 'bar'])
self.assertEqual(self.usage.underscoreValue, 'bar')
def test_invalidValues(self):
"""
Check that passing wrong values raises an error.
"""
argV = "--fooint egg".split()
self.assertRaises(usage.UsageError, self.usage.parseOptions, argV)
class WrongTypedOptions(usage.Options):
optParameters = [
['barwrong', None, None, 'Bar with wrong coerce', 'he']
]
class WeirdCallableOptions(usage.Options):
def _bar(value):
raise RuntimeError("Ouch")
def _foo(value):
raise ValueError("Yay")
optParameters = [
['barwrong', None, None, 'Bar with strange callable', _bar],
['foowrong', None, None, 'Foo with strange callable', _foo]
]
class WrongTypedTestCase(unittest.TestCase):
"""
Test Options.parseArgs for wrong coerce options.
"""
def test_nonCallable(self):
"""
Check that using a non callable type fails.
"""
us = WrongTypedOptions()
argV = "--barwrong egg".split()
self.assertRaises(TypeError, us.parseOptions, argV)
def test_notCalledInDefault(self):
"""
Test that the coerce functions are not called if no values are
provided.
"""
us = WeirdCallableOptions()
argV = []
us.parseOptions(argV)
def test_weirdCallable(self):
"""
Test what happens when coerce functions raise errors.
"""
us = WeirdCallableOptions()
argV = "--foowrong blah".split()
# ValueError is swallowed as UsageError
e = self.assertRaises(usage.UsageError, us.parseOptions, argV)
self.assertEqual(str(e), "Parameter type enforcement failed: Yay")
us = WeirdCallableOptions()
argV = "--barwrong blah".split()
# RuntimeError is not swallowed
self.assertRaises(RuntimeError, us.parseOptions, argV)
class OutputTest(unittest.TestCase):
def test_uppercasing(self):
"""
Error output case adjustment does not mangle options
"""
opt = WellBehaved()
e = self.assertRaises(usage.UsageError,
opt.parseOptions, ['-Z'])
self.assertEqual(str(e), 'option -Z not recognized')
class InquisitionOptions(usage.Options):
optFlags = [
('expect', 'e'),
]
optParameters = [
('torture-device', 't',
'comfy-chair',
'set preferred torture device'),
]
class HolyQuestOptions(usage.Options):
optFlags = [('horseback', 'h',
'use a horse'),
('for-grail', 'g'),
]
class SubCommandOptions(usage.Options):
optFlags = [('europian-swallow', None,
'set default swallow type to Europian'),
]
subCommands = [
('inquisition', 'inquest', InquisitionOptions,
'Perform an inquisition'),
('holyquest', 'quest', HolyQuestOptions,
'Embark upon a holy quest'),
]
class SubCommandTest(unittest.TestCase):
def test_simpleSubcommand(self):
o = SubCommandOptions()
o.parseOptions(['--europian-swallow', 'inquisition'])
self.assertEqual(o['europian-swallow'], True)
self.assertEqual(o.subCommand, 'inquisition')
self.failUnless(isinstance(o.subOptions, InquisitionOptions))
self.assertEqual(o.subOptions['expect'], False)
self.assertEqual(o.subOptions['torture-device'], 'comfy-chair')
def tes
|
chrsrds/scikit-learn
|
sklearn/utils/tests/test_fixes.py
|
Python
|
bsd-3-clause
| 2,534 | 0 |
# Authors: Gael Varoquaux <gael.varoquaux@normalesup.org>
# Justin Vincent
# Lars Buitinck
# License: BSD 3 clause
import pickle
import numpy as np
import pytest
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.fixes import MaskedArray
from sklearn.utils.fixes import _joblib_parallel_args
from sklearn.utils.fixes import _object_dtype_isnan
def test_masked_array_obj_dtype_pickleable():
marr = MaskedArray([1, None, 'a'], dtype=object)
for mask in (True, False, [0, 1, 0]):
marr.mask = mask
marr_pickled = pickle.loads(pickle.dumps(marr))
assert_array_equal(marr.data, marr_pickled.data)
        assert_array_equal(marr.mask, marr_pickled.mask)
@pytest.mark.parametrize('joblib_version', ('0.11', '0.12.0'))
def test_joblib_parallel_args(monkeypatch, joblib_version):
import joblib
monkeypatch.setattr(joblib, '__version__', joblib_version)
if joblib_version == '0.12.0':
        # arguments are simply passed through
assert _joblib_parallel_args(prefer='threads') == {'prefer': 'threads'}
assert _joblib_parallel_args(prefer='processes', require=None) == {
'prefer': 'processes', 'require': None}
assert _joblib_parallel_args(non_existing=1) == {'non_existing': 1}
elif joblib_version == '0.11':
# arguments are mapped to the corresponding backend
assert _joblib_parallel_args(prefer='threads') == {
'backend': 'threading'}
assert _joblib_parallel_args(prefer='processes') == {
'backend': 'multiprocessing'}
with pytest.raises(ValueError):
_joblib_parallel_args(prefer='invalid')
assert _joblib_parallel_args(
prefer='processes', require='sharedmem') == {
'backend': 'threading'}
with pytest.raises(ValueError):
_joblib_parallel_args(require='invalid')
with pytest.raises(NotImplementedError):
_joblib_parallel_args(verbose=True)
else:
raise ValueError
@pytest.mark.parametrize("dtype, val", ([object, 1],
[object, "a"],
[float, 1]))
def test_object_dtype_isnan(dtype, val):
X = np.array([[val, np.nan],
[np.nan, val]], dtype=dtype)
expected_mask = np.array([[False, True],
[True, False]])
mask = _object_dtype_isnan(X)
assert_array_equal(mask, expected_mask)
|
macosforge/ccs-calendarserver
|
txdav/who/augment.py
|
Python
|
apache-2.0
| 18,083 | 0.000608 |
# -*- test-case-name: txdav.who.test.test_augment -*-
##
# Copyright (c) 2013-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Augmenting Directory Service
"""
__all__ = [
"AugmentedDirectoryService",
]
import time
from zope.interface import implementer
from twisted.internet.defer import inlineCallbacks, returnValue, succeed
from twistedcaldav.directory.augment import AugmentRecord
from twext.python.log import Logger
from twext.who.directory import DirectoryRecord
from twext.who.directory import DirectoryService as BaseDirectoryService
from twext.who.idirectory import (
IDirectoryService, RecordType, FieldName as BaseFieldName, NotAllowedError
)
from twext.who.util import ConstantsContainer
from txdav.common.idirectoryservice import IStoreDirectoryService
from txdav.who.directory import (
CalendarDirectoryRecordMixin, CalendarDirectoryServiceMixin,
)
from txdav.who.idirectory import (
AutoScheduleMode, FieldName, RecordType as CalRecordType
)
log = Logger()
def timed(f):
"""
A decorator which keeps track of the wrapped function's call count and
total duration
"""
def recordTiming(result, key, startTime):
"""
        Figures out how much time to add to the total time spent within the
        method identified by key and stores that via _addTiming.
        @param result: the result of the wrapped method
@param key: the method name
@type key: C{str}
@param startTime: the start time of the call in seconds
@type startTime: C{float}
"""
AugmentedDirectoryService._addTiming(key, time.time() - startTime)
return result
def timingWrapper(self, *args, **kwds):
"""
Records the start time of the call and the method's name
"""
startTime = time.time()
d = f(self, *args, **kwds)
d.addBoth(recordTiming, f.func_name, startTime)
return d
return timingWrapper
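# Illustration: after two @timed calls to recordWithUID lasting 0.1s and 0.3s,
# AugmentedDirectoryService._timings holds {'recordWithUID': (2, 0.4)}
# (call count, cumulative seconds), which stats() merges into its result.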
@implementer(IDirectoryService, IStoreDirectoryService)
class AugmentedDirectoryService(
BaseDirectoryService, CalendarDirectoryServiceMixin
):
"""
Augmented directory service.
This is a directory service that wraps an L{IDirectoryService} and augments
directory records with additional or modified fields.
"""
fieldName = ConstantsContainer((
BaseFieldName,
FieldName,
))
_timings = {}
def __init__(self, directory, store, augmentDB):
BaseDirectoryService.__init__(self, directory.realmName)
self._directory = directory
self._store = store
self._augmentDB = augmentDB
# An LDAP DS has extra info to expose via the dashboard
# This is assigned in buildDirectory()
self._ldapDS = None
@classmethod
def _addTiming(cls, key, duration):
if key not in cls._timings:
cls._timings[key] = (0, 0.0)
count, timeSpent = cls._timings[key]
count += 1
timeSpent += duration
cls._timings[key] = (count, timeSpent)
def flush(self):
return self._directory.flush()
def stats(self):
results = {}
results.update(self._timings)
# An LDAP DS has extra info to expose via the dashboard
if self._ldapDS is not None:
results.update(self._ldapDS.poolStats)
return succeed(results)
@property
def recordType(self):
# Defer to the directory service we're augmenting
return self._directory.recordType
def recordTypes(self):
# Defer to the directory service we're augmenting
return self._directory.recordTypes()
@inlineCallbacks
def recordsFromExpression(
self, expression, recordTypes=None,
limitResults=None, timeoutSeconds=None
):
records = yield self._directory.recordsFromExpression(
expression, recordTypes=recordTypes,
limitResults=limitResults, timeoutSeconds=timeoutSeconds
)
augmented = []
for record in records:
record = yield self._augment(record)
augmented.append(record)
returnValue(augmented)
@inlineCallbacks
def recordsWithFieldValue(
self, fieldName, value, limitResults=None, timeoutSeconds=None
):
records = yield self._directory.recordsWithFieldValue(
fieldName, value,
limitResults=limitResults, timeoutSeconds=timeoutSeconds
)
augmented = []
for record in records:
record = yield self._augment(record)
augmented.append(record)
returnValue(augmented)
@timed
@inlineCallbacks
def recordWithUID(self, uid, timeoutSeconds=None):
# MOVE2WHO, REMOVE THIS:
if not isinstance(uid, unicode):
# log.warn("Need to change uid to unicode")
uid = uid.decode("utf-8")
record = yield self._directory.recordWithUID(
uid, timeoutSeconds=timeoutSeconds
)
record = yield self._augment(record)
returnValue(record)
@timed
@inlineCallbacks
def recordWithGUID(self, guid, timeoutSeconds=None):
record = yield self._directory.recordWithGUID(
guid, timeoutSeconds=timeoutSeconds
)
record = yield self._augment(record)
returnValue(record)
@timed
@inlineCallbacks
def recordsWithRecordType(
self, recordType, limitResults=None, timeoutSeconds=None
):
records = yield self._directory.recordsWithRecordType(
recordType, limitResults=limitResults, timeoutSeconds=timeoutSeconds
)
augmented = []
for record in records:
record = yield self._augment(record)
augmented.append(record)
returnValue(augmented)
@timed
@inlineCallbacks
def recordWithShortName(self, recordType, shortName, timeoutSeconds=None):
# MOVE2WHO, REMOVE THIS:
if not isinstance(shortName, unicode):
# log.warn("Need to change shortName to unicode")
            shortName = shortName.decode("utf-8")
record = yield self._directory.recordWithShortName(
recordType, shortName, timeoutSeconds=timeoutSeconds
)
record = yield self._augment(record)
returnValue(record)
@timed
@inlineCallbacks
def recordsWithEmailAddress(
        self, emailAddress, limitResults=None, timeoutSeconds=None
):
# MOVE2WHO, REMOVE THIS:
if not isinstance(emailAddress, unicode):
# log.warn("Need to change emailAddress to unicode")
emailAddress = emailAddress.decode("utf-8")
records = yield self._directory.recordsWithEmailAddress(
emailAddress,
limitResults=limitResults, timeoutSeconds=timeoutSeconds
)
augmented = []
for record in records:
record = yield self._augment(record)
augmented.append(record)
returnValue(augmented)
@timed
def recordWithCalendarUserAddress(self, *args, **kwds):
return CalendarDirectoryServiceMixin.recordWithCalendarUserAddress(
self, *args, **kwds
)
@timed
def recordsMatchingTokens(self, *args, **kwds):
return CalendarDirectoryServiceMixin.recordsMatchingTokens(
self, *args, **kwds
)
@timed
def recordsMatchingFields(self, *args, **kwds):
return CalendarDirectoryServiceMixin.recordsMatchingFields(
self, *args, **kwds
)
@timed
    @inlineCallbacks
|
radare/bitcointools
|
deserialize.py
|
Python
|
mit
| 11,831 | 0.021384 |
#
#
#
from BCDataStream import *
from enumeration import Enumeration
from base58 import public_key_to_bc_address, hash_160_to_bc_address
import logging
import socket
import time
from util import short_hex, long_hex
def parse_CAddress(vds):
d = {}
d['nVersion'] = vds.read_int32()
d['nTime'] = vds.read_uint32()
d['nServices'] = vds.read_uint64()
d['pchReserved'] = vds.read_bytes(12)
d['ip'] = socket.inet_ntoa(vds.read_bytes(4))
d['port'] = socket.htons(vds.read_uint16())
return d
def deserialize_CAddress(d):
return d['ip']+":"+str(d['port'])+" (lastseen: %s)"%(time.ctime(d['nTime']),)
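# Example output (made-up address): "127.0.0.1:8333 (lastseen: Sat Aug  6 12:00:00 2011)"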
def parse_setting(setting, vds):
if setting[0] == "f": # flag (boolean) settings
return str(vds.read_boolean())
elif setting == "addrIncoming":
return "" # bitcoin 0.4 purposely breaks addrIncoming setting in encrypted wallets.
elif setting[0:4] == "addr": # CAddress
d = parse_CAddress(vds)
return deserialize_CAddress(d)
elif setting == "nTransactionFee":
return vds.read_int64()
elif setting == "nLimitProcessors":
return vds.read_int32()
return 'unknown setting'
def parse_TxIn(vds):
d = {}
d['prevout_hash'] = vds.read_bytes(32)
d['prevout_n'] = vds.read_uint32()
d['scriptSig'] = vds.read_bytes(vds.read_compact_size())
d['sequence'] = vds.read_uint32()
return d
def deserialize_TxIn(d, transaction_index=None, owner_keys=None):
if d['prevout_hash'] == "\x00"*32:
result = "TxIn: COIN GENERATED"
result += " coinbase:"+d['scriptSig'].encode('hex_codec')
elif transaction_index is not None and d['prevout_hash'] in transaction_index:
p = transaction_index[d['prevout_hash']]['txOut'][d['prevout_n']]
result = "TxIn: value: %f"%(p['value']/1.0e8,)
result += " prev("+long_hex(d['prevout_hash'][::-1])+":"+str(d['prevout_n'])+")"
else:
result = "TxIn: prev("+long_hex(d['prevout_hash'][::-1])+":"+str(d['prevout_n'])+")"
pk = extract_public_key(d['scriptSig'])
result += " pubkey: "+pk
result += " sig: "+decode_script(d['scriptSig'])
if d['sequence'] < 0xffffffff: result += " sequence: "+hex(d['sequence'])
return result
def parse_TxOut(vds):
d = {}
d['value'] = vds.read_int64()
d['scriptPubKey'] = vds.read_bytes(vds.read_compact_size())
return d
def deserialize_TxOut(d, owner_keys=None):
result = "TxOut: value: %f"%(d['value']/1.0e8,)
pk = extract_public_key(d['scriptPubKey'])
result += " pubkey: "+pk
result += " Script: "+decode_script(d['scriptPubKey'])
if owner_keys is not None:
if pk in owner_keys: result += " Own: True"
else: result += " Own: False"
return result
def parse_Transaction(vds):
d = {}
d['version'] = vds.read_int32()
n_vin = vds.read_compact_size()
d['txIn'] = []
for i in xrange(n_vin):
d['txIn'].append(parse_TxIn(vds))
n_vout = vds.read_compact_size()
d['txOut'] = []
for i in xrange(n_vout):
d['txOut'].append(parse_TxOut(vds))
d['lockTime'] = vds.read_uint32()
return d
def deserialize_Transaction(d, transaction_index=None, owner_keys=None):
result = "%d tx in, %d out\n"%(len(d['txIn']), len(d['txOut']))
for txIn in d['txIn']:
result += deserialize_TxIn(txIn, transaction_index) + "\n"
for txOut in d['txOut']:
result += deserialize_TxOut(txOut, owner_keys) + "\n"
return result
def parse_MerkleTx(vds):
d = parse_Transaction(vds)
d['hashBlock'] = vds.read_bytes(32)
n_merkleBranch = vds.read_compact_size()
d['merkleBranch'] = vds.read_bytes(32*n_merkleBranch)
d['nIndex'] = vds.read_int32()
return d
def deserialize_MerkleTx(d, transaction_index=None, owner_keys=None):
tx = deserialize_Transaction(d, transaction_index, owner_keys)
result = "block: "+(d['hashBlock'][::-1]).encode('hex_codec')
result += " %d hashes in merkle branch\n"%(len(d['merkleBranch'])/32,)
return result+tx
def parse_WalletTx(vds):
d = parse_MerkleTx(vds)
n_vtxPrev = vds.read_compact_size()
d['vtxPrev'] = []
for i in xrange(n_vtxPrev):
d['vtxPrev'].append(parse_MerkleTx(vds))
d['mapValue'] = {}
n_mapValue = vds.read_compact_size()
for i in xrange(n_mapValue):
key = vds.read_string()
value = vds.read_string()
d['mapValue'][key] = value
n_orderForm = vds.read_compact_size()
d['orderForm'] = []
for i in xrange(n_orderForm):
first = vds.read_string()
second = vds.read_string()
d['orderForm'].append( (first, second) )
d['fTimeReceivedIsTxTime'] = vds.read_uint32()
d['timeReceived'] = vds.read_uint32()
d['fromMe'] = vds.read_boolean()
d['spent'] = vds.read_boolean()
return d
def deserialize_WalletTx(d, transaction_index=None, owner_keys=None):
result = deserialize_MerkleTx(d, transaction_index, owner_keys)
result += "%d vtxPrev txns\n"%(len(d['vtxPrev']),)
result += "mapValue:"+str(d['mapValue'])
if len(d['orderForm']) > 0:
result += "\n"+" orderForm:"+str(d['orderForm'])
result += "\n"+"timeReceived:"+time.ctime(d['timeReceived'])
result += " fromMe:"+str(d['fromMe'])+" spent:"+str(d['spent'])
return result
# The CAuxPow (auxiliary proof of work) structure supports merged mining.
# A flag in the block version field indicates the structure's presence.
# As of 8/2011, the Original Bitcoin Client does not use it. CAuxPow
# originated in Namecoin; see
# https://github.com/vinced/namecoin/blob/mergedmine/doc/README_merged-mining.md.
def parse_AuxPow(vds):
d = parse_MerkleTx(vds)
n_chainMerkleBranch = vds.read_compact_size()
d['chainMerkleBranch'] = vds.read_bytes(32*n_chainMerkleBranch)
d['chainIndex'] = vds.read_int32()
d['parentBlock'] = parse_BlockHeader(vds)
return d
def parse_BlockHeader(vds):
d = {}
header_start = vds.read_cursor
d['version'] = vds.read_int32()
d['hashPrev'] = vds.read_bytes(32)
d['hashMerkleRoot'] = vds.read_bytes(32)
d['nTime'] = vds.read_uint32()
d['nBits'] = vds.read_uint32()
d['nNonce'] = vds.read_uint32()
header_end = vds.read_cursor
d['__header__'] = vds.input[header_start:header_end]
return d
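# Sketch (hashlib assumed): the 80 raw header bytes kept in d['__header__'] are
# exactly what Bitcoin double-SHA256 hashes to identify a block, e.g.
#   import hashlib
#   block_hash = hashlib.sha256(hashlib.sha256(d['__header__']).digest()).digest()[::-1]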
def parse_Block(vds):
d = parse_BlockHeader(vds)
d['transactions'] = []
# if d['version'] & (1 << 8):
    #    d['auxpow'] = parse_AuxPow(vds)
nTransactions = vds.read_compact_size()
for i in xrange(nTransactions):
d['transactions'].append(parse_Transaction(vds))
return d
def deserialize_Block(d):
result = "Time: "+time.ctime(d['nTime'])+" Nonce: "+str(d['nNonce'])
result += "\nnBits: 0x"+hex(d['nBits'])
result += "\nhashMerkleRoot: 0x"+d['hashMerkleRoot'][::-1].encode('hex_codec')
    result += "\nPrevious block: "+d['hashPrev'][::-1].encode('hex_codec')
result += "\n%d transactions:\n"%len(d['transactions'])
for t in d['transactions']:
result += deserialize_Transaction(t)+"\n"
result += "\nRaw block header: "+d['__header__'].encode('hex_codec')
return result
def parse_BlockLocator(vds):
d = { 'hashes' : [] }
nHashes = vds.read_compact_size()
for i in xrange(nHashes):
d['hashes'].append(vds.read_bytes(32))
return d
def deserialize_BlockLocator(d):
result = "Block Locator top: "+d['hashes'][0][::-1].encode('hex_codec')
return result
opcodes = Enumeration("Opcodes", [
("OP_0", 0), ("OP_PUSHDATA1",76), "OP_PUSHDATA2", "OP_PUSHDATA4", "OP_1NEGATE", "OP_RESERVED",
"OP_1", "OP_2", "OP_3", "OP_4", "OP_5", "OP_6", "OP_7",
"OP_8", "OP_9", "OP_10", "OP_11", "OP_12", "OP_13", "OP_14", "OP_15", "OP_16",
"OP_NOP", "OP_VER", "OP_IF", "OP_NOTIF", "OP_VERIF", "OP_VERNOTIF", "OP_ELSE", "OP_ENDIF", "OP_VERIFY",
"OP_RETURN", "OP_TOALTSTACK", "OP_FROMALTSTACK", "OP_2DROP", "OP_2DUP", "OP_3DUP", "OP_2OVER", "OP_2ROT", "OP_2SWAP",
"OP_IFDUP", "OP_DEPTH", "OP_DROP", "OP_DUP", "OP_NIP", "OP_OVER", "OP_PICK", "OP_ROLL", "OP_ROT",
"OP_SWAP", "OP_TUCK", "OP_CAT", "OP_SUBSTR", "OP_LEFT", "OP_RIGHT", "OP_SIZE", "OP_INVERT", "OP_AND",
"OP_OR", "OP_XOR", "OP_EQUAL", "OP_EQUALVERIFY", "OP_RESERVED1", "OP_RESERVED2", "OP_1ADD", "OP_1SUB", "OP_2MUL",
"OP_2DIV", "OP_NEGATE", "OP_ABS", "OP_NOT", "OP_0NOTEQUAL", "OP_ADD", "OP_SUB", "OP_MUL", "OP_DIV",
"OP_MOD", "OP_LSHIFT", "OP_RSHIFT", "OP_BOOLAND", "OP_BOOLOR",
"OP_NUMEQUAL", "OP_NUMEQUALVERIFY", "OP_NUMNOTEQUAL", "OP_LESSTHAN",
"OP_
|
Yukarumya/Yukarum-Redfoxes
|
testing/web-platform/mach_commands.py
|
Python
|
mpl-2.0
| 13,041 | 0.00207 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Integrates the web-platform-tests test runner with mach.
from __future__ import absolute_import, unicode_literals, print_function
import os
import sys
from mozbuild.base import (
MachCommandBase,
MachCommandConditions as conditions,
MozbuildObject,
)
from mach.decorators import (
CommandProvider,
Command,
)
# This should probably be consolidated with similar classes in other test
# runners.
class InvalidTestPathError(Exception):
"""Exception raised when the test path is not valid."""
class WebPlatformTestsRunner(MozbuildObject):
"""Run web platform test
|
s."""
def setup_kwargs(self, kwargs):
from wptrunner import wptcommandline
build_path = os.path.join(self.topobjdir, 'build')
if build_path not in sys.path:
            sys.path.append(build_path)
if kwargs["config"] is None:
kwargs["config"] = os.path.join(self.topsrcdir, 'testing', 'web-platform', 'wptrunner.ini')
if kwargs["binary"] is None:
kwargs["binary"] = self.get_binary_path()
if kwargs["prefs_root"] is None:
kwargs["prefs_root"] = os.path.join(self.topobjdir, '_tests', 'web-platform', "prefs")
if kwargs["certutil_binary"] is None:
kwargs["certutil_binary"] = self.get_binary_path('certutil')
if kwargs["stackfix_dir"] is None:
kwargs["stackfix_dir"] = os.path.split(
self.get_binary_path(validate_exists=False))[0]
here = os.path.split(__file__)[0]
if kwargs["ssl_type"] in (None, "pregenerated"):
if kwargs["ca_cert_path"] is None:
kwargs["ca_cert_path"] = os.path.join(here, "certs", "cacert.pem")
if kwargs["host_key_path"] is None:
kwargs["host_key_path"] = os.path.join(here, "certs", "web-platform.test.key")
if kwargs["host_cert_path"] is None:
kwargs["host_cert_path"] = os.path.join(here, "certs", "web-platform.test.pem")
kwargs["capture_stdio"] = True
kwargs = wptcommandline.check_args(kwargs)
def run_tests(self, **kwargs):
from wptrunner import wptrunner
self.setup_kwargs(kwargs)
logger = wptrunner.setup_logging(kwargs, {"mach": sys.stdout})
result = wptrunner.run_tests(**kwargs)
return int(not result)
def list_test_groups(self, **kwargs):
from wptrunner import wptrunner
self.setup_kwargs(kwargs)
wptrunner.list_test_groups(**kwargs)
class WebPlatformTestsUpdater(MozbuildObject):
"""Update web platform tests."""
def run_update(self, **kwargs):
import update
from update import updatecommandline
if kwargs["config"] is None:
kwargs["config"] = os.path.join(self.topsrcdir, 'testing', 'web-platform', 'wptrunner.ini')
if kwargs["product"] is None:
kwargs["product"] = "firefox"
updatecommandline.check_args(kwargs)
logger = update.setup_logging(kwargs, {"mach": sys.stdout})
try:
update.run_update(logger, **kwargs)
except Exception:
import pdb
import traceback
traceback.print_exc()
# pdb.post_mortem()
class WebPlatformTestsReduce(WebPlatformTestsRunner):
def run_reduce(self, **kwargs):
from wptrunner import reduce
self.setup_kwargs(kwargs)
kwargs["capture_stdio"] = True
logger = reduce.setup_logging(kwargs, {"mach": sys.stdout})
tests = reduce.do_reduce(**kwargs)
if not tests:
logger.warning("Test was not unstable")
for item in tests:
logger.info(item.id)
class WebPlatformTestsCreator(MozbuildObject):
template_prefix = """<!doctype html>
%(documentElement)s<meta charset=utf-8>
"""
template_long_timeout = "<meta name=timeout content=long>\n"
template_body_th = """<title></title>
<script src=/resources/testharness.js></script>
<script src=/resources/testharnessreport.js></script>
<script>
</script>
"""
template_body_reftest = """<title></title>
<link rel=%(match)s href=%(ref)s>
"""
template_body_reftest_wait = """<script src="/common/reftest-wait.js"></script>
"""
def rel_path(self, path):
if path is None:
return
abs_path = os.path.normpath(os.path.abspath(path))
return os.path.relpath(abs_path, self.topsrcdir)
def rel_url(self, rel_path):
upstream_path = os.path.join("testing", "web-platform", "tests")
local_path = os.path.join("testing", "web-platform", "mozilla", "tests")
if rel_path.startswith(upstream_path):
return rel_path[len(upstream_path):].replace(os.path.sep, "/")
elif rel_path.startswith(local_path):
return "/_mozilla" + rel_path[len(local_path):].replace(os.path.sep, "/")
else:
return None
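    # Example (hypothetical path): rel_url("testing/web-platform/mozilla/tests/dom/t.html")
    # -> "/_mozilla/dom/t.html"; paths outside both wpt roots map to None.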
def run_create(self, context, **kwargs):
import subprocess
path = self.rel_path(kwargs["path"])
ref_path = self.rel_path(kwargs["ref"])
if kwargs["ref"]:
kwargs["reftest"] = True
if self.rel_url(path) is None:
print("""Test path %s is not in wpt directories:
testing/web-platform/tests for tests that may be shared
testing/web-platform/mozilla/tests for Gecko-only tests""" % path)
return 1
if ref_path and self.rel_url(ref_path) is None:
print("""Reference path %s is not in wpt directories:
testing/web-platform/tests for tests that may be shared
testing/web-platform/mozilla/tests for Gecko-only tests""" % ref_path)
return 1
if os.path.exists(path) and not kwargs["overwrite"]:
print("Test path already exists, pass --overwrite to replace")
return 1
if kwargs["mismatch"] and not kwargs["reftest"]:
print("--mismatch only makes sense for a reftest")
return 1
if kwargs["wait"] and not kwargs["reftest"]:
print("--wait only makes sense for a reftest")
return 1
args = {"documentElement": "<html class=reftest-wait>\n" if kwargs["wait"] else ""}
template = self.template_prefix % args
if kwargs["long_timeout"]:
template += self.template_long_timeout
if kwargs["reftest"]:
args = {"match": "match" if not kwargs["mismatch"] else "mismatch",
"ref": self.rel_url(ref_path) if kwargs["ref"] else '""'}
template += self.template_body_reftest % args
if kwargs["wait"]:
template += self.template_body_reftest_wait
else:
template += self.template_body_th
try:
os.makedirs(os.path.dirname(path))
except OSError:
pass
with open(path, "w") as f:
f.write(template)
if kwargs["no_editor"]:
editor = None
elif kwargs["editor"]:
editor = kwargs["editor"]
elif "VISUAL" in os.environ:
editor = os.environ["VISUAL"]
elif "EDITOR" in os.environ:
editor = os.environ["EDITOR"]
else:
editor = None
proc = None
if editor:
proc = subprocess.Popen("%s %s" % (editor, path), shell=True)
if not kwargs["no_run"]:
p = create_parser_wpt()
wpt_kwargs = vars(p.parse_args(["--manifest-update", path]))
context.commands.dispatch("web-platform-tests", context, **wpt_kwargs)
if proc:
proc.wait()
class WPTManifestUpdater(MozbuildObject):
def run_update(self, check_clean=False, **kwargs):
import manifestupdate
from wptrunner import wptlogging
logger = wptlogging.setup(kwargs, {"mach": sys.stdout})
wpt_dir = os.path.abspath(os.path.join(self.topsrcdir, 'testing', 'web-platform'))
        manifestupdate.update(logger, wpt_dir, check_clean)
|
wooga/airflow
|
airflow/providers/facebook/ads/hooks/ads.py
|
Python
|
apache-2.0
| 5,572 | 0.001436 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This module contains Facebook Ads Reporting hooks
"""
import time
from enum import Enum
from typing import Any, Dict, List
from cached_property import cached_property
from facebook_business.adobjects.adaccount import AdAccount
from facebook_business.adobjects.adreportrun import AdReportRun
from facebook_business.adobjects.adsinsights import AdsInsights
from facebook_business.api import FacebookAdsApi
from airflow.exceptions import AirflowException
from airflow.hooks.base_hook import BaseHook
class JobStatus(Enum):
"""
Available options for facebook async task status
"""
COMPLETED = 'Job Completed'
STARTED = 'Job Started'
RUNNING = 'Job Running'
FAILED = 'Job Failed'
SKIPPED = 'Job Skipped'
class FacebookAdsReportingHook(BaseHook):
"""
Hook for the Facebook Ads API
.. seealso::
For more information on the Facebook Ads API, take a look at the API docs:
https://developers.facebook.com/docs/marketing-apis/
:param facebook_conn_id: Airflow Facebook Ads connection ID
:type facebook_conn_id: str
:param api_version: The version of Facebook API. Default to v6.0
:type api_version: str
"""
def __init__(
self,
facebook_conn_id: str = "facebook_default",
api_version: str = "v6.0",
) -> None:
super().__init__()
self.facebook_conn_id = facebook_conn_id
self.api_version = api_version
self.client_required_fields = ["app_id",
"app_secret",
"access_token",
"account_id"]
def _get_service(self) -> FacebookAdsApi:
""" Returns Facebook Ads Client using a service account"""
config = self.facebook_ads_config
return FacebookAdsApi.init(app_id=config["app_id"],
app_secret=config["app_secret"],
access_token=config["access_token"],
account_id=config["account_id"],
api_version=self.api_version)
@cached_property
def facebook_ads_config(self) -> Dict:
"""
Gets Facebook ads connection from meta db and sets
facebook_ads_config attribute with returned config file
"""
self.log.info("Fetching fb connection: %s", self.facebook_conn_id)
conn = self.get_connection(self.facebook_conn_id)
config = conn.extra_dejson
missings_keys = self.client_required_fields - config.keys()
if missings_keys:
message = "{missings_keys} fields are missing".format(missings_keys=missings_keys)
raise AirflowException(message)
return config
def bulk_facebook_report(
self,
params: Dict[str, Any],
fields: List[str],
sleep_time: int = 5,
) -> List[AdsInsights]:
"""
Pulls data from the Facebook Ads API
:param fields: List of fields that is obtained from Facebook. Found in AdsInsights.Field class.
https://developers.facebook.com/docs/marketing-api/insights/parameters/v6.0
:type fields: List[str]
:param params: Parameters that determine the query for Facebook
https://developers.facebook.com/docs/marketing-api/insights/parameters/v6.0
        :type params: Dict[str, Any]
:param sleep_time: Time to sleep when async call is happening
:type sleep_time: int
:return: Facebook Ads API response, converted to Facebook Ads Row objects
:rtype: List[AdsInsights]
"""
api = self._get_service()
ad_account = AdAccount(api.get_default_account_id(), api=api)
_async = ad_account.get_insights(params=params, fields=fields, is_async=True)
while True:
request = _async.api_get()
            async_status = request[AdReportRun.Field.async_status]
percent = request[AdReportRun.Field.async_percent_completion]
self.log.info("%s %s completed, async_status: %s", percent, "%", async_status)
if async_status == JobStatus.COMPLETED.value:
self.log.info("Job run completed")
break
if async_status in [JobStatus.SKIPPED.value, JobStatus.FAILED.value]:
message = "{async_status}. Please retry.".format(async_status=async_status)
|
raise AirflowException(message)
time.sleep(sleep_time)
report_run_id = _async.api_get()["report_run_id"]
report_object = AdReportRun(report_run_id, api=api)
insights = report_object.get_insights()
self.log.info("Extracting data from returned Facebook Ads Iterators")
return list(insights)
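# Usage sketch (added for illustration; not part of the original hook).
# Hypothetical DAG-side call, assuming an Airflow connection named
# "facebook_default" whose extras provide app_id, app_secret, access_token
# and account_id:
#
#   hook = FacebookAdsReportingHook(facebook_conn_id="facebook_default")
#   rows = hook.bulk_facebook_report(
#       params={"level": "ad", "date_preset": "yesterday"},
#       fields=[AdsInsights.Field.campaign_name, AdsInsights.Field.clicks],
#   )
#   clicks = [row[AdsInsights.Field.clicks] for row in rows]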
|
501code/Fletcher-Street-Urban-Riding-Club
|
pages/migrations/0007_auto_20160221_1533.py
|
Python
|
mit
| 473 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-21 15:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
        ('pages', '0006_auto_20160221_1241'),
]
operations = [
migrations.AlterField(
            model_name='page',
            name='image',
field=models.ImageField(default='none.jpg', upload_to='uploads/'),
),
]
|
CS205IL-sp15/workbook
|
demo_colorFreq_start/py/compute.py
|
Python
|
mit
| 594 | 0.065657 |
# imports/modules
import os
import random
import json
import collections
from PIL import Image
# Convert (r, g, b) into #rrggbb color
def getRGBstring( (r, g, b) ):
s = "#"
s = s + format(r, '02x')
s = s + format(g, '02x')
s = s + format(b, '02x')
return s
def do_compute():
# Open the image
origImgFile = 'res/bryce.jpg'
origImg = Image.open(origImgFile)
    # Process the image
    # (hypothetical reconstruction: this "_start" demo leaves the step blank;
    #  count how often each #rrggbb colour occurs in the image)
    freq = collections.Counter()
    for px in origImg.getdata():
        freq[getRGBstring(px[:3])] += 1
# Save the processed information
    output = { 'file': origImgFile,
               'freq': freq }
f = open("res/freq.json",'w')
s = json.dumps(output, indent = 4)
f.write(s)
|
patverga/torch-relation-extraction
|
bin/analysis/plot-pr-curve.py
|
Python
|
mit
| 1,479 | 0.01217 |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import matplotlib.colors
import sys
matplotlib.rc('text', usetex=True)
fontsize = 22
font = {'family' : 'serif',
'serif' : 'Times Roman',
'size' : fontsize}
matplotlib.rc('font', **font)
output_dir = "doc/naacl2016/"
# load in data
data_fname = sys.argv[1]
labels = np.unique(np.loadtxt(data_fname, usecols=[2], dtype='str'))
print labels
data = np.loadtxt(data_fname, converters = {2: lambda y: np.where(labels==y)[0]})
labels = ["LSTM", "USchema"]
colors = ['0.25', '0.6']
width = 4
print data
recall_idx = 0
precision_idx = 1
model_idx = 2
# initialize figures
fig1 = plt.figure()
ax1 = fig1.add_subplot(111)
ax1.set_title("LSTM + USchema: Recall vs. Precision", fontsize=fontsize)
ax1.set_xlabel("Recall")
ax1.set_ylabel("Precision")
plt.xlim((0.075, 0.5,))
plt.ylim((0.075, 0.7,))
plt.yticks((0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7))
plt.xticks((0.1, 0.2, 0.3, 0.4, 0.5))
for i in range(len(labels)):
indices = np.where(data[:,model_idx] == i)
ax1.plot(data[indices,recall_idx][0], data[indices,precision_idx][0], label=labels[i], color=colors[i], lw=width)
ax1.yaxis.set_major_formatter(ticker.FuncFormatter(lambda y, pos: ('%.1f')%(y)))
ax1.xaxis.set_major_formatter(ticker.FuncFormatter(lambda x, pos: ('%.1f')%(x)))
# add legend
ax1.legend(fontsize=18)
plt.tight_layout()
fig1.savefig("%s/pr-curve.pdf" % (output_dir), bbox_inches='tight')
plt.show()
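# Input format note (reconstructed from the loadtxt calls above; added for
# illustration, not part of the original script): each line of the file named
# by sys.argv[1] holds whitespace-separated "<recall> <precision> <model>"
# triples, e.g.
#   0.12 0.55 LSTM
#   0.30 0.41 USchema
# np.unique() collects the model names and the converter maps them to ids.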
|
martindurant/misc
|
congrid.py
|
Python
|
mit
| 3,851 | 0.014801 |
import numpy as n
import scipy.interpolate
import scipy.ndimage
def congrid(a, newdims, method='linear', centre=False, minusone=False):
'''Arbitrary resampling of source array to new dimension sizes.
Currently only supports maintaining the same number of dimensions.
To use 1-D arrays, first promote them to shape (x,1).
Uses the same parameters and creates the same co-ordinate lookup points
    as IDL's congrid routine, which apparently originally came from a VAX/VMS
routine of the same name.
method:
neighbour - closest value from original data
nearest and linear - uses n x 1-D interpolations using
scipy.interpolate.interp1d
(see Numerical Recipes for validity of use of n 1-D interpolations)
spline - uses ndimage.map_coordinates
centre:
True - interpolation points are at the centres of the bins
False - points are at the front edge of the bin
minusone:
For example- inarray.shape = (i,j) & new dimensions = (x,y)
False - inarray is resampled by factors of (i/x) * (j/y)
True - inarray is resampled by(i-1)/(x-1) * (j-1)/(y-1)
This prevents extrapolation one element beyond bounds of input array.
'''
if not a.dtype in [n.float64, n.float32]:
a = n.cast[float](a)
m1 = n.cast[int](minusone)
ofs = n.cast[int](centre) * 0.5
old = n.array( a.shape )
ndims = len( a.shape )
if len( newdims ) != ndims:
print "[congrid] dimensions error. " \
"This routine currently only support " \
"rebinning to the same number of dimensions."
return None
newdims = n.asarray( newdims, dtype=float )
dimlist = []
if method == 'neighbour':
for i in range( ndims ):
base = n.indices(newdims)[i]
dimlist.append( (old[i] - m1) / (newdims[i] - m1) \
* (base + ofs) - ofs )
cd = n.array( dimlist ).round().astype(int)
newa = a[list( cd )]
return newa
elif method in ['nearest','linear']:
# calculate new dims
for i in range( ndims ):
base = n.arange( newdims[i] )
dimlist.append( (old[i] - m1) / (newdims[i] - m1) \
* (base + ofs) - ofs )
        # specify old dims
olddims = [n.arange(i, dtype = n.float) for i in list( a.shape )]
# first interpolation - for ndims = any
mint = scipy.interpolate.interp1d( olddims[-1], a, kind=method )
newa = mint( dimlist[-1] )
trorder = [ndims - 1] + range( ndims - 1 )
for i in range( ndims - 2, -1, -1 ):
            newa = newa.transpose( trorder )
mint = scipy.interpolate.interp1d( olddims[i], newa,
kind=method )
newa = mint( dimlist[i] )
if ndims > 1:
# need one more transpose to return to original dimensions
newa = newa.transpose( trorder )
return newa
elif method in ['spline']:
oslices = [ slice(0,j) for j in old ]
oldcoords = n.ogrid[oslices]
nslices = [ slice(0,j) for j in list(newdims) ]
newcoords = n.mgrid[nslices]
newcoords_dims = range(n.rank(newcoords))
#make first index last
newcoords_dims.append(newcoords_dims.pop(0))
newcoords_tr = newcoords.transpose(newcoords_dims)
# makes a view that affects newcoords
newcoords_tr += ofs
deltas = (n.asarray(old) - m1) / (newdims - m1)
newcoords_tr *= deltas
newcoords_tr -= ofs
newa = scipy.ndimage.map_coordinates(a, newcoords)
return newa
else:
print "Congrid error: Unrecognized interpolation type.\n", \
"Currently only \'neighbour\', \'nearest\',\'linear\',", \
"and \'spline\' are supported."
return None
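# Usage sketch (added for illustration; not part of the original module):
# downsample a 4x4 ramp to 2x2, or upsample it to 8x8.
#
#   a = n.arange(16.).reshape((4, 4))
#   small = congrid(a, (2, 2), method='linear')   # shape (2, 2)
#   big = congrid(a, (8, 8), method='spline')     # shape (8, 8)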
|
googleapis/python-dialogflow
|
samples/generated_samples/dialogflow_generated_dialogflow_v2_documents_reload_document_async.py
|
Python
|
apache-2.0
| 1,621 | 0.000617 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ReloadDocument
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
#   python3 -m pip install google-cloud-dialogflow
# [START dialogflow_generated_dialogflow_v2_Documents_ReloadDocument_async]
from google.cloud import dialogflow_v2
async def sample_reload_document():
    # Create a client
client = dialogflow_v2.DocumentsAsyncClient()
# Initialize request argument(s)
request = dialogflow_v2.ReloadDocumentRequest(
content_uri="content_uri_value",
name="name_value",
)
# Make the request
operation = client.reload_document(request=request)
print("Waiting for operation to complete...")
response = await operation.result()
# Handle the response
print(response)
# [END dialogflow_generated_dialogflow_v2_Documents_ReloadDocument_async]
|
OptimalDesignLab/Kona
|
src/kona/linalg/matrices/preconds/idf_schur.py
|
Python
|
lgpl-3.0
| 5,770 | 0.00312 |
from kona.linalg.matrices.hessian.basic import BaseHessian
class ReducedSchurPreconditioner(BaseHessian):
"""
An IDF-Schur preconditioner designed to precondition the KKT system for
multidisciplinary design optimization problems formulated using the IDF
architecture.
The preconditioner solves a system defined by the matrix:
.. math::
\\begin{bmatrix} I && A^T \\\\ A && 0 \\end{bmatrix}
    This solution is used as the preconditioner to the complete KKT system.
Unlike the complete KKT system, this solution can be performed using FGMRES.
Attributes
----------
krylov : KrylovSolver
cnstr_jac : TotalConstraintJacobian
"""
def __init__(self, vector_factories, optns=None):
super(ReducedSchurPreconditioner, self).__init__(
vector_factories, optns)
self.primal_factory.request_num_vectors(3)
        if self.eq_factory is not None:
            self.eq_factory.request_num_vectors(1)
else:
raise RuntimeError(
"ReducedSchurPreconditioner >> " +
"Problem must have equality constraints!")
if self.ineq_factory is not None:
self.ineq_factory.request_num_vectors(1)
# initialize the internal FGMRES solver
krylov_opts = {
'subspace_size' : 5,
'rel_tol' : 1e-2,
'check_res' : False,
'check_LS_grad' : False,
'krylov_file' : KonaFile(
'kona_schur.dat', self.primal_factory._memory.rank)}
self.krylov = FGMRES(self.primal_factory, optns=krylov_opts)
# initialize an identity preconditioner
self.eye = IdentityMatrix()
self.precond = self.eye.product
# initialize the total constraint jacobian block
self.cnstr_jac = TotalConstraintJacobian(vector_factories)
# set misc settings
self.diag = 0.0
self._allocated = False
def prod_target(self, in_vec, out_vec):
self.design_prod.equals(in_vec)
self.design_prod.restrict_to_target()
self.cnstr_jac.approx.product(self.design_prod, self.dual_prod)
out_vec.equals(0.0)
self.dual_prod.convert_to_design(out_vec)
def prod_target_t(self, in_vec, out_vec):
self.dual_prod.equals(0.0)
in_vec.convert_to_dual(self.dual_prod)
self.cnstr_jac.T.approx.product(self.dual_prod, out_vec)
out_vec.restrict_to_target()
def linearize(self, at_primal, at_state, scale=1.0):
# store references to the evaluation point
if isinstance(at_primal, CompositePrimalVector):
self.at_design = at_primal.design
else:
self.at_design = at_primal
self.at_state = at_state
# save the scaling on constraint terms
self.scale = scale
# linearize the constraint jacobian
self.cnstr_jac.linearize(self.at_design, self.at_state, scale=self.scale)
# if this is the first linearization, allocate some useful vectors
if not self._allocated:
# design vectors
self.design_prod = self.primal_factory.generate()
self.design_work = []
for i in xrange(2):
self.design_work.append(self.primal_factory.generate())
# dual vectors
self.dual_prod = None
if self.eq_factory is not None and self.ineq_factory is not None:
self.dual_prod = CompositeDualVector(
self.eq_factory.generate(), self.ineq_factory.generate())
else:
self.dual_prod = self.eq_factory.generate()
def product(self, in_vec, out_vec):
# do some aliasing
try:
in_design = in_vec.primal.design
out_design = out_vec.primal.design
out_vec.primal.slack.equals(in_vec.primal.slack)
except Exception:
in_design = in_vec.primal
out_design = out_vec.primal
in_dual = in_vec.dual
out_dual = out_vec.dual
design_work = self.design_work
out_design.equals(0.0)
out_dual.equals(0.0)
# Step 1: Solve A_targ^T * v_dual = u_targ
design_work[1].equals(in_design)
design_work[1].restrict_to_target()
design_work[0].equals(0.0)
self.prod_target_t(design_work[1], design_work[0])
self.krylov.solve(
self.prod_target_t, design_work[1], design_work[0], self.precond)
design_work[0].convert_to_dual(out_dual)
# Step 2: Compute v_x = u_x - A_x^T * v_dual
design_work[0].equals(0.0)
self.cnstr_jac.T.approx.product(out_dual, design_work[0])
out_design.equals_ax_p_by(1., in_design, -1., design_work[0])
out_design.restrict_to_design()
# Step 3: Solve A_targ * v_targ = u_dual - A_x * v_x
self.dual_prod.equals(0.0)
self.cnstr_jac.approx.product(out_design, self.dual_prod)
self.dual_prod.equals_ax_p_by(1., in_dual, -1., self.dual_prod)
self.dual_prod.convert_to_design(design_work[1])
design_work[1].restrict_to_target()
design_work[0].equals(0.0)
self.krylov.solve(
self.prod_target, design_work[1], design_work[0], self.precond)
design_work[0].restrict_to_target()
out_design.plus(design_work[0])
# imports here to prevent circular errors
import numpy as np
from kona.linalg.vectors.composite import CompositePrimalVector
from kona.linalg.vectors.composite import CompositeDualVector
from kona.linalg.matrices.common import IdentityMatrix
from kona.linalg.matrices.hessian import TotalConstraintJacobian
from kona.linalg.solvers.krylov import FGMRES
from kona.linalg.memory import KonaFile
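# Reading aid (added; not part of the original module): the three steps in
# product() perform block elimination on the saddle-point system
#
#     [ I   A^T ] [v_x]   [u_x]
#     [ A    0  ] [v_l] = [u_l]
#
# restricted to the IDF target variables: solve A_t^T v_l = u_t for the
# multipliers, back-substitute v_x = u_x - A^T v_l, then solve
# A_t v_t = u_l - A v_x for the target unknowns, with each A_t solve done
# approximately by the internal FGMRES instance.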
|
rogerhil/flaviabernardes
|
flaviabernardes/flaviabernardes/artwork/migrations/0012_auto_20160831_2148.py
|
Python
|
apache-2.0
| 376 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    dependencies = [
('artwork', '0011_auto_20160217_1921'),
]
operations = [
        migrations.AlterModelOptions(
name='artwork',
options={'ordering': ('name', 'id')},
),
]
|
openstack/barbican
|
functionaltests/api/v1/functional/test_acls_rbac.py
|
Python
|
apache-2.0
| 13,953 | 0 |
# Copyright (c) 2015 Cisco Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from barbican.tests import utils
from functionaltests.api import base
from functionaltests.api.v1.behaviors import acl_behaviors
from functionaltests.api.v1.behaviors import container_behaviors
from functionaltests.api.v1.behaviors import secret_behaviors
from functionaltests.api.v1.models import acl_models
from functionaltests.api.v1.models import container_models
from functionaltests.api.v1.models import secret_models
from functionaltests.common import config
CONF = config.get_config()
admin_a = CONF.rbac_users.admin_a
creator_a = CONF.rbac_users.creator_a
observer_a = CONF.rbac_users.observer_a
auditor_a = CONF.rbac_users.auditor_a
admin_b = CONF.rbac_users.admin_b
observer_b = CONF.rbac_users.observer_b
def get_acl_default():
return {'read': {'project-access': True}}
def get_acl_one():
return {'read': {'users': ['reader1'], 'project-access': False}}
def get_acl_two():
return {'read': {'users': ['reader2'], 'project-access': False}}
test_data_set_secret_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 403},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_get_secret_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 200},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_update_secret_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 403},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_delete_secret_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 403},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_set_container_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 403},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_get_container_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 200},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_update_container_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 403},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_delete_container_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 403},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
@utils.parameterized_test_case
class RBACAclsTestCase(base.TestCase):
"""Functional tests exercising RBAC Policies for ACL Operations"""
def setUp(self):
super(RBACAclsTestCase, self).setUp()
self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client)
self.container_behaviors = container_behaviors.ContainerBehaviors(
self.client)
self.acl_behaviors = acl_behaviors.AclBehaviors(self.client)
def tearDown(self):
self.acl_behaviors.delete_all_created_acls()
self.secret_behaviors.delete_all_created_secrets()
self.container_behaviors.delete_all_created_containers()
super(RBACAclsTestCase, self).tearDown()
@utils.parameterized_dataset(test_data_set_secret_acl)
def test_set_secret_acl(self, user, expected_return):
secret_ref = self.store_secret()
status = self.set_secret_acl(secret_ref, get_acl_one(),
user_name=user)
self.assertEqual(expected_return, status)
@utils.parameterized_dataset(test_data_get_secret_acl)
def test_get_secret_acl(self, user, expected_return):
secret_ref = self.store_secret()
status = self.set_secret_acl(secret_ref, get_acl_one())
self.assertEqual(200, status)
resp = self.acl_behaviors.get_acl(secret_ref + '/acl', user_name=user)
        self.assertEqual(expected_return, resp.status_code)
if expected_return == 200:
self.assertIn('reader1', resp.model.read['users'])
        else:
            self.assertIsNone(resp.model)
@utils.parameterized_dataset(test_data_update_secret_acl)
def test_update_secret_acl(self, user, expected_return):
secret_ref = self.store_secret()
status = self.set_secret_acl(secret_ref, get_acl_one())
self.assertEqual(200, status)
status, model = self.update_secret_acl(secret_ref,
get_acl_two(),
user_name=user)
self.assertEqual(expected_return, status)
get_resp = self.acl_behaviors.get_acl(secret_ref + '/acl',
user_name=admin_a)
if expected_return == 200:
self.assertIsNotNone(model.acl_ref)
# verify update happened
self.assertIn('reader2', get_resp.model.read['users'])
else:
self.assertIsNone(model)
# verify no update happened
self.assertIn('reader1', get_resp.model.read['users'])
@utils.parameterized_dataset(test_data_delete_secret_acl)
def test_delete_secret_acl(self, user, expected_return):
secret_ref = self.store_secret()
status = self.set_secret_acl(secret_ref, get_acl_one())
self.assertEqual(200, status)
resp = self.acl_behaviors.delete_acl(secret_ref + '/acl',
user_name=user)
self.assertEqual(expected_return, resp.status_code)
get_resp = self.acl_behaviors.get_acl(secret_ref + '/acl',
user_name=admin_a)
if
|
obsoleter/suds
|
suds/mx/typer.py
|
Python
|
lgpl-3.0
| 4,234 | 0.003779 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
Provides sx typing classes.
"""
from logging import getLogger
from suds import *
from suds.mx import *
from suds.sax import Namespace as NS
from suds.sax.text import Text
log = getLogger(__name__)
class Typer:
"""
Provides XML node typing as either automatic or manual.
@cvar types: A dict of class to xs type mapping.
@type types: dict
"""
    types = {
        # py2 long/unicode entries were collapsed to int/str by a 2to3-style
        # conversion, leaving duplicate keys; the surviving values are kept
        int : ('long', NS.xsdns),
        float : ('float', NS.xsdns),
        str : ('string', NS.xsdns),
        Text : ('string', NS.xsdns),
        bool : ('boolean', NS.xsdns),
    }
@classmethod
def auto(cls, node, value=None):
"""
Automatically set the node's xsi:type attribute based on either I{value}'s
class or the class of the node's text. When I{value} is an unmapped class,
the default type (xs:any) is set.
@param node: An XML node
@type node: L{sax.element.Element}
@param value: An object that is or would be the node's text.
@type value: I{any}
@return: The specified node.
@rtype: L{sax.element.Element}
"""
if value is None:
value = node.getText()
if isinstance(value, Object):
known = cls.known(value)
if known.name is None:
return node
tm = (known.name, known.namespace())
else:
tm = cls.types.get(value.__class__, cls.types.get(str))
cls.manual(node, *tm)
return node
@classmethod
def manual(cls, node, tval, ns=None):
"""
Set the node's xsi:type attribute based on either I{value}'s
        class or the class of the node's text. Then adds the referenced
prefix(s) to the node's prefix mapping.
@param node: An XML node
@type node: L{sax.element.Element}
@param tval: The name of the schema type.
@type tval: str
@param ns: The XML namespace of I{tval}.
@type ns: (prefix, uri)
@return: The specified node.
@rtype: L{sax.element.Element}
"""
xta = ':'.join((NS.xsins[0], 'type'))
node.addPrefix(NS.xsins[0], NS.xsins[1])
if ns is None:
node.set(xta, tval)
else:
ns = cls.genprefix(node, ns)
qname = ':'.join((ns[0], tval))
node.set(xta, qname)
node.addPrefix(ns[0], ns[1])
return node
@classmethod
def genprefix(cls, node, ns):
"""
Generate a prefix.
@param node: An XML node on which the prefix will be used.
@type node: L{sax.element.Element}
@param ns: A namespace needing an unique prefix.
@type ns: (prefix, uri)
@return: The I{ns} with a new prefix.
"""
for n in range(1, 1024):
p = 'ns%d' % n
u = node.resolvePrefix(p, default=None)
if u is None or u == ns[1]:
return (p, ns[1])
raise Exception('auto prefix, exhausted')
@classmethod
def known(cls, object):
try:
md = object.__metadata__
known = md.sxtype
return known
except:
pass
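# Usage sketch (added for illustration; not part of the original module):
#
#   from suds.sax.element import Element
#   node = Element('age')
#   node.setText('42')
#   Typer.auto(node, 42)                 # xsi:type inferred from the int
#   Typer.manual(node, 'int', NS.xsdns)  # or force a specific schema type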
|
Huai-Xv/CSU_FreeClassroom
|
setup.py
|
Python
|
gpl-3.0
| 322 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
setup(name='my_project',
version='0.1.0',
packages=['my_project'],
entry_points={
'console_scripts': [
'my_project = crawler.__main__:main'
]
},
install_requires='requests'
)
|
kollad/turbo-ninja
|
tools/bootstrap.py
|
Python
|
mit
| 1,403 | 0.001426 |
from distutils.dir_util import copy_tree, remove_tree
import os
import shutil
def _copy_function(source, destination):
print('Bootstrapping project at %s' % destination)
copy_tree(source, destination)
def create_app():
cwd = os.getcwd()
game_logic_path = os.path.join(cwd, 'game_logic')
    game_app_interface = os.path.join(cwd, 'game_app.py')
app_template = os.path.join(cwd, 'engine', 'app_template')
_game_logic_path_exists = os.path.exists(game_logic_path)
_game_app_interface_exists = os.path.exists(game_app_interface)
if _game_logic_path_exists or _game_app_interface_exists:
answer = input(
'game_app.py or game_logic module already exists. Continue? (y/n). ' +
'\nWARNING: This will remove all contents of game_logic module, use at your own risk:'.upper()
)
if answer == 'y':
if _game_app_interface_exists:
os.remove(game_app_interface)
if _game_logic_path_exists:
remove_tree(game_logic_path)
_copy_function(app_template, cwd)
else:
_copy_function(app_template, cwd)
if not os.path.exists('settings.yaml'):
shutil.copy2('settings.yaml.template', 'settings.yaml')
if not os.path.exists('logging.yaml'):
shutil.copy2('logging.yaml.template', 'logging.yaml')
if __name__ == '__main__':
create_app()
|
sixuanwang/SAMSaaS
|
wirecloud-develop/src/wirecloud/fiware/marketAdaptor/views.py
|
Python
|
gpl-2.0
| 5,496 | 0.00455 |
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2014 CoNWeT Lab., Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
import json
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from wirecloud.commons.baseviews import Resource
from wirecloud.commons.utils.http import get_absolute_reverse_url
from wirecloud.fiware.marketAdaptor.marketadaptor import MarketAdaptor
from wirecloud.platform.models import Market, MarketUserData
market_adaptors = {}
def get_market_adaptor(market_user, market):
if market_user is None or market_user == 'public':
market_user = None
username = ''
else:
username = market_user
if market_user not in market_adaptors:
market_adaptors[username] = {}
if market not in market_adaptors[username]:
m = get_object_or_404(Market, user__username=market_user, name=market)
options = json.loads(m.options)
market_adaptors[username][market] = MarketAdaptor(options['url'])
return market_adaptors[username][market]
def get_market_user_data(user, market_user, market_name):
if market_user == 'public':
market_user = None
user_data = {}
for user_data_entry in MarketUserData.objects.filter(market__user__username=market_user, market__name=market_name, user=user):
try:
user_data[user_data_entry.name] = json.loads(user_data_entry.value)
except:
user_data[user_data_entry.name] = None
try:
user_data['idm_token'] = user.social_auth.filter(provider='fiware').get().tokens['access_token']
except:
pass
return user_data
class ServiceCollection(Resource):
def read(self, request, market_user, market_name, store):
adaptor = get_market_adaptor(market_user, market_name)
user_data = get_market_user_data(request.user, market_user, market_name)
try:
result = adaptor.get_all_services_from_store(store, **user_data)
except:
return HttpResponse(status=502)
return HttpResponse(json.dumps(result), content_type='application/json; charset=UTF-8')
class ServiceSearchCollection(Resource):
def read(self, request, market_user, market_name, store='', search_string='widget'):
adaptor = get_market_adaptor(market_user, market_name)
user_data = get_market_user_data(request.user, market_user, market_name)
try:
result = adaptor.full_text_search(store, search_string, user_data)
except:
return HttpResponse(status=502)
        return HttpResponse(json.dumps(result), content_type='application/json; charset=UTF-8')
class ServiceEntry(Resource):
def read(self, request, market_user, market_name, store, offering_id):
adaptor = get_market_adaptor(market_user, market_name)
user_data = get_market_user_data(request.user, market_user, market_name)
try:
offering_info = adaptor.get_offering_info(store, offering_id, user_data)[0]
except:
return HttpResponse(status=502)
return HttpResponse(json.dumps(offering_info), content_type='application/json; charset=UTF-8')
class AllStoresServiceCollection(Resource):
def read(self, request, market_user, market_name):
adaptor = get_market_adaptor(market_user, market_name)
user_data = get_market_user_data(request.user, market_user, market_name)
result = {'resources': []}
try:
stores = adaptor.get_all_stores()
for store in stores:
store_services = adaptor.get_all_services_from_store(store['name'], **user_data)
result['resources'].extend(store_services['resources'])
except:
return HttpResponse(status=502)
return HttpResponse(json.dumps(result), content_type='application/json; charset=UTF-8')
class StoreCollection(Resource):
def read(self, request, market_user, market_name):
adaptor = get_market_adaptor(market_user, market_name)
try:
result = adaptor.get_all_stores()
except:
return HttpResponse(status=502)
        return HttpResponse(json.dumps(result), content_type='application/json; charset=UTF-8')
def start_purchase(request, market_user, market_name, store):
adaptor = get_market_adaptor(market_user, market_name)
user_data = get_market_user_data(request.user, market_user, market_name)
data = json.loads(request.body)
redirect_uri = get_absolute_reverse_url('wirecloud.fiware.store_redirect_uri', request)
try:
result = adaptor.start_purchase(store, data['offering_url'], redirect_uri, **user_data)
except:
return HttpResponse(status=502)
    return HttpResponse(json.dumps(result), content_type='application/json; charset=UTF-8')
|
MauHernandez/cyclope
|
cyclope/apps/locations/migrations/0001_initial.py
|
Python
|
gpl-3.0
| 2,676 | 0.007848 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Country'
db.create_table('locations_country', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
))
db.send_create_signal('locations', ['Country'])
# Adding model 'Region'
db.create_table('locations_region', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('country', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Country'])),
('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
))
db.send_create_signal('locations', ['Region'])
# Adding model 'City'
db.create_table('locations_city', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('region', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Region'])),
('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
))
db.send_create_signal('locations', ['City'])
def backwards(self, orm):
# Deleting model 'Country'
db.delete_table('locations_country')
# Deleting model 'Region'
db.delete_table('locations_region')
# Deleting model 'City'
        db.delete_table('locations_city')
models = {
'locations.city': {
'Meta': {'object_name': 'City'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['locations.Region']"})
        },
        'locations.country': {
'Meta': {'object_name': 'Country'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'locations.region': {
'Meta': {'object_name': 'Region'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['locations.Country']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
}
}
complete_apps = ['locations']
|
pparacch/PlayingWithPython
|
computationAndProgrammingUsingPython/src/classesAndObjects/week6_L11_part05.py
|
Python
|
mit
| 1,393 | 0.010768 |
class intSet(object):
"""An intSet is a set of integers
The value is represented by a list of ints, self.vals.
Each integer in the set occurs in self.vals exactly once."""
def __init__(self):
"""Create an empty set of integers"""
self.vals = []
def __str__(self):
"""Returns a string representation of self"""
self.vals.sort()
        return '{' + ','.join([str(e) for e in self.vals]) + '}'
def __len__(self):
return len(self.vals)
def intersect(self, other):
result = intSet()
for e in self.vals:
if e in other.vals:
result.insert(e)
return result
def insert(self, e):
"""Assumes e is an integer and inserts e into self"""
if not e in self.vals:
self.vals.append(e)
def member(self, e):
"""Assumes e is an integer
        Returns True if e is in self, and False otherwise"""
return e in self.vals
def remove(self, e):
"""Assumes e is an integer and removes e from self
Raises ValueError if e is not in self"""
try:
self.vals.remove(e)
except:
raise ValueError(str(e) + ' not found')
s = intSet()
print s
s.insert(3)
s.insert(4)
s.insert(9)
s.insert(5)
print s
t = intSet()
print t
t.insert(1)
t.insert(4)
t.insert(15)
t.insert(90)
print t
print t.__len__()
print len(t)
print s.intersect(t)
|
aniversarioperu/django-manolo
|
scrapers/tests/test_mincu_spider.py
|
Python
|
bsd-3-clause
| 1,379 | 0.002913 |
# -*- coding: utf-8 -*-
import os
import unittest
from manolo_scraper.spiders.mincu import MincuSpider
from utils import fake_response_from_file
class TestMincuSpider(unittest.TestCase):
def setUp(self):
self.spider = MincuSpider()
def test_parse_item(self):
filename = os.path.join('data/mincu', '18-08-2015.html')
items = self.spider.parse(fake_response_from_file(filename, meta={'date': u'18/08/2015'}))
item = next(items)
self.assertEqual(item.get('full_name'), u'INGRID BARRIONUEVO ECHEGARAY')
self.assertEqual(item.get('time_start'), u'16:40')
self.assertEqual(item.get('institution'), u'mincu')
self.assertEqual(item.get('id_document'), u'DNI')
self.assertEqual(item.get('id_number'), u'10085172')
self.assertEqual(item.get('entity'), u'PARTICULAR')
self.assertEqual(item.get('reason'), u'REUNIÓN DE TRABAJO')
self.assertEqual(item.get('host_name'), u'JOIZ ELIZABETH DOBLADILLO ORTIZ')
        self.assertEqual(item.get('title'), u'[SERVICIOS DE UN ASISTENTE EN COMUNICACIONES]')
self.assertEqual(item.get('office'), u'QHAPAQ ÑAN')
self.assertEqual(item.get('time_end'), u'16:53')
self.assertEqual(item.get('date'), u'2015-08-18')
number_of_items = 1 + sum(1 for x in items)
        self.assertEqual(number_of_items, 15)
|
djaodjin/djaodjin-survey
|
survey/urls/api/__init__.py
|
Python
|
bsd-2-clause
| 1,664 | 0.000601 |
# Copyright (c) 2020, DjaoDjin inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from django.conf.urls import url, include
urlpatterns = [
url(r'^campaign/', include('survey.urls.api.campaigns')),
url(r'^', include('survey.urls.api.matrix')), # No trailing '/'
# because of PATH_RE.
url(r'^sample/', include('survey.urls.api.sample')),
]
|
TMiguelT/PandasSchema
|
test/test_column.py
|
Python
|
gpl-3.0
| 2,051 | 0.003901 |
import unittest
import pandas as pd
from pandas_schema import Column
from pandas_schema.validation import CanConvertValidation, LeadingWhitespaceValidation, TrailingWhitespaceValidation
class SingleValidationColumn(unittest.TestCase):
"""
Test a column with one single validation
"""
NAME = 'col1'
col = Column(NAME, [CanConvertValidation(int)], allow_empty=False)
ser = pd.Series([
'a',
'b',
'c'
])
def test_name(self):
self.assertEqual(self.col.name, self.NAME, 'A Column does not store its name correctly')
def test_outputs(self):
results = self.col.validate(self.ser)
self.assertEqual(len(results), len(self.ser), 'A Column produces the wrong number of errors')
for i in range(2):
self.assertTrue(any([r.row == i for r in results]), 'A Column does not report errors for every row')
class DoubleValidationColumn(unittest.TestCase):
"""
Test a column with two different validations
"""
NAME = 'col1'
col = Column(NAME, [TrailingWhitespaceValidation(), LeadingWhitespaceValidation()], allow_empty=False)
ser = pd.Series([
' a ',
' b ',
' c '
])
def test_outputs(self):
        results = self.col.validate(self.ser)
# There should be 6 errors, 2 for each row
self.assertEqual(len(results), 2 * len(self.ser), 'A Column produces the wrong number of errors')
for i in range(2):
in_row = [r for r in results if r.row == i]
            self.assertEqual(len(in_row), 2, 'A Column does not report both errors for every row')
class AllowEmptyColumn(unittest.TestCase):
"""
Test a column with one single validation that allows empty columns
"""
NAME = 'col1'
col = Column(NAME, [CanConvertValidation(int)], allow_empty=True)
ser = pd.Series([
'',
])
def test_outputs(self):
results = self.col.validate(self.ser)
self.assertEqual(len(results), 0, 'allow_empty is not allowing empty columns')
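# End-to-end sketch (added for illustration; not part of the tests): the same
# Column objects are normally composed into a Schema and run over a DataFrame.
#
#   from pandas_schema import Schema
#   schema = Schema([Column('col1', [CanConvertValidation(int)])])
#   errors = schema.validate(pd.DataFrame({'col1': ['1', 'x']}))
#   # one error is reported: row 1 of 'col1' cannot be converted to int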
|
home-assistant/home-assistant
|
homeassistant/util/executor.py
|
Python
|
apache-2.0
| 3,555 | 0 |
"""Executor util helpers."""
from __future__ import annotations
from concurrent.futures import ThreadPoolExecutor
import contextlib
import logging
import queue
import sys
from threading import Thread
import time
import traceback
from .thread import async_raise
_LOGGER = logging.getLogger(__name__)
MAX_LOG_ATTEMPTS = 2
_JOIN_ATTEMPTS = 10
EXECUTOR_SHUTDOWN_TIMEOUT = 10
def _log_thread_running_at_shutdown(name: str, ident: int) -> None:
"""Log the stack of a thread that was still running at shutdown."""
frames = sys._current_frames() # pylint: disable=protected-access
stack = frames.get(ident)
formatted_stack = traceback.format_stack(stack)
_LOGGER.warning(
"Thread[%s] is still running at shutdown: %s",
name,
"".join(formatted_stack).strip(),
)
def join_or_interrupt_threads(
threads: set[Thread], timeout: float, log: bool
) -> set[Thread]:
"""Attempt to join or interrupt a set of threads."""
joined = set()
timeout_per_thread = timeout / len(threads)
for thread in threads:
thread.join(timeout=timeout_per_thread)
if not thread.is_alive() or thread.ident is None:
joined.add(thread)
continue
if log:
_log_thread_running_at_shutdown(thread.name, thread.ident)
        with contextlib.suppress(SystemError):
            # SystemError at this stage is usually a race condition
            # where the thread happens to die right before we force
# it to raise the exception
async_raise(thread.ident, SystemExit)
return joined
class InterruptibleThreadPoolExecutor(ThreadPoolExecutor):
"""A ThreadPoolExecutor instance that will not deadlock on shutdown."""
def shutdown(self, *args, **kwargs) -> None: # type: ignore
"""Shutdown backport from cpython 3.9 with interrupt support added."""
with self._shutdown_lock: # type: ignore[attr-defined]
self._shutdown = True
# Drain all work items from the queue, and then cancel their
# associated futures.
while True:
try:
work_item = self._work_queue.get_nowait()
except queue.Empty:
break
if work_item is not None:
work_item.future.cancel()
# Send a wake-up to prevent threads calling
# _work_queue.get(block=True) from permanently blocking.
self._work_queue.put(None)
# The above code is backported from python 3.9
#
# For maintainability join_threads_or_timeout is
# a separate function since it is not a backport from
# cpython itself
#
self.join_threads_or_timeout()
def join_threads_or_timeout(self) -> None:
"""Join threads or timeout."""
remaining_threads = set(self._threads) # type: ignore[attr-defined]
start_time = time.monotonic()
timeout_remaining: float = EXECUTOR_SHUTDOWN_TIMEOUT
attempt = 0
while True:
if not remaining_threads:
return
attempt += 1
remaining_threads -= join_or_interrupt_threads(
remaining_threads,
timeout_remaining / _JOIN_ATTEMPTS,
attempt <= MAX_LOG_ATTEMPTS,
)
timeout_remaining = EXECUTOR_SHUTDOWN_TIMEOUT - (
time.monotonic() - start_time
)
if timeout_remaining <= 0:
return
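# Usage sketch (added for illustration; not part of the module): a drop-in
# replacement for ThreadPoolExecutor whose shutdown() cancels queued work and
# interrupts lingering threads instead of blocking indefinitely.
#
#   executor = InterruptibleThreadPoolExecutor(max_workers=4)
#   executor.submit(some_callable)
#   executor.shutdown()   # drain the queue, then join-or-interrupt workers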
|
CallMeMhz/megablog
|
src/blog/urls.py
|
Python
|
gpl-3.0
| 235 | 0 |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
    url(r'^new', views.add_post_form, name='add'),
url(r'^post/(?P<slug>[\w-]+)/$', views.post_view, name='post'),
]
|
radproject/protocols
|
script.py
|
Python
|
cc0-1.0
| 3,299 | 0.036072 |
#!/usr/bin/env python
#coding: utf8
#get the list of the scanned election results papers ( proces verbaux )
# sudo apt-get install python-setuptools
# easy_install beautifulsoup4
import urllib2
from bs4 import BeautifulSoup
from string import maketrans
from string import whitespace
import csv
import time
import json
import os  # for the use of the wget command to download the files
source_url = "http://www.isie.tn/index.php/fr/243-proces-verbaux-de-depouillement-et-les-decisions-de-correction-y-afferentes.html"
# using urllib2 to read the remote html page
html = urllib2.urlopen(source_url).read()
#using BeautifulSoup library for pulling data out of HTML
soup = BeautifulSoup(html)
#gettting all the disticts represented by a directory tree
main_directory =soup.find('ul', class_="php-file-tree")
#print main_directory
districts = main_directory.find_all('li', recursive=False)
for district in districts :
district_link = district.findChild('a');
district_name = district_link.contents[0].encode('utf-8').strip().replace(' ', '_')
if not os.path.exists(district_name):
os.makedirs(district_name)
#delegation : electoral level 2
delegation_directory= district.findChild('ul',recursive=False)
if delegation_directory == None :
print "Error:data unavailable, Level: district , name:"+district_name
else:
delegations = delegation_directory.find_all('li', recursive=False)
#Processing delegation level
for delegation in delegations :
delegation_link = delegation.findChild('a');
delegation_name = delegation_link.contents[0].encode('utf-8').strip().replace(' ', '_')
if not os.path.exists(district_name+ "/" +delegation_name):
os.makedirs(district_name+ "/" +delegation_name)
polling_center_directory= delegation.findChild('ul',recursive=False)
if polling_center_directory == None :
print "Error:data unavailable, Level: delegation , name:"+delegation_name
else:
polling_centers = polling_center_directory.find_all('li', class_='pft-directory', recursive=False)
#Processing polling center level
for polling_center in polling_centers:
polling_center_link = polling_center.findChild('a');
polling_center_name = polling_center_link.contents[0].encode('utf-8').strip().replace(' ', '_')
print polling_center_name
if not os.path.exists(district_name+ "/" +delegation_name+ "/" + polling_center_name):
os.makedirs(district_name+ "/" +delegation_name+ "/" + polling_center_name)
#find files list
files_directory= polling_center.findChild('ul',recursive=False)
if files_directory == None :
print "Error:data unavailable, Level: Polling_center , name:"+ polling_center_name
else:
files = files_directory.find_all('li', class_='pft-file', recursive=False)
                        # pv stands for Proces Verbal, which in English means "protocol" in election parlance
for pv in files:
pv_link = pv.findChild('a', href=True)
pv_ref = pv_link['href']
file_link = "http://isie.tn"+ pv_ref
fullurl = urllib2.quote(file_link.encode('utf-8'), safe="%/:=&?~#+!$,;'@()*[]")
                            download_path= district_name+ "/" +delegation_name+ "/" + polling_center_name
download_command= "wget -P " + download_path + " " + fullurl
os.system(download_command)
|
johnsonc/OTM2
|
opentreemap/otm1_migrator/migration_rules/philadelphia.py
|
Python
|
gpl-3.0
| 3,768 | 0.000531 |
from otm1_migrator.migration_rules.standard_otm1 import MIGRATION_RULES
from treemap.models import ITreeCodeOverride, ITreeRegion, User
UDFS = {
'plot': {
'owner_additional_id': {
'udf.name': 'Owner Additional Id'
},
'owner_additional_properties': {
'udf.name': 'Owner Additional Properties'
},
'type': {
'udf.name': 'Plot Type',
'udf.choices': ['Well/Pit', 'Median/Island', 'Tree Lawn',
'Park', 'Planter', 'Other', 'Yard',
'Natural Area']
},
'powerline_conflict_potential': {
'udf.name': 'Powerlines Overhead',
'udf.choices': ['Yes', 'No', 'Unknown']
},
'sidewalk_damage': {
'udf.name': 'Sidewalk Damage',
'udf.choices': ['Minor or No Damage', 'Raised More Than 3/4 Inch']
}
},
'tree': {
'sponsor': {'udf.name': 'Sponsor'},
'projects': {'udf.name': 'Projects'},
'canopy_condition': {
'udf.name': 'Canopy Condition',
'udf.choices': ['Full - No Gaps',
'Small Gaps (up to 25% missing)',
'Moderate Gaps (up to 50% missing)',
'Large Gaps (up to 75% missing)',
'Little or None (up to 100% missing)']
},
'condition': {
'udf.name': 'Tree Condition',
'udf.choices': ['Dead', 'Critical', 'Poor',
'Fair', 'Good',
'Very Good', 'Excellent']
}
}
}
SORT_ORDER_INDEX = {
'Bucks': 3,
'Burlington': 4,
'Camden': 5,
'Chester': 6,
'Delaware': 7,
'Gloucester': 8,
'Kent': 9,
'Mercer': 10,
'Montgomery': 11,
'New Castle': 12,
'Salem': 13,
'Sussex': 14,
}
def create_override(species_obj, species_dict):
for region in ['NoEastXXX', 'PiedmtCLT']:
override = ITreeCodeOverride(
instance_species_id=species_obj.pk,
region=ITreeRegion.objects.get(code=region),
itree_code=species_dict['fields']['itree_code'])
override.save_with_user(User.system_user())
return species_obj
MIGRATION_RULES['species']['postsave_actions'] = (MIGRATION_RULES['species']
.get('postsave_actions', [])
+ [create_override])
def mutate_boundary(boundary_obj, boundary_dict):
otm1_fields = boundary_dict.get('fields')
if ((boundary_obj.name.find('County') != -1
or boundary_obj.name == 'Philadelphia')):
boundary_obj.category = 'County'
boundary_obj.sort_order = 1
elif otm1_fields['county'] == 'Philadelphia':
boundary_obj.category = 'Philadelphia Neighborhood'
boundary_obj.sort_order = 2
else:
county = otm1_fields['county']
boundary_obj.category = county + ' Township'
boundary_obj.sort_order = SORT_ORDER_INDEX[county]
    return boundary_obj
MIGRATION_RULES['boundary']['presave_actions'] = (MIGRATION_RULES['boundary']
.get('presave_actions', [])
+ [mutate_boundary])
MIGRATION_RULES['species']['missing_fields'] |= {'other'}
# these fields don't exist in the ptm fixture, so can't be specified
# as a value that gets discarded. Remove them.
MIGRATION_RULES['species']['removed_fields'] -= {'family'}
MIGRATION_RULES['tree']['removed_fields'] -= {'pests', 'url'}
# this field doesn't exist, so can no longer have a to -> from def
del MIGRATION_RULES['species']['renamed_fields']['other_part_of_name']
|
kun--hust/sccloud
|
test/probe/test_object_failures.py
|
Python
|
apache-2.0
| 7,804 | 0 |
#!/usr/bin/python -u
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from os import listdir, unlink
from os.path import join as path_join
from unittest import main
from uuid import uuid4
from swiftclient import client
from swift.common import direct_client
from swift.common.exceptions import ClientException
from swift.common.utils import hash_path, readconf
from swift.obj.diskfile import write_metadata, read_metadata, get_data_dir
from test.probe.common import ReplProbeTest
RETRIES = 5
def get_data_file_path(obj_dir):
files = []
# We might need to try a few times if a request hasn't yet settled. For
# instance, a PUT can return success when just 2 of 3 nodes has completed.
for attempt in xrange(RETRIES + 1):
try:
files = sorted(listdir(obj_dir), reverse=True)
break
except Exception:
if attempt < RETRIES:
time.sleep(1)
else:
raise
for filename in files:
return path_join(obj_dir, filename)
class TestObjectFailures(ReplProbeTest):
def _setup_data_file(self, container, obj, data):
client.put_container(self.url, self.token, container,
headers={'X-Storage-Policy':
self.policy.name})
client.put_object(self.url, self.token, container, obj, data)
odata = client.get_object(self.url, self.token, container, obj)[-1]
self.assertEquals(odata, data)
opart, onodes = self.object_ring.get_nodes(
self.account, container, obj)
onode = onodes[0]
node_id = (onode['port'] - 6000) / 10
device = onode['device']
hash_str = hash_path(self.account, container, obj)
obj_server_conf = readconf(self.configs['object-server'][node_id])
devices = obj_server_conf['app:object-server']['devices']
obj_dir = '%s/%s/%s/%s/%s/%s/' % (devices, device,
get_data_dir(self.policy),
opart, hash_str[-3:], hash_str)
data_file = get_data_file_path(obj_dir)
return onode, opart, data_file
def run_quarantine(self):
container = 'container-%s' % uuid4()
obj = 'object-%s' % uuid4()
        onode, opart, data_file = self._setup_data_file(container, obj,
'VERIFY')
metadata = read_metadata(data_file)
metadata['ETag'] = 'badetag'
write_metadata(data_file, metadata)
odata = direct_client.direct_get_object(
            onode, opart, self.account, container, obj, headers={
                'X-Backend-Storage-Policy-Index': self.policy.idx})[-1]
self.assertEquals(odata, 'VERIFY')
try:
direct_client.direct_get_object(
onode, opart, self.account, container, obj, headers={
'X-Backend-Storage-Policy-Index': self.policy.idx})
raise Exception("Did not quarantine object")
except ClientException as err:
self.assertEquals(err.http_status, 404)
def run_quarantine_range_etag(self):
container = 'container-range-%s' % uuid4()
obj = 'object-range-%s' % uuid4()
onode, opart, data_file = self._setup_data_file(container, obj,
'RANGE')
metadata = read_metadata(data_file)
metadata['ETag'] = 'badetag'
write_metadata(data_file, metadata)
base_headers = {'X-Backend-Storage-Policy-Index': self.policy.idx}
for header, result in [({'Range': 'bytes=0-2'}, 'RAN'),
({'Range': 'bytes=1-11'}, 'ANGE'),
({'Range': 'bytes=0-11'}, 'RANGE')]:
req_headers = base_headers.copy()
req_headers.update(header)
odata = direct_client.direct_get_object(
onode, opart, self.account, container, obj,
headers=req_headers)[-1]
self.assertEquals(odata, result)
try:
direct_client.direct_get_object(
onode, opart, self.account, container, obj, headers={
'X-Backend-Storage-Policy-Index': self.policy.idx})
raise Exception("Did not quarantine object")
except ClientException as err:
self.assertEquals(err.http_status, 404)
def run_quarantine_zero_byte_get(self):
container = 'container-zbyte-%s' % uuid4()
obj = 'object-zbyte-%s' % uuid4()
onode, opart, data_file = self._setup_data_file(container, obj, 'DATA')
metadata = read_metadata(data_file)
unlink(data_file)
with open(data_file, 'w') as fpointer:
write_metadata(fpointer, metadata)
try:
direct_client.direct_get_object(
onode, opart, self.account, container, obj, conn_timeout=1,
response_timeout=1, headers={'X-Backend-Storage-Policy-Index':
self.policy.idx})
raise Exception("Did not quarantine object")
except ClientException as err:
self.assertEquals(err.http_status, 404)
def run_quarantine_zero_byte_head(self):
container = 'container-zbyte-%s' % uuid4()
obj = 'object-zbyte-%s' % uuid4()
onode, opart, data_file = self._setup_data_file(container, obj, 'DATA')
metadata = read_metadata(data_file)
unlink(data_file)
with open(data_file, 'w') as fpointer:
write_metadata(fpointer, metadata)
try:
direct_client.direct_head_object(
onode, opart, self.account, container, obj, conn_timeout=1,
response_timeout=1, headers={'X-Backend-Storage-Policy-Index':
self.policy.idx})
raise Exception("Did not quarantine object")
except ClientException as err:
self.assertEquals(err.http_status, 404)
def run_quarantine_zero_byte_post(self):
container = 'container-zbyte-%s' % uuid4()
obj = 'object-zbyte-%s' % uuid4()
onode, opart, data_file = self._setup_data_file(container, obj, 'DATA')
metadata = read_metadata(data_file)
unlink(data_file)
with open(data_file, 'w') as fpointer:
write_metadata(fpointer, metadata)
try:
headers = {'X-Object-Meta-1': 'One', 'X-Object-Meta-Two': 'Two',
'X-Backend-Storage-Policy-Index': self.policy.idx}
direct_client.direct_post_object(
onode, opart, self.account,
container, obj,
headers=headers,
conn_timeout=1,
response_timeout=1)
raise Exception("Did not quarantine object")
except ClientException as err:
self.assertEquals(err.http_status, 404)
def test_runner(self):
self.run_quarantine()
self.run_quarantine_range_etag()
self.run_quarantine_zero_byte_get()
self.run_quarantine_zero_byte_head()
self.run_quarantine_zero_byte_post()
if __name__ == '__main__':
main()
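# Reading aid (added; not part of the original tests): each run_quarantine_*
# case corrupts one replica (a bad ETag in the metadata, or a zero-byte data
# file) and then hits that node directly; the object server is expected to
# quarantine the corrupt file, so the follow-up direct request returns 404.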
|
MatthieuDartiailh/pyvisa
|
visa.py
|
Python
|
mit
| 1,581 | 0.003163 |
# -*- coding: utf-8 -*-
"""
pyvisa.visa
~~~~~~~~~~~
Module to provide an import shortcut for the most common VISA operations.
This file is part of PyVISA.
:copyright: 2014 by PyVISA Authors, see AUTHORS for more details.
:license: MIT, see COPYING for more details.
"""
from __future__ import division, unicode_literals, print_function, absolute_import
from pyvisa import logger, __version__, log_to_screen, constants
from pyvisa.highlevel import ResourceManager
from pyvisa.errors import (Error, VisaIOError, VisaIOWarning, VisaTypeError,
UnknownHandler, OSNotSupported, InvalidBinaryFormat,
InvalidSession, LibraryError)
# This is needed to registry all resources.
from pyvisa.resources import Resource
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='PyVISA command-line utilities')
parser.add_argument('--backend', '-b', dest='backend', action='store', default=None,
help='backend to be used (default: ni)')
subparsers = parser.add_subparsers(title='command', dest='command')
    info_parser = subparsers.add_parser('info', help='print information to diagnose PyVISA')
console_parser = subparsers.add_parser('shell', help='start the PyVISA console')
args = parser.parse_args()
if args.command == 'info':
from pyvisa import util
util.get_debug_info()
    elif args.command == 'shell':
from pyvisa import shell
shell.main('@' + args.backend if args.backend else '')
|
renalreg/radar
|
radar/api/serializers/medications.py
|
Python
|
agpl-3.0
| 4,625 | 0.002162 |
from cornflake import fields
from cornflake.exceptions import ValidationError
from cornflake.sqlalchemy_orm import ModelSerializer, ReferenceField
from cornflake.validators import max_length, min_, none_if_blank, optional, required
from radar.api.serializers.common import (
MetaMixin,
PatientMixin,
SourceMixin,
StringLookupField,
)
from radar.api.serializers.validators import valid_date_for_patient
from radar.models.medications import (
CurrentMedication,
Drug,
DrugGroup,
Medication,
MEDICATION_DOSE_UNITS,
MEDICATION_ROUTES
)
class DrugGroupSerializer(ModelSerializer):
class Meta(object):
model_class = DrugGroup
exclude = ['parent_drug_group_id']
class DrugGroupField(ReferenceField):
model_class = DrugGroup
serializer_class = DrugGroupSerializer
class DrugSerializer(ModelSerializer):
drug_group = DrugGroupField()
class Meta(object):
model_class = Drug
exclude = ['drug_group_id']
class DrugField(ReferenceField):
model_class = Drug
serializer_class = DrugSerializer
class MedicationSerializer(PatientMixin, SourceMixin, MetaMixin, ModelSerializer):
from_date = fields.DateField()
to_date = fields.DateField(required=False)
drug = DrugField(required=False)
dose_quantity = fields.FloatField(required=False, validators=[min_(0)])
    dose_unit = StringLookupField(MEDICATION_DOSE_UNITS, required=False)
frequency = fields.StringField(required=False, validators=[none_if_blank(), optional(), max_length(1000)])
    route = StringLookupField(MEDICATION_ROUTES, required=False)
drug_text = fields.StringField(required=False, validators=[none_if_blank(), optional(), max_length(10000)])
dose_text = fields.StringField(required=False, validators=[none_if_blank(), optional(), max_length(10000)])
class Meta(object):
model_class = Medication
exclude = ['drug_id']
validators = [
valid_date_for_patient('from_date'),
valid_date_for_patient('to_date'),
]
def pre_validate(self, data):
# Coded drug overrides drug free-text
if data['drug']:
data['drug_text'] = None
return data
def validate(self, data):
data = super(MedicationSerializer, self).validate(data)
# To date must be after from date
if data['to_date'] is not None and data['to_date'] < data['from_date']:
raise ValidationError({'to_date': 'Must be on or after from date.'})
# Must specify either a coded drug or a free-text drug
if data['drug'] is None and data['drug_text'] is None:
raise ValidationError({
'drug': 'Must specify a drug.',
'drug_text': 'Must specify a drug.',
})
# Coded dose quantities must have a unit
if data['dose_quantity'] is not None:
self.run_validators_on_field(data, 'dose_unit', [required()])
return data
class CurrentMedicationSerializer(PatientMixin, SourceMixin, MetaMixin, ModelSerializer):
date_recorded = fields.DateField()
drug = DrugField(required=False)
dose_quantity = fields.FloatField(required=False, validators=[min_(0)])
dose_unit = StringLookupField(MEDICATION_DOSE_UNITS, required=False)
frequency = fields.StringField(required=False, validators=[none_if_blank(), optional(), max_length(1000)])
route = StringLookupField(MEDICATION_ROUTES, required=False)
drug_text = fields.StringField(required=False, validators=[none_if_blank(), optional(), max_length(10000)])
dose_text = fields.StringField(required=False, validators=[none_if_blank(), optional(), max_length(10000)])
class Meta(object):
model_class = CurrentMedication
exclude = ['drug_id']
validators = [
valid_date_for_patient('date_recorded'),
]
def pre_validate(self, data):
# Coded drug overrides drug free-text
if data['drug']:
data['drug_text'] = None
return data
def validate(self, data):
data = super(CurrentMedicationSerializer, self).validate(data)
# Must specify either a coded drug or a free-text drug
if data['drug'] is None and data['drug_text'] is None:
raise ValidationError({
'drug': 'Must specify a drug.',
'drug_text': 'Must specify a drug.',
})
# Coded dose quantities must have a unit
if data['dose_quantity'] is not None:
self.run_validators_on_field(data, 'dose_unit', [required()])
return data
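The two serializer classes above share the same cross-field rules; the plain
function below restates them for illustration only (the real serializers also
need patient/source context to run), and check_drug_fields is a hypothetical
name, not part of the module.

def check_drug_fields(drug, drug_text, dose_quantity, dose_unit):
    """Mirror of the shared validate() logic above: a drug must be given
    either coded or as free text, and a coded dose quantity needs a unit."""
    errors = {}
    if drug is None and drug_text is None:
        errors['drug'] = errors['drug_text'] = 'Must specify a drug.'
    if dose_quantity is not None and dose_unit is None:
        errors['dose_unit'] = 'This field is required.'
    return errors

assert check_drug_fields(None, None, None, None)        # no drug at all: errors
assert not check_drug_fields('DRUG', None, 75.0, 'mg')  # coded drug + unit: clean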
|
PalmDr/XRD-Data-Analysis-Toolkit
|
Beta1.3/Builder.py
|
Python
|
apache-2.0
| 2,100 | 0.012857 |
__author__ = 'j'
from tkinter import ttk
from tkinter import *
from TreeView import *
def buildFrame(root):
sub_1 = Frame(root)
sub_1.pack(side=LEFT,anchor = 'w', fill='both', expand=True)
sub_1_1 = Frame(sub_1)
    sub_1_1.pack(side=TOP, anchor='n',fill='both',expand=True)
    sub_1_2 = Frame(sub_1)
sub_1_2.pack(side=BOTTOM,anchor = 's',expand=False,fill='x')
sub_2 = Frame(root)
sub_2.pack(side=RIGHT, anchor='w', fill='both', expand=True)
sub_2_1 = Frame(sub_2)
sub_2_1.pack(side=LEFT, anchor='w',expand=False)
sub_2_2 = Frame(sub_2)
sub_2_2.pack(side=RIGHT,anchor='e',fill='both',expand=True)
sub_2_2_1 = Frame(sub_2_2)
sub_2_2_1.pack(side=TOP,anchor='e',fill='both',expand=True)
return sub_1, sub_2, sub_1_1, sub_1_2, sub_2_1, sub_2_2, sub_2_2_1
def buildTree(sub_1_1, sub_1_2):
treeview = ttk.Treeview(master=sub_1_1,columns=("fullpath", "type"), displaycolumns='')
treeview.grid(column=0, row=0, sticky='nsew', in_=sub_1_1)
treeview.bind('<<TreeviewOpen>>', update_tree)
vsb = Scrollbar(orient="vertical", command=treeview.yview)
hsb = Scrollbar(orient="horizontal", command=treeview.xview)
treeview.configure(yscrollcommand=vsb.set, xscrollcommand=hsb.set)
vsb.grid(column=1, row=0, sticky='ns', in_=sub_1_1)
hsb.grid(column=0, row=1, sticky='ew', in_=sub_1_1)
sub_1_1.grid_columnconfigure(0, weight=1)
sub_1_1.grid_rowconfigure(0, weight=1)
entry = Entry(master=sub_1_2)
entry.pack(side=LEFT,anchor="w",expand=True,fill='x')
return treeview, entry
def buildSelectedBox(sub_2_2_1):
selected = Listbox(master=sub_2_2_1,selectmode=EXTENDED)
vsb = Scrollbar(orient="vertical", command=selected.yview)
hsb = Scrollbar(orient="horizontal", command=selected.xview)
selected.grid(column=0, row=0, sticky='nsew', in_=sub_2_2_1)
vsb.grid(column=1, row=0, sticky='ns', in_=sub_2_2_1)
hsb.grid(column=0, row=1, sticky='ew', in_=sub_2_2_1)
sub_2_2_1.grid_columnconfigure(0, weight=1)
sub_2_2_1.grid_rowconfigure(0, weight=1)
return selected
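A hypothetical entry point showing how the three builders above fit together
(assumes the imported TreeView module provides update_tree; this block is not
part of the original file):

if __name__ == '__main__':
    root = Tk()
    sub_1, sub_2, sub_1_1, sub_1_2, sub_2_1, sub_2_2, sub_2_2_1 = buildFrame(root)
    treeview, entry = buildTree(sub_1_1, sub_1_2)
    selected = buildSelectedBox(sub_2_2_1)
    root.mainloop()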
|
deisi/home-assistant
|
homeassistant/components/device_tracker/unifi.py
|
Python
|
mit
| 2,645 | 0 |
"""
Support for Unifi WAP controllers.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.unifi/
"""
import logging
import urllib
from homeassistant.components.device_tracker import DOMAIN
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
from homeassistant.helpers import validate_config
# Unifi package doesn't list urllib3 as a requirement
REQUIREMENTS = ['urllib3', 'unifi==1.2.5']
_LOGGER = logging.getLogger(__name__)
CONF_PORT = 'port'
CONF_SITE_ID = 'site_id'
def get_scanner(hass, config):
"""Setup Unifi device_tracker."""
from unifi.controller import Controller
if not validate_config(config, {DOMAIN: [CONF_USERNAME,
CONF_PASSWORD]},
_LOGGER):
_LOGGER.error('Invalid configuration')
return False
this_config = config[DOMAIN]
host = this_config.get(CONF_HOST, 'localhost')
username = this_config.get(CONF_USERNAME)
password = this_config.get(CONF_PASSWORD)
site_id = this_config.get(CONF_SITE_ID, 'default')
try:
port = int(this_config.get(CONF_PORT, 8443))
except ValueError:
_LOGGER.error('Invalid port (must be numeric like 8443)')
return False
try:
ctrl = Controller(host, username, password, port, 'v4', site_id)
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to connect to unifi: %s', ex)
return False
return UnifiScanner(ctrl)
class UnifiScanner(object):
"""Provide device_tracker support from Unifi WAP client data."""
def __init__(self, controller):
"""Initialize the scanner."""
self._controller = controller
self._update()
def _update(self):
"""Get the clients from the device."""
try:
clients = self._controller.get_clients()
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to scan clients: %s', ex)
clients = []
self._clients = {client['mac']: client for client in clients}
def scan_devices(self):
"""Scan for devices."""
        self._update()
return self._clients.keys()
def get_device_name(self, mac):
"""Return the name (if known) of the device.
If a name has been set in Unifi, then return that, else
        return the hostname if it has been detected.
"""
client = self._clients.get(mac, {})
name = client.get('name') or client.get('hostname')
_LOGGER.debug('Device %s name %s', mac, name)
return name
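A standalone sketch of driving the scanner outside Home Assistant; the host,
credentials and port below are placeholders:

from unifi.controller import Controller

ctrl = Controller('192.168.1.1', 'admin', 'secret', 8443, 'v4', 'default')
scanner = UnifiScanner(ctrl)
for mac in scanner.scan_devices():
    print(mac, scanner.get_device_name(mac))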
|
antoinecarme/pyaf
|
tests/artificial/transf_Logit/trend_MovingAverage/cycle_12/ar_12/test_artificial_128_Logit_MovingAverage_12_12_100.py
|
Python
|
bsd-3-clause
| 267 | 0.086142 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 12, transform = "Logit", sigma = 0.0, exog_count = 100, ar_order = 12);
|
|
eedf/becours
|
becours/settings.py
|
Python
|
mit
| 3,562 | 0.001123 |
"""
Django settings for becours project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'v8rea$)b8+a)1vbdbdn727zw7#hj$4czarlp)*j&ei@eh%=!9^'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'cuser',
'booking',
'accounting',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'booking.middleware.CuserMiddleware',
]
ROOT_URLCONF = 'becours.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'becours/templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'becours.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': 'localhost',
'USER': 'postgres',
'PASSWORD': '',
'NAME': 'becours',
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'fr-fr'
TIME_ZONE = 'Europe/Paris'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'becours', 'static'),
)
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
LOGIN_URL = 'auth:login'
USE_THOUSAND_SEPARATOR = True
|
dparks1134/STAMP
|
stamp/plugins/samples/plots/SeqHistogram.py
|
Python
|
gpl-3.0
| 9,822 | 0.032376 |
#=======================================================================
# Author: Donovan Parks
#
# Sequence histogram plot.
#
# Copyright 2011 Donovan Parks
#
# This file is part of STAMP.
#
# STAMP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# STAMP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with STAMP. If not, see <http://www.gnu.org/licenses/>.
#=======================================================================
import sys
import math
from PyQt4 import QtCore, QtGui
from stamp.plugins.samples.AbstractSamplePlotPlugin import AbstractSamplePlotPlugin, TestWindow, ConfigureDialog
from stamp.plugins.samples.plots.configGUI.seqHistogramUI import Ui_SeqHistogramDialog
class SeqHistogram(AbstractSamplePlotPlugin):
'''
Sequence histogram plot.
'''
def __init__(self, preferences, parent=None):
AbstractSamplePlotPlugin.__init__(self, preferences, parent)
self.preferences = preferences
self.name = 'Sequence histogram'
self.type = 'Exploratory'
self.settings = preferences['Settings']
self.figWidth = self.settings.value(self.name + '/width', 7.0).toDouble()[0]
self.figHeight = self.settings.value(self.name + '/height', 7.0).toDouble()[0]
self.bCustomBinWidth = self.settings.value(self.name + '/custom bin width', False).toBool()
self.binWidth = self.settings.value(self.name + '/bin width', 100.0).toDouble()[0]
self.yAxisLogScale = self.settings.value(self.name + '/log scale', False).toBool()
self.bCustomXaxis = self.settings.value(self.name + '/custom x-axis extents', False).toBool()
self.xLimitLeft = self.settings.value(self.name + '/min value', 0.0).toDouble()[0]
self.xLimitRight = self.settings.value(self.name + '/max value', 1.0).toDouble()[0]
self.legendPos = self.settings.value(self.name + '/legend position', 0).toInt()[0]
def mirrorProperties(self, plotToCopy):
self.name = plotToCopy.name
self.figWidth = plotToCopy.figWidth
self.figHeight = plotToCopy.figHeight
self.bCustomBinWidth = plotToCopy.bCustomBinWidth
self.binWidth = plotToCopy.binWidth
self.yAxisLogScale = plotToCopy.yAxisLogScale
self.bCustomXaxis = plotToCopy.bCustomXaxis
self.xLimitLeft = plotToCopy.xLimitLeft
self.xLimitRight = plotToCopy.xLimitRight
self.legendPos = plotToCopy.legendPos
def plot(self, profile, statsResults):
if len(profile.profileDict) <= 0:
self.emptyAxis()
return
# *** Colour of plot elements
axesColour = str(self.preferences['Axes colour'].name())
profile1Colour = str(self.preferences['Sample 1 colour'].name())
profile2Colour = str(self.preferences['Sample 2 colour'].name())
# *** Get sequence counts
seqs1 = profile.getSequenceCounts(0)
seqs2 = profile.getSequenceCounts(1)
# *** Set x-axis limit
self.xMin = min(min(seqs1),min(seqs2))
if self.xLimitLeft == None:
self.xLimitLeft = self.xMin
self.xMax = max(max(seqs1),max(seqs2))
if self.xLimitRight == None:
self.xLimitRight = self.xMax
# Set bin width
if not self.bCustomBinWidth:
self.binWidth = (self.xMax - self.xMin) / 40
# *** Set size of figure
self.fig.clear()
self.fig.set_size_inches(self.figWidth, self.figHeight)
heightBottomLabels = 0.4 # inches
widthSideLabel = 0.5 # inches
padding = 0.2 # inches
axesHist = self.fig.add_axes([widthSideLabel/self.figWidth,heightBottomLabels/self.figHeight,\
1.0-(widthSideLabel+padding)/self.figWidth,\
1.0-(heightBottomLabels+padding)/self.figHeight])
# *** Histogram plot
bins = [0]
binEnd = self.binWidth
while binEnd <= self.xMax:
bins.append(binEnd)
binEnd += self.binWidth
bins.append(binEnd)
n, b, patches = axesHist.hist([seqs1, seqs2], bins=bins, log=self.yAxisLogScale)
for patch in patches[0]:
patch.set_facecolor(profile1Colour)
for patch in patches[1]:
patch.set_facecolor(profile2Colour)
if self.bCustomXaxis:
axesHist.set_xlim(self.xLimitLeft, self.xLimitRight)
axesHist.set_xlabel('Sequences')
axesHist.set_ylabel('Number of features')
# *** Prettify plot
if self.legendPos != -1:
            legend = axesHist.legend([patches[0][0], patches[1][0]], (profile.sampleNames[0], profile.sampleNames[1]), loc=self.legendPos)
legend.get_frame().set_linewidth(0)
for a in axesHist.yaxis.majorTicks:
a.tick1On=True
a.tick2On=False
for a in axesHist.xaxis.majorTicks:
a.tick1On=True
a.tick2On=False
for line in axesHist.yaxis.get_ticklines():
line.set_color(axesColour)
for line in axesHist.xaxis.get_ticklines():
line.set_color(axesColour)
for loc, spine in axesHist.spines.iteritems():
if loc in ['right','top']:
spine.set_color('none')
else:
spine.set_color(axesColour)
self.updateGeometry()
self.draw()
def configure(self, profile, statsResults):
self.profile = profile
self.configDlg = ConfigureDialog(Ui_SeqHistogramDialog)
self.connect(self.configDlg.ui.chkCustomBinWidth, QtCore.SIGNAL('toggled(bool)'), self.changeCustomBinWidth)
self.connect(self.configDlg.ui.chkCustomXaxis, QtCore.SIGNAL('toggled(bool)'), self.changeCustomXaxis)
self.connect(self.configDlg.ui.btnXmin, QtCore.SIGNAL('clicked()'), self.setXaxisMin)
self.connect(self.configDlg.ui.btnXmax, QtCore.SIGNAL('clicked()'), self.setXaxisMax)
self.configDlg.ui.spinFigWidth.setValue(self.figWidth)
self.configDlg.ui.spinFigHeight.setValue(self.figHeight)
self.configDlg.ui.chkCustomBinWidth.setChecked(self.bCustomBinWidth)
self.configDlg.ui.spinBinWidth.setValue(self.binWidth)
self.configDlg.ui.chkLogScale.setChecked(self.yAxisLogScale)
self.configDlg.ui.chkCustomXaxis.setChecked(self.bCustomXaxis)
self.configDlg.ui.spinXmin.setValue(self.xLimitLeft)
self.configDlg.ui.spinXmax.setValue(self.xLimitRight)
self.changeCustomBinWidth()
self.changeCustomXaxis()
# legend position
if self.legendPos == 0:
self.configDlg.ui.radioLegendPosBest.setDown(True)
elif self.legendPos == 1:
self.configDlg.ui.radioLegendPosUpperRight.setChecked(True)
elif self.legendPos == 7:
self.configDlg.ui.radioLegendPosCentreRight.setChecked(True)
elif self.legendPos == 4:
self.configDlg.ui.radioLegendPosLowerRight.setChecked(True)
elif self.legendPos == 2:
self.configDlg.ui.radioLegendPosUpperLeft.setChecked(True)
elif self.legendPos == 6:
self.configDlg.ui.radioLegendPosCentreLeft.setChecked(True)
elif self.legendPos == 3:
self.configDlg.ui.radioLegendPosLowerLeft.setChecked(True)
else:
self.configDlg.ui.radioLegendPosNone.setChecked(True)
if self.configDlg.exec_() == QtGui.QDialog.Accepted:
self.figWidth = self.configDlg.ui.spinFigWidth.value()
self.figHeight = self.configDlg.ui.spinFigHeight.value()
self.bCustomBinWidth = self.configDlg.ui.chkCustomBinWidth.isChecked()
self.binWidth = self.configDlg.ui.spinBinWidth.value()
self.yAxisLogScale = self.configDlg.ui.chkLogScale.isChecked()
self.bCustomXaxis = self.configDlg.ui.chkCustomXaxis.isChecked()
self.xLimitLeft = self.configDlg.ui.spinXmin.value()
self.xLimitRight = self.configDlg.ui.spinXmax.value()
# legend position
if self.configDlg.ui.radioLegendPosBest.isChecked() == True:
self.legendPos = 0
elif self.configDlg.ui.radioLegendPosUpperRight.isChecked() == True:
self.legendPos = 1
elif self.configDlg.ui.radioLegendPosCentreRight.isChecked() == True:
self.legendPos = 7
elif self.configDlg.ui.radioLegendPosLowerRight.isChecked() == True:
self.legendPos = 4
elif self.configDlg.ui.radioLegendPosUpperLeft.isChecked() == True:
self.legendPos = 2
elif self.configDlg.ui.radioLegendPosCentreLeft.isChecked() == True:
self.legendPos = 6
elif self.configDlg.ui.
|
miurahr/seahub
|
seahub/wopi/settings.py
|
Python
|
apache-2.0
| 1,424 | 0.005618 |
# Copyright (c) 2012-2016 Seafile Ltd.
import seahub.settings as settings
# OfficeOnlineServer, OnlyOffice, CollaboraOffice
OFFICE_SERVER_TYPE = getattr(settings, 'OFFICE_SERVER_TYPE', '')
OFFICE_WEB_APP_BASE_URL = getattr(settings, 'OFFICE_WEB_APP_BASE_URL', '')
WOPI_ACCESS_TOKEN_EXPIRATION = getattr(settings, 'WOPI_ACCESS_TOKEN_EXPIRATION', 12 * 60 * 60)
OFFICE_WEB_APP_DISCOVERY_EXPIRATION = getattr(settings, 'OFFICE_WEB_APP_DISCOVERY_EXPIRATION', 7 * 24 * 60 * 60)
ENABLE_OFFICE_WEB_APP = getattr(settings, 'ENABLE_OFFICE_WEB_APP', False)
OFFICE_WEB_APP_FILE_EXTENSION = getattr(settings, 'OFFICE_WEB_APP_FILE_EXTENSION', ())
ENABLE_OFFICE_WEB_APP_EDIT = getattr(settings, 'ENABLE_OFFICE_WEB_APP_EDIT', False)
OFFICE_WEB_APP_EDIT_FILE_EXTENSION = getattr(settings, 'OFFICE_WEB_APP_EDIT_FILE_EXTENSION', ())
## Client certificates ##
# path to client.cert when using client authentication
OFFICE_WEB_APP_CLIENT_CERT = getattr(settings, 'OFFICE_WEB_APP_CLIENT_CERT', '')
# path to client.key when using client authentication
OFFICE_WEB_APP_CLIENT_KEY = getattr(settings, 'OFFICE_WEB_APP_CLIENT_KEY', '')
# path to client.pem when using client authentication
OFFICE_WEB_APP_CLIENT_PEM = getattr(settings, 'OFFICE_WEB_APP_CLIENT_PEM', '')
## Server certificates ##
# Path to a CA_BUNDLE file or directory with certificates of trusted CAs
OFFICE_WEB_APP_SERVER_CA = getattr(settings, 'OFFICE_WEB_APP_SERVER_CA', True)
|
fp7-netide/Tools
|
traffem/apps/http/serverPUT.py
|
Python
|
epl-1.0
| 819 | 0.006105 |
import sys
import signal
from threading import Thread
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
class PUTHandler(BaseHTTPRequestHandler):
def do_PUT(self):
print "----- SOMETHING WAS PUT!! ------"
print self.headers
length = int(self.headers['Content-Length'])
content = self.rfile.read(length)
self.send_response(200)
print content
def run_on(port):
print("Starting a server on port %i" % port)
server_address = ('localhost', port)
httpd = HTTPServer(server_address, PUTHandler)
httpd.serve_forever()
if __name__ == "__main__":
server = Thread(target=run_on, args=[81])
server.daemon = True # Do not make us wait for you to exit
server.start()
signal.pause() # Wait for interrupt signal, e.g. KeyboardInterrupt
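One way to exercise the handler above from a second process (Python 2, matching
the server's imports; the URL path and payload are arbitrary):

import urllib2

req = urllib2.Request('http://localhost:81/anything', data='hello world')
req.get_method = lambda: 'PUT'  # urllib2 defaults to GET/POST, so force PUT
print urllib2.urlopen(req).getcode()  # expect 200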
|
persandstrom/home-assistant
|
homeassistant/components/device_tracker/xiaomi_miio.py
|
Python
|
apache-2.0
| 2,487 | 0 |
"""
Support for Xiaomi Mi WiFi Repeater 2.
For more details about this platform, please refer to the documentation
https://home-assistant.io/components/device_tracker.xiaomi_miio/
"""
import logging
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.device_tracker import (DOMAIN, PLATFORM_SCHEMA,
DeviceScanner)
from homeassistant.const import (CONF_HOST, CONF_TOKEN)
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_TOKEN): vol.All(cv.string, vol.Length(min=32, max=32)),
})
REQUIREMENTS = ['python-miio==0.4.1', 'construct==2.9.41']
def get_scanner(hass, config):
"""Return a Xiaomi MiIO device scanner."""
from miio import WifiRepeater, DeviceException
scanner = None
host = config[DOMAIN].get(CONF_HOST)
token = config[DOMAIN].get(CONF_TOKEN)
_LOGGER.info(
"Initializing with host %s (token %s...)", host, token[:5])
try:
device = WifiRepeater(host, token)
device_info = device.info()
_LOGGER.info("%s %s %s detected",
device_info.model,
device_info.firmware_version,
device_info.hardware_version)
scanner = XiaomiMiioDeviceScanner(device)
except DeviceException as ex:
_LOGGER.error("Device unavailable or token incorrect: %s", ex)
return scanner
class XiaomiMiioDeviceScanner(DeviceScanner):
"""This class queries a Xiaomi Mi WiFi Repeater."""
def __init__(self, device):
"""Initialize the scanner."""
        self.device = device
async def async_scan_devices(self):
"""Scan for devices and return a list containing found device ids."""
        from miio import DeviceException
devices = []
try:
station_info = await self.hass.async_add_job(self.device.status)
_LOGGER.debug("Got new station info: %s", station_info)
for device in station_info.associated_stations:
devices.append(device['mac'])
except DeviceException as ex:
_LOGGER.error("Got exception while fetching the state: %s", ex)
return devices
async def async_get_device_name(self, device):
"""Return None.
The repeater doesn't provide the name of the associated device.
"""
return None
|
chetan51/neon
|
neon/layers/__init__.py
|
Python
|
apache-2.0
| 1,116 | 0.001792 |
# ----------------------------------------------------------------------------
# Copyright 2015 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
from neon.layers.layer import (Linear, Bias, Affine, Conv, Convolution, GeneralizedCost, Dropout,
                               Pooling, Activation, BatchNorm, BatchNormAutodiff,
Deconv, GeneralizedCostMask)
from neon.layers.merge import Merge, MergeSum, MergeConcat, MergeConcatSequence
from neon.layers.recurrent import Recurrent, LSTM, GRU
|
yssk22/gaecouch
|
couch/models/document.py
|
Python
|
apache-2.0
| 708 | 0.011299 |
from datetime import datetime
from google.appengine.ext import db
from django.utils import simplejson as json
from couch import errors
from couch.models.util import gen_uuid
class DocumentRoot(db.Model):
''' Controls document '''
revno = db.IntegerProperty(default = 0)
    revsuffix = db.StringProperty()
deleted = db.BooleanProperty(default = False)
def rev(self):
return '%s-%s' % (self.revno, self.revsuffix)
class Document(db.Model):
id = db.StringProperty()
rev = db.StringProperty()
dbname = db.StringProperty()
docstring = db.TextProperty()
deleted = db.BooleanProperty(default = False)
def to_dict(self):
return json.loads(self.docstring)
|
GastonLab/ddb-mongodb
|
vcf_parsing.py
|
Python
|
mit
| 11,779 | 0.000679 |
import sys
from cyvcf2 import VCF
from collections import defaultdict
def parse_caller_vcfs(sample_dict, caller_list):
caller_vcf_records = defaultdict(lambda: dict())
for caller in caller_list:
parse_vcf(sample_dict[caller], caller, caller_vcf_records)
return caller_vcf_records
def parse_vcf(vcf_file, caller, caller_vcf_records):
sys.stdout.write("Reading {}\n".format(vcf_file))
vcf = VCF(vcf_file)
for record in vcf:
if len(record.ALT) > 1:
sys.stderr.write("ERROR: More than one alternative allele detected in file "
"{}\n Record: {}\n".format(vcf_file, record))
sys.exit()
key = ("chr{}".format(record.CHROM), int(record.start), int(record.end), record.REF,
record.ALT[0])
caller_vcf_records[caller][key] = record
def parse_mutect_vcf_record(record):
# Pseudocount. Encountered a division by zero issue in at least one mutect record
depth = int(record.gt_depths[0])
if depth < 1:
depth = 1
info = {'DP': str(depth),
'FILTER': str(record.FILTER),
'GTF_DP': str(record.gt_depths[0]),
'GTF_AD': str(record.gt_alt_depths[0]),
'MULTIALLELIC': str(record.INFO.get('OLD_MULTIALLELIC')) or None,
'AAF': str(float(record.gt_alt_depths[0]) / float(depth))}
return info
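A usage sketch tying the helpers above together; the VCF file name and the
variant key below are placeholders:

records = parse_caller_vcfs({'mutect': 'sample.mutect.vcf'}, ['mutect'])
key = ('chr7', 55249070, 55249071, 'C', 'T')  # (chrom, start, end, ref, alt)
if key in records['mutect']:
    print(parse_mutect_vcf_record(records['mutect'][key]))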
def parse_vardict_vcf_record(record):
info = {'DP': str(record.INFO.get('DP')),
'VD': str(record.INFO.get('VD')),
'AF': str(record.INFO.get('AF')),
'FILTER': str(record.FILTER),
'BIAS': str(record.INFO.get('BIAS')),
'REFBIAS': str(record.INFO.get('REFBIAS')),
'VARBIAS': str(record.INFO.get('VARBIAS')),
'QUAL': str(record.INFO.get('QUAL')),
'QSTD': str(record.INFO.get('QSTD')),
'SBF': str(record.INFO.get('SBF')),
'ODDRATIO': str(record.INFO.get('ODDRATIO')),
'MQ': str(record.INFO.get('MQ')),
'SN': str(record.INFO.get('SN')),
'HIAF': str(record.INFO.get('HIAF')),
'ADJAF': str(record.INFO.get('ADJAF')),
'MSI': str(record.INFO.get('MSI')),
'MSILEN': str(record.INFO.get('MSILEN')),
'SHIFT3': str(record.INFO.get('SHIFT3')),
'NM': str(record.INFO.get('NM')),
'GDAMP': str(record.INFO.get('GDAMP')),
'LSEQ': str(record.INFO.get('LSEQ')),
'RSEQ': str(record.INFO.get('RSEQ')),
'TLAMP': str(record.INFO.get('TLAMP')),
'NCAMP': str(record.INFO.get('NCAMP')),
'AMPFLAG': str(record.INFO.get('AMPFLAG')),
'HICNT': str(record.INFO.get('HICNT')),
'HICOV': str(record.INFO.get('HICOV')),
'GTF_DP': str(record.gt_depths[0]),
'GTF_AD': str(record.gt_alt_depths[0]),
'MULTIALLELIC': str(record.INFO.get('OLD_MULTIALLELIC')) or None,
'AAF': str(float(record.gt_alt_depths[0]) / float(record.gt_depths[0]))}
return info
def parse_freebayes_vcf_record(record):
info = {'DP': str(record.INFO.get('DP')),
'AF': str(record.INFO.get('AF')),
            'FILTER': str(record.FILTER),
'AC': str(record.INFO.get('AC')),
'RO': str(record.INFO.get('RO')),
'AO': str(record.INFO.get('AO')),
'PRO': str(record.INFO.get('PRO')),
'PAO': str(record.INFO.get('PAO')),
            'QR': str(record.INFO.get('QR')),
'QA': str(record.INFO.get('QA')),
'PQR': str(record.INFO.get('PQR')),
'PQA': str(record.INFO.get('PQA')),
'SRF': str(record.INFO.get('SRF')),
'SRR': str(record.INFO.get('SRR')),
'SAF': str(record.INFO.get('SAF')),
'SAR': str(record.INFO.get('SAR')),
'SRP': str(record.INFO.get('SRP')),
'SAP': str(record.INFO.get('SAP')),
'AB': str(record.INFO.get('AB')),
'ABP': str(record.INFO.get('ABP')),
'RUN': str(record.INFO.get('RUN')),
'RPP': str(record.INFO.get('RPP')),
'RPPR': str(record.INFO.get('RPPR')),
'RPL': str(record.INFO.get('RPL')),
'RPR': str(record.INFO.get('RPR')),
'EPP': str(record.INFO.get('EPP')),
'EPPR': str(record.INFO.get('EPPR')),
'DRPA': str(record.INFO.get('DRPA')),
'ODDS': str(record.INFO.get('ODDS')),
'GTI': str(record.INFO.get('GTI')),
'TYPE': str(record.INFO.get('TYPE')),
'CIGAR': str(record.INFO.get('CIGAR')),
'NUMALT': str(record.INFO.get('NUMALT')),
'MEANALT': str(record.INFO.get('MEANALT')),
'LEN': str(record.INFO.get('LEN')),
'MQM': str(record.INFO.get('MQM')),
'MQMR': str(record.INFO.get('MQMR')),
'PAIRED': str(record.INFO.get('PAIRED')),
'PAIREDR': str(record.INFO.get('PAIREDR')),
'GTF_DP': str(record.gt_depths[0]),
'MULTIALLELIC': str(record.INFO.get('OLD_MULTIALLELIC')) or None,
'AAF': str(float(record.INFO.get('AO')) / float(record.gt_depths[0]))}
return info
def parse_scalpel_vcf_record(record):
info = {'DP': str(record.gt_depths[0]),
'AVGCOV': str(record.INFO.get('AVGCOV')),
'MINCOV': str(record.INFO.get('MINCOV')),
'ALTCOV': str(record.INFO.get('ALTCOV')),
'COVRATIO': str(record.INFO.get('COVRATIO')),
'FILTER': str(record.FILTER),
'ZYG': str(record.INFO.get('ZYG')),
'CHI2': str(record.INFO.get('CHI2')),
'FISHERPHREDSCORE': str(record.INFO.get('FISHERPHREDSCORE')),
'INH': str(record.INFO.get('INH')),
'BESTSTATE': str(record.INFO.get('BESTSTATE')),
'COVSTATE': str(record.INFO.get('COVSTATE')),
'SOMATIC': str(record.INFO.get('SOMATIC')),
'DENOVO': str(record.INFO.get('DENOVO')),
'GTF_DP': str(record.gt_depths[0]),
'GTF_AD': str(record.gt_alt_depths[0]),
'MULTIALLELIC': str(record.INFO.get('OLD_MULTIALLELIC')) or None,
'AAF': str(float(record.gt_alt_depths[0]) / float(record.gt_depths[0]))}
return info
def parse_platypus_vcf_record(record):
multi_allelic = record.INFO.get('OLD_MULTIALLELIC') or False
if multi_allelic:
tr = record.INFO.get('TR')[0]
else:
tr = record.INFO.get('TR')
if float(record.INFO.get('TC')) < 1:
aaf = "0"
else:
aaf = str(float(tr) / float(record.INFO.get('TC')))
info = {'DP': str(tr),
'FR': str(record.INFO.get('FR')),
'MMLQ': str(record.INFO.get('MMLQ')),
'TCR': str(record.INFO.get('TCR')),
'HP': str(record.INFO.get('HP')),
'WE': str(record.INFO.get('WE')),
'WS': str(record.INFO.get('WS')),
'FS': str(record.INFO.get('FS')),
'TR': str(tr),
'NF': str(record.INFO.get('NF')),
'TCF': str(record.INFO.get('TCF')),
'NR': str(record.INFO.get('NR')),
'TC': str(record.INFO.get('TC')),
'END': str(record.INFO.get('END')),
'MGOF': str(record.INFO.get('MGOF')),
'SbPval': str(record.INFO.get('SbPval')),
'START': str(record.INFO.get('START')),
'ReadPosRankSum': str(record.INFO.get('ReadPosRankSum')),
'MQ': str(record.INFO.get('MQ')),
'QD': str(record.INFO.get('QD')),
'SC': str(record.INFO.get('SC')),
'BRF': str(record.INFO.get('BRF')),
'HapScore': str(record.INFO.get('HapScore')),
'FILTER': str(record.FILTER),
'MULTIALLELIC': str(record.INFO.get('OLD_MULTIALLELIC')) or None,
'AAF': aaf}
return info
def parse_pindel_vcf_record(record):
info = {'DP': str(record.gt_depths[0]),
'END': str(record.INFO.get('END')),
'HOMLEN': str(record.INFO.get('HOMLEN')),
'HOMSEQ': str(record.INFO.get('HOMSEQ')),
'SVLEN': str(record.INFO.get('SVLEN'
|
airbnb/airflow
|
airflow/providers/google/cloud/example_dags/example_dataflow_sql.py
|
Python
|
apache-2.0
| 2,471 | 0.000809 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG for Google Cloud Dataflow service
"""
import os
from airflow import models
from airflow.providers.google.cloud.operators.dataflow import DataflowStartSqlJobOperator
from airflow.utils.dates import days_ago
GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
BQ_SQL_DATASET = os.environ.get("DATAFLOW_BQ_SQL_DATASET", "airflow_dataflow_samples")
BQ_SQL_TABLE_INPUT = os.environ.get("BQ_SQL_TABLE_INPUT", "beam_input")
BQ_SQL_TABLE_OUTPUT = os.environ.get("BQ_SQL_TABLE_OUTPUT", "beam_output")
DATAFLOW_SQL_JOB_NAME = os.environ.get("DATAFLOW_SQL_JOB_NAME", "dataflow-sql")
DATAFLOW_SQL_LOCATION = os.environ.get("DATAFLOW_SQL_LOCATION", "us-west1")
with models.DAG(
dag_id="example_gcp_dataflow_sql",
start_date=days_ago(1),
schedule_interval=None, # Override to match your needs
tags=['example'],
) as dag_sql:
start_sql = DataflowStartSqlJobOperator(
task_id="start_sql_query",
job_name=DATAFLOW_SQL_JOB_NAME,
query=f"""
SELECT
sales_region as sales_region,
count(state_id) as count_state
FROM
bigquery.table.`{GCP_PROJECT_ID}`.`{BQ_SQL_DATASET}`.`{BQ_SQL_TABLE_INPUT}`
WHERE state_id >= @state_id_min
GROUP BY sales_region;
""",
options={
"bigquery-project": GCP_PROJECT_ID,
"bigquery-dataset": BQ_SQL_DATASET,
"bigq
|
uery-table": BQ_SQL_TABLE_OUTPUT,
"bigquery-write-disposition": "write-truncate",
"parameter": "state_id_min:INT64:2",
},
location=DATAFLOW_SQL_LOCATION,
do_xcom_push=True,
)
|
datalogics/scons
|
test/option/srcdir.py
|
Python
|
mit
| 1,950 | 0.006667 |
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify that the --srcdir option works to fetch things from a repository.
"""
import TestSCons
test = TestSCons.TestSCons()
test.subdir('repository', 'work1')
repository = test.workpath('repository')
test.write(['repository', 'SConstruct'], r"""
env = Environment()
env.Command('file.out', 'file.in', Copy('$TARGET', '$SOURCE'))
""")
test.write(['repository', 'file.in'], "repository/file.in\n")
opts = '--srcdir ' + repository
# Make the entire repository non-writable, so we'll detect
# if we try to write into it accidentally.
test.writable('repository', 0)
test.run(chdir = 'work1', options = opts, arguments = '.')
test.must_match(['work1', 'file.out'], "repository/file.in\n")
test.up_to_date(chdir = 'work1', options = opts, arguments = '.')
#
test.pass_test()
|
SuyashD95/python-assignments
|
Assignment 3/odd.py
|
Python
|
mit
| 403 | 0.044665 |
"""
Q4- Write a Python function, odd, that takes in one number and returns True when the number is odd and False otherwise. You should use the % (mod) operator, not if. This function takes in one number and returns a boolean
"""
def odd( number ):
return number % 2 == 1
number = int( input( "Enter a number: ") )
print( "Is the number " + str( number ) + " odd? Answer: " + str( odd( number) ) )
|
nanshe-org/nanshe_workflow
|
nanshe_workflow/util.py
|
Python
|
apache-2.0
| 733 | 0.001364 |
import contextlib
import gzip
import hashlib
import io
import mmap
from builtins import (
map as imap,
)
def gzip_compress(data, compresslevel=6):
compressed = io.BytesIO()
with gzip.GzipFile(fileobj=compressed,
mode="wb",
compresslevel=compresslevel) as compressor:
compressor.write(data)
return compressed.getvalue()
def hash_file(fn, hn):
    h = hashlib.new(hn)
with open(fn, "r") as fh:
with contextlib.closing(mmap.mmap(fh.fileno(), 0, prot=mmap.PROT_READ)) as mm:
h.update(mm)
return h.digest()
def indent(text, spaces):
spaces = " " * int(spaces)
return "\n".join(imap(lambda l: spaces + l, text.splitlines()))
|
manhhomienbienthuy/pythondotorg
|
pages/urls.py
|
Python
|
apache-2.0
| 142 | 0 |
from .views import PageView
from django.urls import path
urlpatterns = [
path('<path:path>/', PageView.as_view(), name='page_detail'),
]
 |
anhstudios/swganh
|
data/scripts/templates/object/creature/npc/droid/crafted/shared_droideka_advanced.py
|
Python
|
mit
| 474 | 0.046414 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/creature/npc/droid/crafted/shared_droideka_advanced.iff"
result.attribute_template_id = 3
result.stfName("droid_name","droideka_crafted_advanced")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
MichalMaM/ella
|
test_ella/test_app/models.py
|
Python
|
bsd-3-clause
| 487 | 0 |
from django.db import models
from django.utils.translation import ugettext_lazy as _
from ella.core.models import Publishable
class XArticle(Publishable):
"""
    ``XArticle`` is an extra Publishable descendant for testing.
    It is used to test Publishable descendants with a different
    content type.
"""
content = models.TextField(_('Content'), default='')
    class Meta:
        verbose_name = _('XArticle')
verbose_name_plural = _('XArticles')
|
rmelchorv/TDD-Cuervos
|
lists/forms.py
|
Python
|
mit
| 435 | 0.034483 |
from django import forms
from lists.models import Item
EMPTY_LIST_ERROR = "You can't have an empty list item"
class ItemForm(forms.models.ModelForm):
class Meta:
model = Item
fields = ('text',)
widgets = {
'text': forms.fields.TextInput(attrs={
'placeholder': 'Enter a to-do item',
'class': 'form-control input-lg'
}),
}
error_messages = {
            'text': {'required': "You can't have an empty list item"}
}
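A hypothetical round-trip inside a configured Django project, showing the
custom required-field message wired up above:

form = ItemForm(data={'text': ''})
assert not form.is_valid()
assert form.errors['text'] == [EMPTY_LIST_ERROR]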
|
agustinhenze/logbook.debian
|
logbook/_fallback.py
|
Python
|
bsd-3-clause
| 6,767 | 0.000591 |
# -*- coding: utf-8 -*-
"""
    logbook._fallback
    ~~~~~~~~~~~~~~~~~

    Fallback implementations in case the speedups extension is not around.
:copyright: (c) 2010 by Armin Ronacher, Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
from itertools import count
from logbook.helpers import get_iterator_next_method
from logbook.concurrency import (thread_get_ident, greenlet_get_ident,
thread_local, greenlet_local,
ThreadLock, GreenletRLock, is_gevent_enabled)
_missing = object()
_MAX_CONTEXT_OBJECT_CACHE = 256
def group_reflected_property(name, default, fallback=_missing):
"""Returns a property for a given name that falls back to the
value of the group if set. If there is no such group, the
provided default is used.
"""
def _get(self):
rv = getattr(self, '_' + name, _missing)
if rv is not _missing and rv != fallback:
return rv
if self.group is None:
return default
return getattr(self.group, name)
def _set(self, value):
setattr(self, '_' + name, value)
def _del(self):
delattr(self, '_' + name)
return property(_get, _set, _del)
class _StackBound(object):
def __init__(self, obj, push, pop):
self.__obj = obj
self.__push = push
self.__pop = pop
def __enter__(self):
self.__push()
return self.__obj
def __exit__(self, exc_type, exc_value, tb):
self.__pop()
class StackedObject(object):
"""Baseclass for all objects that provide stack manipulation
operations.
"""
def push_greenlet(self):
"""Pushes the stacked object to the greenlet stack."""
raise NotImplementedError()
def pop_greenlet(self):
"""Pops the stacked object from the greenlet stack."""
raise NotImplementedError()
def push_thread(self):
"""Pushes the stacked object to the thread stack."""
raise NotImplementedError()
def pop_thread(self):
"""Pops the stacked object from the thread stack."""
raise NotImplementedError()
def push_application(self):
"""Pushes the stacked object to the application stack."""
raise NotImplementedError()
def pop_application(self):
"""Pops the stacked object from the application stack."""
raise NotImplementedError()
def __enter__(self):
if is_gevent_enabled():
self.push_greenlet()
else:
self.push_thread()
return self
def __exit__(self, exc_type, exc_value, tb):
if is_gevent_enabled():
self.pop_greenlet()
else:
self.pop_thread()
def greenletbound(self, _cls=_StackBound):
"""Can be used in combination with the `with` statement to
execute code while the object is bound to the greenlet.
"""
return _cls(self, self.push_greenlet, self.pop_greenlet)
def threadbound(self, _cls=_StackBound):
"""Can be used in combination with the `with` statement to
execute code while the object is bound to the thread.
"""
return _cls(self, self.push_thread, self.pop_thread)
def applicationbound(self, _cls=_StackBound):
"""Can be used in combination with the `with` statement to
execute code while the object is bound to the application.
"""
return _cls(self, self.push_application, self.pop_application)
class ContextStackManager(object):
"""Helper class for context objects that manages a stack of
objects.
"""
def __init__(self):
self._global = []
self._thread_context_lock = ThreadLock()
self._thread_context = thread_local()
self._greenlet_context_lock = GreenletRLock()
self._greenlet_context = greenlet_local()
self._cache = {}
self._stackop = get_iterator_next_method(count())
def iter_context_objects(self):
"""Returns an iterator over all objects for the combined
application and context cache.
"""
use_gevent = is_gevent_enabled()
tid = greenlet_get_ident() if use_gevent else thread_get_ident()
objects = self._cache.get(tid)
if objects is None:
if len(self._cache) > _MAX_CONTEXT_OBJECT_CACHE:
self._cache.clear()
objects = self._global[:]
objects.extend(getattr(self._thread_context, 'stack', ()))
if use_gevent:
objects.extend(getattr(self._greenlet_context, 'stack', ()))
objects.sort(reverse=True)
objects = [x[1] for x in objects]
self._cache[tid] = objects
return iter(objects)
def push_greenlet(self, obj):
self._greenlet_context_lock.acquire()
try:
self._cache.pop(greenlet_get_ident(), None) # remote chance to conflict with thread ids
item = (self._stackop(), obj)
stack = getattr(self._greenlet_context, 'stack', None)
if stack is None:
self._greenlet_context.stack = [item]
else:
stack.append(item)
finally:
self._greenlet_context_lock.release()
def pop_greenlet(self):
self._greenlet_context_lock.acquire()
try:
self._cache.pop(greenlet_get_ident(), None) # remote chance to conflict with thread ids
stack = getattr(self._greenlet_context, 'stack', None)
assert stack, 'no objects on stack'
return stack.pop()[1]
finally:
self._greenlet_context_lock.release()
def push_thread(self, obj):
self._thread_context_lock.acquire()
try:
self._cache.pop(thread_get_ident(), None)
item = (self._stackop(), obj)
stack = getattr(self._thread_context, 'stack', None)
if stack is None:
self._thread_context.stack = [item]
else:
stack.append(item)
finally:
self._thread_context_lock.release()
def pop_thread(self):
self._thread_context_lock.acquire()
try:
self._cache.pop(thread_get_ident(), None)
stack = getattr(self._thread_context, 'stack', None)
assert stack, 'no objects on stack'
return stack.pop()[1]
finally:
self._thread_context_lock.release()
def push_application(self, obj):
self._global.append((self._stackop(), obj))
self._cache.clear()
def pop_application(self):
assert self._global, 'no objects on application stack'
popped = self._global.pop()[1]
self._cache.clear()
return popped
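A minimal concrete StackedObject wired to a shared ContextStackManager,
sketching the stack semantics implemented above (Tag is a hypothetical class,
not part of the module):

class Tag(StackedObject):
    manager = ContextStackManager()

    def push_thread(self):
        self.manager.push_thread(self)

    def pop_thread(self):
        self.manager.pop_thread()

    def push_application(self):
        self.manager.push_application(self)

    def pop_application(self):
        self.manager.pop_application()

    def push_greenlet(self):
        self.manager.push_greenlet(self)

    def pop_greenlet(self):
        self.manager.pop_greenlet()

with Tag().threadbound() as tag:
    assert tag in list(Tag.manager.iter_context_objects())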
|
kou/arrow
|
cpp/src/arrow/util/bpacking_simd_codegen.py
|
Python
|
apache-2.0
| 6,630 | 0.001056 |
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Usage:
# python bpacking_simd_codegen.py 128 > bpacking_simd128_generated.h
# python bpacking_simd_codegen.py 256 > bpacking_simd256_generated.h
# python bpacking_simd_codegen.py 512 > bpacking_simd512_generated.h
from functools import partial
import sys
from textwrap import dedent, indent
class UnpackGenerator:
def __init__(self, simd_width):
self.simd_width = simd_width
if simd_width % 32 != 0:
raise("SIMD bit width should be a multiple of 32")
self.simd_byte_width = simd_width // 8
def print_unpack_bit0_func(self):
print(
"inline static const uint32_t* unpack0_32(const u
|
int32_t* in, uint32_t* out) {")
print(" memset(out, 0x0, 32 * sizeof(*out));")
print(" out += 32;")
print("")
print(" return in;")
print("}")
def print_unpack_bit32_func(self):
print(
"inline static const uint32_t* unpack32_32(const uint32_t* in, uint32_t* out) {")
print(" memcpy(out, in, 32 * sizeof(*out));")
print(" in += 32;")
print(" out += 32;")
print("")
print(" return in;")
        print("}")
def print_unpack_bit_func(self, bit):
def p(code):
print(indent(code, prefix=' '))
shift = 0
shifts = []
in_index = 0
inls = []
mask = (1 << bit) - 1
bracket = "{"
print(f"inline static const uint32_t* unpack{bit}_32(const uint32_t* in, uint32_t* out) {{")
p(dedent(f"""\
uint32_t mask = 0x{mask:0x};
simd_batch masks(mask);
simd_batch words, shifts;
simd_batch results;
"""))
def safe_load(index):
return f"SafeLoad<uint32_t>(in + {index})"
for i in range(32):
if shift + bit == 32:
shifts.append(shift)
inls.append(safe_load(in_index))
in_index += 1
shift = 0
elif shift + bit > 32: # cross the boundary
inls.append(
f"{safe_load(in_index)} >> {shift} | {safe_load(in_index + 1)} << {32 - shift}")
in_index += 1
shift = bit - (32 - shift)
shifts.append(0) # zero shift
else:
shifts.append(shift)
inls.append(safe_load(in_index))
shift += bit
bytes_per_batch = self.simd_byte_width
words_per_batch = bytes_per_batch // 4
one_word_template = dedent("""\
words = simd_batch{{ {words} }};
shifts = simd_batch{{ {shifts} }};
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += {words_per_batch};
""")
for start in range(0, 32, words_per_batch):
stop = start + words_per_batch;
p(f"""// extract {bit}-bit bundles {start} to {stop - 1}""")
p(one_word_template.format(
words=", ".join(inls[start:stop]),
shifts=", ".join(map(str, shifts[start:stop])),
words_per_batch=words_per_batch))
p(dedent(f"""\
in += {bit};
return in;"""))
print("}")
def print_copyright():
print(dedent("""\
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
"""))
def print_note():
print("// Automatically generated file; DO NOT EDIT.")
print()
def main(simd_width):
print_copyright()
print_note()
struct_name = f"UnpackBits{simd_width}"
# NOTE: templating the UnpackBits struct on the dispatch level avoids
# potential name collisions if there are several UnpackBits generations
# with the same SIMD width on a given architecture.
print(dedent(f"""\
#pragma once
#include <cstdint>
#include <cstring>
#include <xsimd/xsimd.hpp>
#include "arrow/util/dispatch.h"
#include "arrow/util/ubsan.h"
namespace arrow {{
namespace internal {{
namespace {{
using ::arrow::util::SafeLoad;
template <DispatchLevel level>
struct {struct_name} {{
using simd_batch = xsimd::make_sized_batch_t<uint32_t, {simd_width//32}>;
"""))
gen = UnpackGenerator(simd_width)
gen.print_unpack_bit0_func()
print()
for i in range(1, 32):
gen.print_unpack_bit_func(i)
print()
gen.print_unpack_bit32_func()
print()
print(dedent(f"""\
}}; // struct {struct_name}
}} // namespace
}} // namespace internal
}} // namespace arrow
"""))
if __name__ == '__main__':
usage = f"""Usage: {__file__} <SIMD bit-width>"""
if len(sys.argv) != 2:
raise ValueError(usage)
try:
simd_width = int(sys.argv[1])
except ValueError:
raise ValueError(usage)
main(simd_width)
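For a quick look at a single generated function without redirecting a whole
header, the generator class can also be driven directly (illustrative):

gen = UnpackGenerator(128)
gen.print_unpack_bit_func(3)  # emits unpack3_32 for 4-lane uint32 batches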
|
yosi-dediashvili/SubiT
|
src/SubChoosers/FirstInCertainSubStagesChooser.py
|
Python
|
gpl-3.0
| 2,895 | 0.005181 |
from SubChoosers.ISubStagesChooser import ISubStagesChooser
from SubRankers.ByFullNameSubStagesRanker import ByFullNameSubStagesRanker
from Utils import WriteDebug
class FirstInCertainSubStagesChooser(ISubStagesChooser):
""" Implementation of ISubStagesChooser. This chooser return results after
ranking them in the ByFullNameSubStagesRanker,
In practice, the chooser uses the ByFullNameSubStagesRanker ranker, and
return the first SubStages after the rank, regardless of the value in
first_is_certain.
"""
@classmethod
def chooseMovieSubStageFromMoviesSubStages(cls, movie_sub_stages, query):
""" Choose the first MovieSubStage after ranking the Results using the
most accurate ranker avaliable (ByFullNameSubStagesRanker).
The function will return MovieSubStage even if first_is_certain is
False for the first MovieSubStage that the ranker returned. Will
return None if movie_sub_stages is empty.
"""
movie_sub_stage = None
if movie_sub_stages:
WriteDebug('Got results in movie_sub_stages, sending them to the ranker')
(movie_sub_stages, first_is_ceratin) = ByFullNameSubStagesRanker\
.rankMovieSubStages(movie_sub_stages, query)
WriteDebug('Ranker returned %s for first_is_certain, but we dont care' % first_is_ceratin)
movie_sub_stage = movie_sub_stages[0]
WriteDebug('MovieSubStage: %s' % movie_sub_stage.info())
else:
WriteDebug('There is not results in movie_sub_stages, returning None')
return movie_sub_stage
@classmethod
def chooseVersionSubStageFromVersionSubStages\
(cls, version_sub_stages, movie_sub_stages, query):
""" Choose the first VersionSubStage after ranking the Results using
the most accurate ranker avaliable (ByFullNameSubStagesRanker).
The function will return VersionSubStage even if first_is_certain
is False for the first VersionSubStage that the ranker returned.
Will return None if version_sub_stages is empty.
"""
version_sub_stage = None
        if version_sub_stages:
            WriteDebug('Got Versions in version_sub_stages, sending them to the ranker')
(version_sub_stages, first_is_ceratin) = ByFullNameSubStagesRanker\
.rankVersionSubStages(version_sub_stages, query)
WriteDebug('Ranker returned %s for first_is_certain, but we dont care' % first_is_ceratin)
version_sub_stage = version_sub_stages[0]
            WriteDebug('VersionSubStage: %s' % version_sub_stage.info())
else:
WriteDebug('There is not results in version_sub_stages, returning None')
return version_sub_stage
|
lilydjwg/you-get
|
src/you_get/extractors/acfun.py
|
Python
|
mit
| 3,211 | 0.005308 |
#!/usr/bin/env python
__all__ = ['acfun_download']
from ..common import *
from .letv import letvcloud_download_by_vu
from .qq import qq_download_by_vid
from .sina import sina_download_by_vid
from .tudou import tudou_download_by_iid
from .youku import youku_download_by_vid
import json, re
def get_srt_json(id):
url = 'http://danmu.aixifan.com/V2/%s' % id
return get_html(url)
def acfun_download_by_vid(vid, title, output_dir='.', merge=True, info_only=False, **kwargs):
info = json.loads(get_html('http://www.acfun.tv/video/getVideo.aspx?id=' + vid))
sourceType = info['sourceType']
if 'sourceId' in info: sourceId = info['sourceId']
# danmakuId = info['danmakuId']
if sourceType == 'sina':
sina_download_by_vid(sourceId, title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'youku':
youku_download_by_vid(sourceId, title=title, output_dir=output_dir, merge=merge, info_only=info_only, **kwargs)
elif sourceType == 'tudou':
tudou_download_by_iid(sourceId, title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'qq':
qq_download_by_vid(sourceId, title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'letv':
letvcloud_download_by_vu(sourceId, '2d8c027396', title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'zhuzhan':
a = 'http://api.aixifan.com/plays/%s/realSource' % vid
s = json.loads(get_content(a, headers={'deviceType': '1'}))
urls = s['data']['files'][-1]['url']
size = urls_size(urls)
print_info(site_info, title, 'mp4', size)
if not info_only:
download_urls(urls, title, 'mp4', size,
output_dir=output_dir, merge=merge)
else:
raise NotImplementedError(sourceType)
if not info_only and not dry_run:
if not kwargs['caption']:
print('Skipping danmaku.')
return
try:
            title = get_filename(title)
print('Downloading %s ...\n' % (title + '.cmt.json'))
cmt = get_srt_json(vid)
with open(os.path.join(output_dir, title + '.cmt.json'), 'w', encoding='utf-8') as x:
x.write(cmt)
except:
pass
def acfun_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
assert re.match(r'http://[^\.]+.acfun.[^\.]+/\D/\D\D(\d+)', url)
html = get_html(url)
title = r1(r'<h1 id="txt-title-view">([^<>]+)<', html)
title = unescape_html(title)
title = escape_file_path(title)
assert title
videos = re.findall("data-vid=\"(\d+)\".*href=\"[^\"]+\".*title=\"([^\"]+)\"", html)
for video in videos:
p_vid = video[0]
p_title = title + " - " + video[1] if video[1] != '删除标签' else title
acfun_download_by_vid(p_vid, p_title,
output_dir=output_dir,
merge=merge,
info_only=info_only,
**kwargs)
site_info = "AcFun.tv"
download = acfun_download
download_playlist = playlist_not_supported('acfun')
|
therealpyro/slave
|
slave/test/test_lakeshore.py
|
Python
|
gpl-3.0
| 488 | 0 |
# -*- coding: utf-8 -*-
#
# Slave, (c) 2014, see AUTHORS. Licensed under the GNU GPL.
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from future.builtins import *
from slave.lakeshore import LS340, LS370
from slave.transport import SimulatedTransport
def test_ls340():
# Test if instantiation fails
LS340(SimulatedTransport())
def test_ls370():
# Test if instantiation fails
LS370(SimulatedTransport())
|
kaarolch/ansible
|
lib/ansible/modules/system/osx_defaults.py
|
Python
|
gpl-3.0
| 14,430 | 0.003119 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, GeekChimp - Franck Nijhof <franck@geekchimp.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: osx_defaults
author: Franck Nijhof (@frenck)
short_description: osx_defaults allows users to read, write, and delete Mac OS X user defaults from Ansible
description:
- osx_defaults allows users to read, write, and delete Mac OS X user defaults from Ansible scripts.
Mac OS X applications and other programs use the defaults system to record user preferences and other
information that must be maintained when the applications aren't running (such as default font for new
documents, or the position of an Info panel).
version_added: "2.0"
options:
domain:
description:
- The domain is a domain name of the form com.companyname.appname.
required: false
default: NSGlobalDomain
host:
description:
- The host on which the preference should apply. The special value "currentHost" corresponds to the
"-currentHost" switch of the defaults commandline tool.
required: false
default: null
version_added: "2.1"
key:
description:
- The key of the user preference
required: true
type:
description:
- The type of value to write.
required: false
default: string
choices: [ "array", "bool", "boolean", "date", "float", "int", "integer", "string" ]
array_add:
description:
- Add new elements to the array for a key which has an array as its value.
required: false
default: false
choices: [ "true", "false" ]
value:
description:
- The value to write. Only required when state = present.
required: false
default: null
state:
description:
- The state of the user defaults
required: false
default: present
choices: [ "present", "absent" ]
notes:
- Apple Mac caches defaults. You may need to logout and login to apply the changes.
'''
EXAMPLES = '''
- osx_defaults:
domain: com.apple.Safari
key: IncludeInternalDebugMenu
type: bool
value: true
state: present
- osx_defaults:
domain: NSGlobalDomain
key: AppleMeasurementUnits
type: string
value: Centimeters
state: present
- osx_defaults:
domain: com.apple.screensaver
host: currentHost
key: showClock
type: int
value: 1
- osx_defaults:
key: AppleMeasurementUnits
type: string
value: Centimeters
- osx_defaults:
key: AppleLanguages
type: array
value:
- en
- nl
- osx_defaults:
domain: com.geekchimp.macable
key: ExampleKeyToRemove
state: absent
'''
import datetime
from ansible.module_utils.basic import *
from ansible.module_utils.pycompat24 import get_exception
# exceptions --------------------------------------------------------------- {{{
class OSXDefaultsException(Exception):
pass
# /exceptions -------------------------------------------------------------- }}}
# class MacDefaults -------------------------------------------------------- {{{
class OSXDefaults(object):
""" Class to manage Mac OS user defaults """
# init ---------------------------------------------------------------- {{{
""" Initialize this module. Finds 'defaults' executable and preps the parameters """
def __init__(self, **kwargs):
# Initial var for storing current defaults value
self.current_value = None
# Just set all given parameters
for key, val in kwargs.items():
setattr(self, key, val)
# Try to find the defaults executable
self.executable = self.module.get_bin_path(
'defaults',
required=False,
opt_dirs=self.path.split(':'),
)
if not self.executable:
raise OSXDefaultsException("Unable to locate defaults executable.")
# When state is present, we require a parameter
if self.state == "present" and self.value is None:
raise OSXDefaultsException("Missing value parameter")
# Ensure the value is the correct type
self.value = self._convert_type(self.type, self.value)
# /init --------------------------------------------------------------- }}}
# tools --------------------------------------------------------------- {{{
""" Converts value to given type """
def _convert_type(self, type, value):
if type == "string":
return str(value)
elif type in ["bool", "boolean"]:
if isinstance(value, basestring):
value = value.lower()
if value in [True, 1, "true", "1", "yes"]:
return True
elif value in [False, 0, "false", "0", "no"]:
return False
raise OSXDefaultsException("Invalid boolean value: {0}".format(repr(value)))
elif type == "date":
try:
return datetime.datetime.strptime(value.split("+")[0].strip(), "%Y-%m-%d %H:%M:%S")
except ValueError:
raise OSXDefaultsException(
"Invalid date value: {0}. Required format yyy-mm-dd hh:mm:ss.".format(repr(value))
)
elif type in ["int", "integer"]:
if not str(value).isdigit():
raise OSXDefaultsException("Invalid integer value: {0}".format(repr(value)))
return int(value)
elif type == "float":
try:
value = float(value)
except ValueError:
raise OSXDefaultsException("Invalid float value: {0}".format(repr(value)))
return value
elif type == "array":
if not isinstance(value, list):
raise OSXDefaultsException("Invalid value. Expected value to be an array")
return value
raise OSXDefaultsException('Type is not supported: {0}'.format(type))
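    # Illustrative behaviour of _convert_type above (example inputs are
    # assumed, not taken from the original source):
    #     self._convert_type("bool", "Yes")   -> True   ("yes" after lower())
    #     self._convert_type("int", "42")     -> 42
    #     self._convert_type("date", "2014-01-01 10:00:00 +0000")
    #                                         -> datetime(2014, 1, 1, 10, 0)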
""" Re
|
turns a normalized list of commandline arguments based on the "host" attribute """
def _host_args(self):
if self.host is None:
return []
elif self.host == 'currentHost':
return ['-currentHost']
else:
return ['-host', self.host]
""" Returns a list containing the "defaults" executable and any common base arguments """
def _base_command(self):
return [self.executable] + self._host_args()
""" Converts array output from defaults to an list """
@staticmethod
def _convert_defaults_str_to_list(value):
# Split output of defaults. Every line contains a value
value = value.splitlines()
# Remove first and last item, those are not actual values
value.pop(0)
value.pop(-1)
# Remove extra spaces and comma (,) at the end of values
value = [re.sub(',$', '', x.strip(' ')) for x in value]
return value
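    # Example (assumed `defaults read` array output): the raw string
    #     '(\n    en,\n    nl\n)'
    # becomes ['en', 'nl'] -- the parentheses lines are popped off and each
    # remaining line is stripped of spaces and its trailing comma.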
# /tools -------------------------------------------------------------- }}}
# commands ------------------------------------------------------------ {{{
""" Reads value of this domain & key from defaults """
def read(self):
# First try to find out the type
rc, out, err = self.module.run_command(self._base_command() + ["read-type", self.domain, self.key])
        # If RC is 1, the key does not exist
if rc == 1:
return None
        # If the RC is not 0, then something terrible happened! Ooooh nooo!
if rc != 0:
raise OSXDefaultsExcepti
|
Johnetordoff/osf.io
|
api_tests/conftest.py
|
Python
|
apache-2.0
| 378 | 0.002646 |
from __future__ import print_function
import pytest
from website.app import init_app
from tests.json_api_test_app import JSONAPITestApp
@pytest.fixture()
def app():
return JSONAPITestApp()
# NOTE: autouse so that ADDONS_REQUESTED gets set on website.settings
@pytest.fixture(autouse=True, scope='session')
def app_init():
    init_app(routes=False, set_backends=False)
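# Hypothetical usage sketch (not part of the original file): pytest injects
# the `app` fixture above into tests by parameter name, e.g.
#
#     def test_root(app):
#         res = app.get('/v2/', expect_errors=True)
#         assert res.status_code in (200, 401)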
|
TelematicaUSM/EduRT
|
src/wsclass.py
|
Python
|
agpl-3.0
| 8,134 | 0 |
# -*- coding: UTF-8 -*-
# COPYRIGHT (c) 2016 Cristóbal Ganter
#
# GNU AFFERO GENERAL PUBLIC LICENSE
# Version 3, 19 November 2007
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from functools import partialmethod
from weakref import finalize
from tornado.gen import coroutine
from src import messages as msg
from src.db import message_broker as mb
from src.pub_sub import MalformedMessageError, \
UnrecognizedOwnerError
_path = 'src.swclass'
class WSClass(object):
"""Attaches its methods to a controller.MSGHandler.
.. todo::
* Explain this class better XD.
"""
_path = '.'.join((_path, 'WSClass'))
def __init__(self, handler):
_path = msg.join_path(self._path, '__init__')
self.handler = handler
self.pub_subs = {
'w': self.handler.ws_pub_sub,
'd': mb,
'l': self.handler.local_pub_sub,
}
for attr_name in dir(self):
attribute = getattr(self, attr_name)
if hasattr(attribute, 'msg_types'):
                for _type, channels in attribute.msg_types:
msg.code_debug(
_path,
'Adding action: %r ...' % attribute
)
self.register_action_in(
msg_type=_type, action=attribute,
channels=channels)
finalize(
self, msg.code_debug, self._path,
'Deleting WSClass {0} from {0.handler} '
'...'.format(self)
)
@property
def channels(self):
return self.pub_subs.keys()
def redirect_to(self, channel, message, content=False):
"""Redirect ``message`` through ``channel``.
        If ``content`` is ``True``, then the object
corresponding to the ``'content'`` key of
``message`` is sent.
:param str channel:
The channel through which ``message`` will be
sent.
:param dict message:
The message to be sent.
:param bool content:
If ``True``, just the object corresponding to
the ``'content'`` key of ``message`` will be
sent.
If ``False``, the whole message will be sent.
:raises MalformedMessageError:
If ``content`` is ``True``, but ``message``
doesn't have the ``'content'`` key.
:raises BadChannelArgumentError:
If ``channel`` is not one of ``self.pub_subs``
keys.
:raises NotDictError:
If ``message`` is not a dictionary.
:raises NoMessageTypeError:
If the message or it's content doesn't have the
``'type'`` key.
:raises NoActionForMsgTypeError:
If ``send_function`` of the ``PubSub`` object
wasn't specified during object creation and
there's no registered action for this message
type.
"""
try:
m = message['content'] if content else message
self.pub_subs[channel].send_message(m)
except KeyError as ke:
if 'content' not in message:
mme = MalformedMessageError(
"If content=True, then message must "
"have the 'content' key."
)
raise mme from ke
elif channel not in self.pub_subs:
raise \
BadChannelArgumentError(self.channels) \
from ke
else:
raise
redirect_content_to = partialmethod(redirect_to,
content=True)
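    # Illustrative use of the two redirect helpers above (the message shape
    # is assumed):
    #
    #     self.redirect_to('d', message)           # forward whole message
    #     self.redirect_content_to('w', message)   # forward message['content']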
def register_action_in(self, msg_type, action,
channels):
"""Register ``action`` in a set of channels.
:param str msg_type:
The message type to which ``action`` will be
subscribed.
:param callable action:
The action to be registered in ``channels``.
:param set channels:
Set of strings, which identify all the channels
to which ``action`` will be registered.
:raises BadChannelArgumentError:
If any channel is not one of ``self.pub_subs``
keys.
"""
try:
for channel in channels:
ps = self.pub_subs[channel]
ps.register(msg_type, action, self)
except KeyError as ke:
if not all(c in self.pub_subs
for c in channels):
raise \
BadChannelArgumentError(self.channels) \
from ke
else:
raise
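    # Illustrative call (action name assumed): subscribe ``self.on_ping`` to
    # the 'ping' message type on the websocket and local channels:
    #
    #     self.register_action_in('ping', self.on_ping, {'w', 'l'})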
def unregister(self):
for ps in self.pub_subs.values():
try:
ps.remove_owner(self)
except UnrecognizedOwnerError:
pass
@coroutine
def end(self):
self.unregister()
class subscribe(object):
"""Append the ``msg_types`` attribute to a method.
Each parameter should have one of the following forms:
``type``, ``(type, channel)`` or
``(type, {channel, ...})``. Where ``type`` is a string
containing the message_type to which you want the method
to be subscribed and ``channel`` is one of this strings:
``'w'``, ``'d'``, ``'l'``. The channel strings mean:
Websocket, Database and Local.
If there are only 2 string parameters and the second is
    one character long, then these parameters are interpreted
as ``subscribe(type, channel)``.
This class should be used as a decorator.
:raises TypeError:
If any element of ``msg_types`` is not a tuple or a
string.
:raises ValueError:
If any tuple in ``msg_types`` has a length different
than 2.
"""
_path = '.'.join((_path, 'subscribe'))
def __init__(self, *msg_types,
channels={'w', 'd', 'l'}):
if len(msg_types) == 2 and \
isinstance(msg_types[0], str) and \
isinstance(msg_types[1], str) and \
len(msg_types[1]) == 1:
msg_types = ((msg_types[0], msg_types[1]),)
for t in msg_types:
if not isinstance(t, (tuple, str)):
raise TypeError(
'msg_types has an element that is not '
'a tuple or a string.'
)
if isinstance(t, tuple) and len(t) != 2:
raise ValueError(
'msg_types has a tuple that has a '
'length different than 2.'
)
self.msg_types = [(t, channels)
for t in msg_types
if isinstance(t, str)]
self.msg_types.extend(
(t[0], {t[1]})
if isinstance(t[1], str)
else t
for t in msg_types
if isinstance(t, tuple)
)
def __call__(self, method):
_path = '.'.join((self._path, '__call__'))
msg.code_debug(
_path,
'Subscribing method {!r} to {!r} message types '
'...'.format(method, self.msg_types)
)
method.msg_types = self.msg_types
return method
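# Hypothetical usage sketch of the subscribe decorator (class and message
# type names are illustrative):
#
#     class EchoWS(WSClass):
#         @subscribe('echo', 'w')            # one type, websocket channel
#         def echo(self, message):
#             self.redirect_to('w', message)
#
#         @subscribe(('ping', {'w', 'l'}))   # one type, two channels
#         def ping(self, message):
#             ...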
class BadChannelArgumentError(ValueError):
    def __init__(self, channels, *args):
super().__init__(
'The channel argument must be one of the '
'following strings: {}.'.format(channels),
*args
)
|
CompassionCH/compassion-switzerland
|
partner_communication_switzerland/__init__.py
|
Python
|
agpl-3.0
| 468 | 0 |
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
#
#    @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from . import models
from . import wizards
from . import controllers
from . import forms
|
retoo/pystructure
|
s101g/examples/simple/simple.py
|
Python
|
lgpl-2.1
| 195 | 0.025641 |
class Foo(object):
def set(self, value):
self.field = value
def get(self):
return self.field
a = Foo()
a.set("hello world")
z = a.get()
print z
z
a
|
atodorov/anaconda
|
pyanaconda/ui/gui/xkl_wrapper.py
|
Python
|
gpl-2.0
| 15,372 | 0.003578 |
#
# Copyright (C) 2012-2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
"""
This module includes functions and classes for dealing with multiple layouts
in Anaconda. It wraps the libxklavier functionality to protect Anaconda from
dealing with its "nice" API that looks like a Lisp-influenced "good old C",
and it also wraps the systemd-localed functionality.
It provides a XklWrapper class with several methods that can be used for listing
and various modifications of keyboard layouts settings.
"""
import gi
gi.require_version("GdkX11", "3.0")
gi.require_version("Xkl", "1.0")
from gi.repository import GdkX11, Xkl
import threading
import gettext
from collections import namedtuple
from pyanaconda.core import util
from pyanaconda.core.configuration.anaconda import conf
from pyanaconda.core.constants import DEFAULT_KEYBOARD
from pyanaconda.keyboard import join_layout_variant, parse_layout_variant, KeyboardConfigError, InvalidLayoutVariantSpec
from pyanaconda.core.async_utils import async_action_wait
from pyanaconda.anaconda_loggers import get_module_logger
log = get_module_logger(__name__)
Xkb_ = lambda x: gettext.translation("xkeyboard-config", fallback=True).gettext(x)
iso_ = lambda x: gettext.translation("iso_639", fallback=True).gettext(x)
# namedtuple for information about a keyboard layout (its language and description)
LayoutInfo = namedtuple("LayoutInfo", ["lang", "desc"])
class XklWrapperError(KeyboardConfigError):
"""Exception class for reporting libxklavier-related problems"""
pass
class XklWrapper(object):
"""
Class wrapping the libxklavier functionality
    Use this class as a singleton because it provides read-only data and
    its initialization (which takes quite a lot of time) always reads the
    same data. It doesn't make sense to create multiple instances.
"""
_instance = None
_instance_lock = threading.Lock()
@staticmethod
def get_instance():
with XklWrapper._instance_lock:
if not XklWrapper._instance:
XklWrapper._instance = XklWrapper()
return XklWrapper._instance
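    # Hedged usage sketch: callers are expected to go through the shared
    # instance instead of constructing XklWrapper directly, e.g.
    #
    #     xkl = XklWrapper.get_instance()
    #     current = xkl.get_current_layout()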
def __init__(self):
#initialize Xkl-related stuff
display = GdkX11.x11_get_default_xdisplay()
self._engine = Xkl.Engine.get_instance(display)
self._rec = Xkl.ConfigRec()
if not self._rec.get_from_server(self._engine):
raise XklWrapperError("Failed to get configuration from server")
#X is probably initialized to the 'us' layout without any variant and
#since we want to add layouts with variants we need the layouts and
#variants lists to have the same length. Add "" padding to variants.
#See docstring of the add_layout method for details.
diff = len(self._rec.layouts) - len(self._rec.variants)
if diff > 0 and conf.system.can_activate_layouts:
self._rec.set_variants(self._rec.variants + (diff * [""]))
if not self._rec.activate(self._engine):
# failed to activate layouts given e.g. by a kickstart (may be
# invalid)
lay_var_str = ",".join(map(join_layout_variant,
self._rec.layouts,
self._rec.variants))
log.error("Failed to activate layouts: '%s', "
"falling back to default %s", lay_var_str, DEFAULT_KEYBOARD)
self._rec.set_layouts([DEFAULT_KEYBOARD])
self._rec.set_variants([""])
if not self._rec.activate(self._engine):
# failed to activate even the default layout, something is
# really wrong
raise XklWrapperError("Failed to initialize layouts")
#needed also for Gkbd.KeyboardDrawingDialog
self.configreg = Xkl.ConfigRegistry.get_instance(self._engine)
self.configreg.load(False)
self._layout_infos = dict()
self._layout_infos_lock = threading.RLock()
self._switch_opt_infos = dict()
self._switch_opt_infos_lock = threading.RLock()
#this might take quite a long time
        self.configreg.foreach_language(self._get_language_variants, None)
self.configreg.foreach_country(self._get_country_variants, None)
#'grp' means that we want layout (group) switching options
self.configreg.foreach_option('grp', self._get_switch_option, None)
    def _get_lang_variant(self, c_reg, item, subitem, lang):
if subitem:
name = item.get_name() + " (" + subitem.get_name() + ")"
description = subitem.get_description()
else:
name = item.get_name()
description = item.get_description()
#if this layout has already been added for some other language,
#do not add it again (would result in duplicates in our lists)
if name not in self._layout_infos:
with self._layout_infos_lock:
self._layout_infos[name] = LayoutInfo(lang, description)
def _get_country_variant(self, c_reg, item, subitem, country):
if subitem:
name = item.get_name() + " (" + subitem.get_name() + ")"
description = subitem.get_description()
else:
name = item.get_name()
description = item.get_description()
# if the layout was not added with any language, add it with a country
if name not in self._layout_infos:
with self._layout_infos_lock:
self._layout_infos[name] = LayoutInfo(country, description)
def _get_language_variants(self, c_reg, item, user_data=None):
lang_name, lang_desc = item.get_name(), item.get_description()
c_reg.foreach_language_variant(lang_name, self._get_lang_variant, lang_desc)
def _get_country_variants(self, c_reg, item, user_data=None):
country_name, country_desc = item.get_name(), item.get_description()
c_reg.foreach_country_variant(country_name, self._get_country_variant,
country_desc)
def _get_switch_option(self, c_reg, item, user_data=None):
"""Helper function storing layout switching options in foreach cycle"""
desc = item.get_description()
name = item.get_name()
with self._switch_opt_infos_lock:
self._switch_opt_infos[name] = desc
def get_current_layout(self):
"""
Get current activated X layout and variant
:return: current activated X layout and variant (e.g. "cz (qwerty)")
"""
# ported from the widgets/src/LayoutIndicator.c code
self._engine.start_listen(Xkl.EngineListenModes.TRACK_KEYBOARD_STATE)
state = self._engine.get_current_state()
cur_group = state.group
num_groups = self._engine.get_num_groups()
# BUG?: if the last layout in the list is activated and removed,
# state.group may be equal to n_groups
if cur_group >= num_groups:
cur_group = num_groups - 1
layout = self._rec.layouts[cur_group] # pylint: disable=unsubscriptable-object
try:
variant = self._rec.variants[cur_group] # pylint: disable=unsubscriptable-object
except IndexError: