repo_name (string, len 5–100) | path (string, len 4–231) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, len 0–8.16k) | middle (string, len 3–512) | suffix (string, len 0–8.17k)
---|---|---|---|---|---|---|---|---
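The prefix/middle/suffix columns follow the usual fill-in-the-middle (FIM) convention: each row is a single source file split at two cut points, which is why several snippets below break mid-identifier. A minimal reconstruction sketch, assuming rows are plain dicts keyed by the column names above (the toy row is illustrative, not taken from the dataset):

def reconstruct(row):
    # Concatenating the three FIM fields restores the original file text.
    return row["prefix"] + row["middle"] + row["suffix"]

row = {"prefix": "def add(a, b):\n    ret", "middle": "urn a", "suffix": " + b\n"}
assert reconstruct(row) == "def add(a, b):\n    return a + b\n"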
WoLpH/celery | celery/app/task/__init__.py | Python | bsd-3-clause | 27,392 | 0.000548
# -*- coding: utf-8 -*-"
import sys
import threading
from celery.datastructures import ExceptionInfo
from celery.exceptions import MaxRetriesExceededError, RetryTaskError
from celery.execute.trace import TaskTrace
from celery.registry import tasks, _unpickle_task
from celery.result import EagerResult
from celery.utils import mattrgetter, gen_unique_id, fun_takes_kwargs
extract_exec_options = mattrgetter("queue", "routing_key",
"exchange", "immediate",
"mandatory", "priority",
"serializer", "delivery_mode",
"compression")
class Context(threading.local):
# Default context
logfile = None
loglevel = None
id = None
args = None
kwargs = None
retries = 0
is_eager = False
delivery_info = None
taskset = None
chord = None
def update(self, d, **kwargs):
self.__dict__.update(d, **kwargs)
def clear(self):
self.__dict__.clear()
def get(self, key, default=None):
try:
return getattr(self, key)
except AttributeError:
return default
class TaskType(type):
"""Meta class for tasks.
Automatically registers the task in the task registry, except
if the `abstract` attribute is set.
If no `name` attribute is provided, the name is automatically
set to the name of the module it was defined in, and the class name.
"""
def __new__(cls, name, bases, attrs):
new = super(TaskType, cls).__new__
task_module = attrs.get("__module__") or "__main__"
# Abstract class: abstract attribute should not be inherited.
if attrs.pop("abstract", None) or not attrs.get("autoregister", True):
return new(cls, name, bases, attrs)
# Automatically generate missing/empty name.
autoname = False
if not attrs.get("name"):
try:
module_name = sys.modules[task_module].__name__
except KeyError: # pragma: no cover
# Fix for manage.py shell_plus (Issue #366).
module_name = task_module
attrs["name"] = '.'.join([module_name, name])
autoname = True
# Because of the way import happens (recursively)
# we may or may not be the first time the task tries to register
# with the framework. There should only be one class for each task
# name, so we always return the registered version.
task_name = attrs["name"]
if task_name not in tasks:
task_cls = new(cls, name, bases, attrs)
if autoname and task_module == "__main__" and task_cls.app.main:
task_name = task_cls.name = '.'.join([task_cls.app.main, name])
tasks.register(task_cls)
task = tasks[task_name].__class__
return task
def __repr__(cls):
return "<class Task of %s>" % (cls.app, )
class BaseTask(object):
"""Task base class.
When called tasks apply the :meth:`run` method. This method must
be defined by all tasks (that is unless the :meth:`__call__` method
is overridden).
"""
__metaclass__ = TaskType
MaxRetriesExceededError = MaxRetriesExceededError
#: The application instance associated with this task class.
app = None
#: Name of the task.
name = None
#: If :const:`True` the task is an abstract base class.
abstract = True
#: If disabled the worker will not forward magic keyword arguments.
#: Deprecated and scheduled for removal in v3.0.
accept_magic_kwargs = False
#: Request context (set when task is applied).
request = Context()
#: Destination queue. The queue needs to exist
#: in :setting:`CELERY_QUEUES`. The `routing_key`, `exchange` and
#: `exchange_type` attributes will be ignored if this is set.
queue = None
#: Overrides the apps default `routing_key` for this task.
routing_key = None
#: Overrides the apps default `exchange` for this task.
exchange = None
#: Overrides the apps default exchange type for this task.
exchange_type = None
#: Override the apps default delivery mode for this task. Default is
#: `"persistent"`, but you can change this to `"transient"`, which means
#: messages will be lost if the broker is restarted. Consult your broker
#: manual for any additional delivery modes.
delivery_mode = None
#: Mandatory message routing.
mandatory = False
#: Request immediate delivery.
immediate = False
#: Default message priority. A number from 0 to 9, where 0 is the
#: highest. Note that RabbitMQ does not support priorities.
priority = None
#: Maximum number of retries before giving up. If set to :const:`None`,
#: it will **never** stop retrying.
max_retries = 3
#: Default time in seconds before a retry of the task should be
#: executed. 3 minutes by default.
default_retry_delay = 3 * 60
#: Rate limit for this task type. Examples: :const:`None` (no rate
#: limit), `"100/s"` (hundred tasks a second), `"100/m"` (hundred tasks
#: a minute), `"100/h"` (hundred tasks an hour).
rate_limit = None
#: If enabled the worker will not store task state and return values
#: for this task. Defaults to the :setting:`CELERY_IGNORE_RESULT`
#: setting.
ignore_result = False
#: When enabled errors will be stored even if the task is otherwise
#: configured to ignore results.
store_errors_even_if_ignored = False
#: If enabled an email will be sent to :setting:`ADMINS` whenever a task
#: of this type fails.
send_error_emails = False
disable_error_emails = False # FIXME
#: List of exception types to send error emails for.
error_whitelist = ()
#: The name of a serializer that is registered with
#: :mod:`kombu.serialization.registry`. Default is `"pickle"`.
serializer = "pickle"
#: Hard time limit.
#: Defaults to the :setting:`CELERY_TASK_TIME_LIMIT` setting.
time_limit = None
#: Soft time limit.
#: Defaults to the :setting:`CELERY_TASK_SOFT_TIME_LIMIT` setting.
soft_time_limit = None
#: The result store backend used for this task.
backend = None
#: If disabled this task won't be registered automatically.
autoregister = True
#: If enabled the task will report its status as "started" when the task
#: is executed by a worker. Disabled by default as the normal behaviour
#: is to not report that level of granularity. Tasks are either pending,
#: finished, or waiting to be retried.
#:
#: Having a "started" status can be useful for when there are long
#: running tasks and there is a need to report which task is currently
#: running.
#:
#: The application default can be overridden using the
#: :setting:`CELERY_TRACK_STARTED` setting.
track_started = False
#: When enabled messages for this task will be acknowledged **after**
#: the task has been executed, and not *just before* which is the
#: default behavior.
#:
#: Please note that this means the task may be executed twice if the
#: worker crashes mid execution (which may be acceptable for some
#: applications).
#:
#: The application default can be overridden with the
#: :setting:`CELERY_ACKS_LATE` setting.
acks_late = False
#: Default task expiry time.
expires = None
#: The type of task *(no longer used)*.
type = "regular"
def __call__(self, *args, **kwargs):
return self.run(*args, **kwargs)
def __reduce__(self):
return (_unpickle_task, (self.name, ), None)
def run(self, *args, **kwargs):
"""The body of the task executed by workers."""
raise NotImplementedError("Tasks must define the run method.")
@classmethod
def get_logger(self, loglevel=None, logfile=None, propagate=False,
**kwargs):
"""Get task-aware logger object."""
return self.app.log.setup_task_logger(
log
cellml/libcellml | tests/resources/generator/hodgkin_huxley_squid_axon_model_1952/model.external.py | Python | apache-2.0 | 5,138 | 0.00506
# The content of this file was generated using the Python profile of libCellML 0.2.0.
from enum import Enum
from math import *
__version__ = "0.3.0"
LIBCELLML_VERSION = "0.2.0"
STATE_COUNT = 3
VARIABLE_COUNT = 19
class VariableType(Enum):
VARIABLE_OF_INTEGRATION = 1
STATE = 2
CONSTANT = 3
COMPUTED_CONSTANT = 4
ALGEBRAIC = 5
EXTERNAL = 6
VOI_INFO = {"name": "time", "units": "millisecond", "component": "environment", "type": VariableType.VARIABLE_OF_INTEGRATION}
STATE_INFO = [
{"name": "m", "units": "dimensionless", "component": "sodium_channel_m_gate", "type": VariableType.STATE},
{"name": "h", "units": "dimensionless", "component": "sodium_channel_h_gate", "type": VariableType.STATE},
{"name": "n", "units": "dimensionless", "component": "potassium_channel_n_gate", "type": VariableType.STATE}
]
VARIABLE_INFO = [
{"name": "V", "units": "millivolt", "component": "membrane", "type": VariableType.EXTERNAL},
{"name": "g_L", "units": "milliS_per_cm2", "component": "leakage_current", "type": VariableType.CONSTANT},
{"name": "Cm", "units": "microF_per_cm2", "component": "membrane", "type": VariableType.CONSTANT},
{"name": "E_R", "units": "millivolt", "component": "membrane", "type": VariableType.CONSTANT},
{"name": "g_K", "units": "milliS_per_cm2", "component": "potassium_channel", "type": VariableType.CONSTANT},
{"name": "g_Na", "units": "milliS_per_cm2", "component": "sodium_channel", "type": VariableType.CONSTANT},
{"name": "i_Stim", "units": "microA_per_cm2", "component": "membrane", "type": VariableType.ALGEBRAIC},
{"name": "E_L", "units": "millivolt", "component": "leakage_current", "type": VariableType.COMPUTED_CONSTANT},
{"name": "i_L", "units": "microA_per_cm2", "component": "leakage_current", "type": VariableType.ALGEBRAIC},
{"name": "E_Na", "units": "millivolt", "component": "sodium_channel", "type": VariableType.COMPUTED_CONSTANT},
{"name": "i_Na", "units": "microA_per_cm2", "component": "sodium_channel", "type": VariableType.EXTERNAL},
{"name": "alpha_m", "units": "per_millisecond", "component": "sodium_channel_m_gate", "type": VariableType.ALGEBRAIC},
{"name": "beta_m", "units": "per_millisecond", "component": "sodium_channel_m_gate", "type": VariableType.ALGEBRAIC},
{"name": "alpha_h", "units": "per_millisecond", "component": "sodium_channel_h_gate", "type": VariableType.ALGEBRAIC},
{"name": "beta_h", "units": "per_millisecond", "component": "sodium_channel_h_gate", "type": VariableType.ALGEBRAIC},
{"name": "E_K", "units": "millivolt", "component": "potassium_channel", "type": VariableType.COMPUTED_CONSTANT},
{"name": "i_K", "units": "microA_per_cm2", "component": "potassium_channel", "type": VariableType.ALGEBRAIC},
{"name": "alpha_n", "units": "per_millisecond", "component": "potassium_channel_n_gate", "type": VariableType.EXTERNAL},
{"name": "beta_n", "units": "per_millisecond", "component": "potassium_channel_n_gate", "type": VariableType.ALGEBRAIC}
]
def leq_func(x, y):
return 1.0 if x <= y else 0.0
def geq_func(x, y):
return 1.0 if x >= y else 0.0
def and_func(x, y):
return 1.0 if bool(x) & bool(y) else 0.0
def create_states_array():
return [nan]*STATE_COUNT
def create_variables_array():
return [nan]*VARIABLE_COUNT
def initialise_variables(voi, states, variables, external_variable):
variables[1] = 0.3
variables[2] = 1.0
variables[3] = 0.0
variables[4] = 36.0
variables[5] = 120.0
states[0] = 0.05
states[1] = 0.6
states[2] = 0.325
variables[0] = external_variable(voi, states, variables, 0)
variables[17] = external_variable(voi, states, variables, 17)
variables[10] = external_variable(voi, states, variables, 10)
def compute_computed_constants(variables):
variables[7] = variables[3]-10.613
variables[9] = variables[3]-115.0
variables[15] = variables[3]+12.0
def compute_rates(voi, states, rates, variables, external_variable):
variables[0] = external_variable(voi, states, variables, 0)
variables[11] = 0.1*(variables[0]+25.0)/(exp((variables[0]+25.0)/10.0)-1.0)
variables[12] = 4.0*exp(variables[0]/18.0)
rates[0] = variables[11]*(1.0-states[0])-variables[12]*states[0]
variables[13] = 0.07*exp(variables[0]/20.0)
variables[14] = 1.0/(exp((variables[0]+30.0)/10.0)+1.0)
rates[1] = variables[13]*(1.0-states[1])-variables[14]*states[1]
variables[17] = external_variable(voi, states, variables, 17)
variables[18] = 0.125*exp(variables[0]/80.0)
rates[2] = variables[17]*(1.0-states[2])-variables[18]*states[2]
def compute_variables(voi, states, rates, variables, external_variable):
variables[0] = external_variable(voi, states, variables, 0)
variables[6] = -20.0 if and_func(geq_func(voi, 10.0), leq_func(voi, 10.5)) else 0.0
variables[8] = variables[1]*(variables[0]-variables[7])
variables[17] = external_variable(voi, states, variables, 17)
variables[10] = external_variable(voi, states, variables, 10)
variables[16] = variables[4]*pow(states[2], 4.0)*(variables[0]-variables[15])
riveridea/gnuradio | gr-digital/python/digital/psk_constellations.py | Python | gpl-3.0 | 6,937 | 0.006631
#!/usr/bin/env python
#
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import numpy
from constellation_map_generator import *
'''
Note on the naming scheme. Each constellation is named using a prefix
for the type of constellation, the order of the constellation, and a
distinguishing feature, which comes in three modes:
- No extra feature: the basic Gray-coded constellation map; others
will be derived from this type.
- A single number: an indexed number to uniquely identify different
constellation maps.
- 0xN_x0_x1..._xM: A permutation of the base constellation, explained
below.
For rectangular constellations (BPSK, QPSK, QAM), we can define a
hyperspace and look for all symmetries. This is also known as the
automorphism group of the hypercube, aka the hyperoctahedral
group. What this means is that we can easily define all possible
rotations in terms of the first base mapping by creating the mapping:
f(x) = k XOR pi(x)
The x is the bit string for the symbol we are altering. Then k is a
bit string of n bits where n is the number of bits per symbol in the
constellation (e.g., 2 for QPSK or 6 for QAM64). The pi is a
permutation function specified as pi_0, pi_1..., pi_n-1. This permutes
the bits from the base constellation symbol to a new code, which is
then xor'd by k.
The value of k is from 0 to 2^n-1 and pi is a list of all bit
positions.
The total number of Gray coded modulations is (2^n)*(n!).
We create aliases for all possible naming schemes for the
constellations. So if a hyperoctahedral group is defined, we also set
this function equal to a function name using a unique ID number, and
we always select one rotation as our basic rotation that the other
rotations are based off of.
'''
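# Illustrative sketch (not part of the original file) of the remapping
# f(x) = k XOR pi(x) described in the docstring above. `base` is any psk_*
# function below returning (const_points, symbols); `pi[i]` names the input
# bit copied to output bit i. This is one plausible reading of what
# constellation_map_generator does, not its actual implementation.
def _remap_sketch(base, k, pi):
    const_points, symbols = base()
    def f(x):
        permuted = 0
        for out_bit, in_bit in enumerate(pi):
            permuted |= ((x >> in_bit) & 1) << out_bit
        return k ^ permuted
    return const_points, [f(s) for s in symbols]
# e.g. _remap_sketch(psk_4, 0x1, [0, 1]) gives symbols [1, 0, 3, 2],
# matching psk_4_0x1_0_1 below.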
# BPSK Constellation Mappings
def psk_2_0x0():
'''
0 | 1
'''
const_points = [-1, 1]
symbols = [0, 1]
return (const_points, symbols)
psk_2 = psk_2_0x0 # Basic BPSK rotation
psk_2_0 = psk_2 # First ID for BPSK rotations
def psk_2_0x1():
'''
1 | 0
'''
const_points = [-1, 1]
symbols = [1, 0]
return (const_points, symbols)
psk_2_1 = psk_2_0x1
############################################################
# BPSK Soft bit LUT generators
############################################################
def sd_psk_2_0x0(x, Es=1):
'''
0 | 1
'''
x_re = x.real
dist = Es*numpy.sqrt(2)
return [dist*x_re,]
sd_psk_2 = sd_psk_2_0x0 # Basic BPSK rotation
sd_psk_2_0 = sd_psk_2 # First ID for BPSK rotations
def sd_psk_2_0x1(x, Es=1):
'''
1 | 0
'''
x_re = x.real
dist = Es*numpy.sqrt(2)
return [-dist*x_re,]
sd_psk_2_1 = sd_psk_2_0x1
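# Usage note (illustrative, not in the original file): each sd_* function maps
# one received complex sample to a list of soft bits whose sign carries the
# hard decision, e.g. sd_psk_2_0x0(0.7 + 0j) ~ [0.99] (a confident 1) and
# sd_psk_2_0x0(-0.7 + 0j) ~ [-0.99] (a confident 0), since Es*sqrt(2) ~ 1.414.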
############################################################
# QPSK Constellation Mappings
############################################################
def psk_4_0x0_0_1():
'''
10 | 11
-------
00 | 01
'''
const_points = [-1-1j, 1-1j,
-1+1j, 1+1j]
symbols = [0, 1, 2, 3]
return (const_points, symbols)
psk_4 = psk_4_0x0_0_1
psk_4_0 = psk_4
def psk_4_0x1_0_1():
'''
11 | 10
-------
01 | 00
'''
k = 0x1
pi = [0, 1]
return constellation_map_generator(psk_4, k, pi)
psk_4_1 = psk_4_0x1_0_1
def psk_4_0x2_0_1():
'''
00 | 01
-------
10 | 11
'''
k = 0x2
pi = [0, 1]
return constellation_map_generator(psk_4, k, pi)
psk_4_2 = psk_4_0x2_0_1
def psk_4_0x3_0_1():
'''
01 | 00
-------
11 | 10
'''
k = 0x3
pi = [0, 1]
return constellation_map_generator(psk_4, k, pi)
psk_4_3 = psk_4_0x3_0_1
def psk_4_0x0_1_0():
'''
01 | 11
-------
00 | 10
'''
k = 0x0
pi = [1, 0]
return constellation_map_generator(psk_4, k, pi)
psk_4_4 = psk_4_0x0_1_0
def psk_4_0x1_1_0():
'''
00 | 10
-------
01 | 11
'''
k = 0x1
pi = [1, 0]
return constellation_map_generator(psk_4, k, pi)
psk_4_5 = psk_4_0x1_1_0
def psk_4_0x2_1_0():
'''
11 | 01
-------
10 | 00
'''
k = 0x2
pi = [1, 0]
return constellation_map_generator(psk_4, k, pi)
psk_4_6 = psk_4_0x2_1_0
def psk_4_0x3_1_0():
'''
10 | 00
-------
11 | 01
'''
k = 0x3
pi = [1, 0]
return constellation_map_generator(psk_4, k, pi)
psk_4_7 = psk_4_0x3_1_0
############################################################
# QPSK Constellation Softbit LUT generators
############################################################
def sd_psk_4_0x0_0_1(x, Es=1):
'''
10 | 11
-------
00 | 01
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [dist*x_im, dist*x_re]
sd_psk_4 = sd_psk_4_0x0_0_1
sd_psk_4_0 = sd_psk_4
def sd_psk_4_0x1_0_1(x, Es=1):
'''
11 | 10
-------
01 | 00
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [dist*x_im, -dist*x_re]
sd_psk_4_1 = sd_psk_4_0x1_0_1
def sd_psk_4_0x2_0_1(x, Es=1):
'''
00 | 01
-------
10 | 11
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [-dist*x_im, dist*x_re]
sd_psk_4_2 = sd_psk_4_0x2_0_1
def sd_psk_4_0x3_0_1(x, Es=1):
'''
01 | 00
-------
11 | 10
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [-dist*x_im, -dist*x_re]
sd_psk_4_3 = sd_psk_4_0x3_0_1
def sd_psk_4_0x0_1_0(x, Es=1):
'''
01 | 11
-------
00 | 10
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [dist*x_re, dist*x_im]
sd_psk_4_4 = sd_psk_4_0x0_1_0
def sd_psk_4_0x1_1_0(x, Es=1):
'''
00 | 10
-------
01 | 11
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [dist*x_re, -dist*x_im]
sd_psk_4_5 = sd_psk_4_0x1_1_0
def sd_psk_4_0x2_1_0(x, Es=1):
'''
11 | 01
-------
10 | 00
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [-dist*x_re, dist*x_im]
sd_psk_4_6 = sd_psk_4_0x2_1_0
def sd_psk_4_0x3_1_0(x, Es=1):
'''
10 | 00
-------
11 | 01
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [-dist*x_re, -dist*x_im]
sd_psk_4_7 = sd_psk_4_0x3_1_0
h2oai/h2o-3 | h2o-py/tests/testdir_algos/infogram/pyunit_PUBDEV_8075_safe_infogram_personal_loan_x_att.py | Python | apache-2.0 | 1,365 | 0.01685
from __future__ import print_function
import os
import sys
sys.path.insert(1, os.path.join("..","..",".."))
import h2o
from h2o.estimators.infogram import H2OInfogram
from h2o.estimators.glm import H2OGeneralizedLinearEstimator
from tests import pyunit_utils
def test_infogram_personal_loan():
"""
Test to make sure predictor can be specified using infogram model.
"""
fr = h2o.import_file(path=pyunit_utils.locate("smalldata/admissibleml_test/Bank_Personal_Loan_Modelling.csv"))
target = "Personal Loan"
fr[target] = fr[target].asfactor()
x = ["Experience","Income","Family","CCAvg","Education","Mortgage",
"Securities Account","CD Account","Online","CreditCard"]
infogram_model = H2OInfogram(seed = 12345, protected_columns=["Age","ZIP Code"])
infogram_model.train(x=x, y=target, training_frame=fr)
glm_model1 = H2OGeneralizedLinearEstimator()
glm_model1.train(x=infogram_model._extract_x_from_model(), y=target, training_frame=fr)
coef1 = glm_model1.coef()
glm_model2 = H2OGeneralizedLinearEstimator()
glm_model2.train(x=infogram_model, y=target, training_frame=fr)
coef2 = glm_model2.coef()
pyunit_utils.assertCoefDictEqual(coef1, coef2, tol=1e-6)
if __name__ == "__main__":
pyunit_utils.standalone_test(test_infogram_personal_loan)
else:
test_infogram_personal_loan()
meren/anvio | anvio/data/misc/MODELLER/scripts/pir_to_fasta.py | Python | gpl-3.0 | 298 | 0.030201
"""
Positional arguments:
1. INPUT - file path to PIR file
2. OUTPUT - file path of output FASTA file
"""
import sys
PIR = sys.argv[1]
FASTA = sys.argv[2]
from modeller import *
e = environ()
a = alignment(e, file = PIR, alignment_format = 'PIR')
a.write(file = FASTA, alignment_format = 'FASTA')
plotly/python-api | packages/python/plotly/plotly/validators/histogram/unselected/marker/__init__.py | Python | mit | 325 | 0
import sys
if sys.version_info < (3, 7):
from ._opacity import OpacityValidator
from ._color import ColorValidator
else:
from _plotly_utils.importers import relative_import
__all__, __getattr__, __dir__ = relative_import(
__name__, [], ["._opacity.OpacityValidator", "._color.ColorValidator"]
)
centic9/subversion-ppa | subversion/tests/cmdline/svntest/actions.py | Python | apache-2.0 | 86,069 | 0.011468
#
# actions.py: routines that actually run the svn client.
#
# Subversion is a tool for revision control.
# See http://subversion.tigris.org for more information.
#
# ====================================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
######################################################################
import os, shutil, re, sys, errno
import difflib, pprint, logging
import xml.parsers.expat
from xml.dom.minidom import parseString
if sys.version_info[0] >= 3:
# Python >=3.0
from io import StringIO
else:
# Python <3.0
from cStringIO import StringIO
import svntest
from svntest import main, verify, tree, wc, sandbox
from svntest import Failure
logger = logging.getLogger()
# (abbreviation)
Item = svntest.wc.StateItem
def _log_tree_state(msg, actual, subtree=""):
if subtree:
subtree += os.sep
o = StringIO()
o.write(msg + '\n')
tree.dump_tree_script(actual, subtree, stream=o)
logger.warn(o.getvalue())
o.close()
def no_sleep_for_timestamps():
os.environ['SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS'] = 'yes'
def do_sleep_for_timestamps():
os.environ['SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS'] = 'no'
def no_relocate_validation():
os.environ['SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_RELOCATE_VALIDATION'] = 'yes'
def do_relocate_validation():
os.environ['SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_RELOCATE_VALIDATION'] = 'no'
def setup_pristine_greek_repository():
"""Create the pristine repository and 'svn import' the greek tree"""
# these directories don't exist out of the box, so we may have to create them
if not os.path.exists(main.general_wc_dir):
os.makedirs(main.general_wc_dir)
if not os.path.exists(main.general_repo_dir):
os.makedirs(main.general_repo_dir) # this also creates all the intermediate dirs
# If there's no pristine repos, create one.
if not os.path.exists(main.pristine_greek_repos_dir):
main.create_repos(main.pristine_greek_repos_dir)
# if this is dav, gives us access rights to import the greek tree.
if main.is_ra_type_dav():
authz_file = os.path.join(main.work_dir, "authz")
main.file_write(authz_file, "[/]\n* = rw\n")
# dump the greek tree to disk.
main.greek_state.write_to_disk(main.greek_dump_dir)
# import the greek tree, using l:foo/p:bar
### todo: svn should not be prompting for auth info when using
### repositories with no auth/auth requirements
_, output, _ = main.run_svn(None, 'import', '-m',
'Log message for revision 1.',
main.greek_dump_dir,
main.pristine_greek_repos_url)
# verify the printed output of 'svn import'.
lastline = output.pop().strip()
match = re.search("(Committed|Imported) revision [0-9]+.", lastline)
if not match:
logger.error("import did not succeed, while creating greek repos.")
logger.error("The final line from 'svn import' was:")
logger.error(lastline)
sys.exit(1)
output_tree = wc.State.from_commit(output)
expected_output_tree = main.greek_state.copy(main.greek_dump_dir)
expected_output_tree.tweak(verb='Adding',
contents=None)
try:
expected_output_tree.compare_and_display('output', output_tree)
except tree.SVNTreeUnequal:
verify.display_trees("ERROR: output of import command is unexpected.",
"OUTPUT TREE",
expected_output_tree.old_tree(),
output_tree.old_tree())
sys.exit(1)
# Finally, disallow any changes to the "pristine" repos.
error_msg = "Don't modify the pristine repository"
create_failing_hook(main.pristine_greek_repos_dir, 'start-commit', error_msg)
create_failing_hook(main.pristine_greek_repos_dir, 'pre-lock', error_msg)
create_failing_hook(main.pristine_greek_repos_dir, 'pre-revprop-change', error_msg)
######################################################################
def guarantee_empty_repository(path):
"""Guarantee that a local svn repository exists at PATH, containing
nothing."""
if path == main.pristine_greek_repos_dir:
logger.error("attempt to overwrite the pristine repos! Aborting.")
sys.exit(1)
# create an empty repository at PATH.
main.safe_rmtree(path)
main.create_repos(path)
# Used by every test, so that they can run independently of one
# another.  Every time this routine is called, it recursively copies
# the `pristine repos' to a new location.
# Note: make sure setup_pristine_greek_repository was called once before
# using this function.
def guarantee_greek_repository(path, minor_version):
"""Guarantee that a local svn repository exists at PATH, containing
nothing but the greek-tree at revision 1."""
if path == main.pristine_greek_repos_dir:
logger.error("attempt to overwrite the pristine repos! Aborting.")
sys.exit(1)
# copy the pristine repository to PATH.
main.safe_rmtree(path)
if main.copy_repos(main.pristine_greek_repos_dir, path, 1, 1, minor_version):
logger.error("copying repository failed.")
sys.exit(1)
# make the repos world-writeable, for mod_dav_svn's sake.
main.chmod_tree(path, 0o666, 0o666)
def run_and_verify_atomic_ra_revprop_change(message,
expected_stdout,
expected_stderr,
expected_exit,
url, revision, propname,
old_propval, propval,
want_error):
"""Run atomic-ra-revprop-change helper and check its output and exit code.
Transforms OLD_PROPVAL and PROPVAL into a skel.
For HTTP, the default HTTP library is used."""
KEY_OLD_PROPVAL = "old_value_p"
KEY_NEW_PROPVAL = "value"
def skel_make_atom(word):
return "%d %s" % (len(word), word)
def make_proplist_skel_part(nick, val):
if val is None:
return ""
else:
return "%s %s" % (skel_make_atom(nick), skel_make_atom(val))
skel = "( %s %s )" % (make_proplist_skel_part(KEY_OLD_PROPVAL, old_propval),
make_proplist_skel_part(KEY_NEW_PROPVAL, propval))
exit_code, out, err = main.run_atomic_ra_revprop_change(url, revision,
propname, skel,
want_error)
verify.verify_outputs("Unexpected output", out, err,
expected_stdout, expected_stderr)
verify.verify_exit_code(message, exit_code, expected_exit)
return exit_code, out, err
def run_and_verify_svnlook(message, expected_stdout,
expected_stderr, *varargs):
"""Like run_and_verify_svnlook2, but the expected exit code is
assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
expected_exit = 0
if expected_stderr is not None and expected_stderr != []:
expected_exit = 1
return run_and_verify_svnlook2(message, expected_stdout, expected_stderr,
expected_exit, *varargs)
def run_and_verify_svnlook2(message, expected_stdout, expected_stderr,
samuelarm/A-Level_2016-18 | general/cash register.py | Python | gpl-3.0 | 45 | 0.088889
#cash register
#Samuel Armstrong
docusign/docusign-python-client | docusign_esign/models/notary_journal_list.py | Python | mit | 9,255 | 0.000108
# coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. # noqa: E501
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class NotaryJournalList(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'end_position': 'str',
'next_uri': 'str',
'notary_journals': 'list[NotaryJournal]',
'previous_uri': 'str',
'result_set_size': 'str',
'start_position': 'str',
'total_set_size': 'str'
}
attribute_map = {
'end_position': 'endPosition',
'next_uri': 'nextUri',
'notary_journals': 'notaryJournals',
'previous_uri': 'previousUri',
'result_set_size': 'resultSetSize',
'start_position': 'startPosition',
'total_set_size': 'totalSetSize'
}
def __init__(self, end_position=None, next_uri=None, notary_journals=None, previous_uri=None, result_set_size=None, start_position=None, total_set_size=None): # noqa: E501
"""NotaryJournalList - a model defined in Swagger""" # noqa: E501
self._end_position = None
self._next_uri = None
self._notary_journals = None
self._previous_uri = None
self._result_set_size = None
self._start_position = None
self._total_set_size = None
self.discriminator = None
if end_position is not None:
self.end_position = end_position
if next_uri is not None:
self.next_uri = next_uri
if notary_journals is not None:
self.notary_journals = notary_journals
if previous_uri is not None:
self.previous_uri = previous_uri
if result_set_size is not None:
self.result_set_size = result_set_size
if start_position is not None:
self.start_position = start_position
if total_set_size is not None:
self.total_set_size = total_set_size
@property
def end_position(self):
"""Gets the end_position of this NotaryJournalList. # noqa: E501
The last position in the result set. # noqa: E501
:return: The end_position of this NotaryJournalList. # noqa: E501
:rtype: str
"""
return self._end_position
@end_position.setter
def end_position(self, end_position):
"""Sets the end_position of this NotaryJournalList.
The last position in the result set. # noqa: E501
:param end_position: The end_position of this NotaryJournalList. # noqa: E501
:type: str
"""
self._end_position = end_position
@property
def next_uri(self):
"""Gets the next_uri of this NotaryJournalList. # noqa: E501
The URI to the next chunk of records based on the search request. If the endPosition is the entire results of the search, this is null. # noqa: E501
:return: The next_uri of this NotaryJournalList. # noqa: E501
:rtype: str
"""
return self._next_uri
@next_uri.setter
def next_uri(self, next_uri):
"""Sets the next_uri of this NotaryJournalList.
The URI to the next chunk of records based on the search request. If the endPosition is the entire results of the search, this is null. # noqa: E501
:param next_uri: The next_uri of this NotaryJournalList. # noqa: E501
:type: str
"""
self._next_uri = next_uri
@property
def notary_journals(self):
"""Gets the notary_journals of this NotaryJournalList. # noqa: E501
# noqa: E501
:return: The notary_journals of this NotaryJournalList. # noqa: E501
:rtype: list[NotaryJournal]
"""
return self._notary_journals
@notary_journals.setter
def notary_journals(self, notary_journals):
"""Sets the notary_journals of this NotaryJournalList.
# noqa: E501
:param notary_journals: The notary_journals of this NotaryJournalList. # noqa: E501
:type: list[NotaryJournal]
"""
self._notary_journals = notary_journals
@property
def previous_uri(self):
"""Gets the previous_uri of this NotaryJournalList. # noqa: E501
The URI to the previous chunk of records based on the search request. # noqa: E501
:return: The previous_uri of this NotaryJournalList. # noqa: E501
:rtype: str
"""
return self._previous_uri
@previous_uri.setter
def previous_uri(self, previous_uri):
"""Sets the previous_uri of this NotaryJournalList.
The URI to the previous chunk of records based on the search request. # noqa: E501
:param previous_uri: The previous_uri of this NotaryJournalList. # noqa: E501
:type: str
"""
self._previous_uri = previous_uri
@property
def result_set_size(self):
"""Gets the result_set_size of this NotaryJournalList. # noqa: E501
The number of results returned in this response. # noqa: E501
:return: The result_set_size of this NotaryJournalList. # noqa: E501
:rtype: str
"""
return self._result_set_size
@result_set_size.setter
def result_set_size(self, result_set_size):
"""Sets the result_set_size of this NotaryJournalList.
The number of results returned in this response. # noqa: E501
:param result_set_size: The result_set_size of this NotaryJournalList. # noqa: E501
:type: str
"""
self._result_set_size = result_set_size
@property
def start_position(self):
"""Gets the start_position of this NotaryJournalList. # noqa: E501
Starting position of the current result set. # noqa: E501
:return: The start_position of this NotaryJournalList. # noqa: E501
:rtype: str
"""
return self._start_position
@start_position.setter
def start_position(self, start_position):
"""Sets the start_position of this NotaryJournalList.
Starting position of the current result set. # noqa: E501
:param start_position: The start_position of this NotaryJournalList. # noqa: E501
:type: str
"""
self._start_position = start_position
@property
def total_set_size(self):
"""Gets the total_set_size of this NotaryJournalList. # noqa: E501
The total number of items available in the result set. This will always be greater than or equal to the value of the property returning the results in the response. # noqa: E501
:return: The total_set_size of this NotaryJournalList. # noqa: E501
:rtype: str
"""
return self._total_set_size
@total_set_size.setter
def total_set_size(self, total_set_size):
"""Sets the total_set_size of this NotaryJournalList.
The total number of items available in the result set. This will always be greater than or equal to the value of the property returning the results in the response. # noqa: E501
:param total_set_size: The total_set_size of this NotaryJournalList. # noqa: E501
:type: str
"""
self._total_set_size = total_set_size
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "t
googleads/google-ads-python | google/ads/googleads/v10/enums/types/seasonality_event_status.py | Python | apache-2.0 | 1,255 | 0.000797
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v10.enums",
marshal="google.ads.googleads.v10",
manifest={"SeasonalityEventStatusEnum",},
)
class SeasonalityEventStatusEnum(proto.Message):
r"""Message describing seasonality event statuses. The two types
of seasonality events are BiddingSeasonalityAdjustments and
BiddingDataExclusions.
"""
class SeasonalityEventStatus(proto.Enum):
r"""The possible statuses of a Seasonality Event."""
UNSPECIFIED = 0
UNKNOWN = 1
ENABLED = 2
REMOVED = 4
__all__ = tuple(sorted(__protobuf__.manifest))
zstackio/zstack-woodpecker | integrationtest/vm/vpc_ha/suite_teardown.py | Python | apache-2.0 | 1,332 | 0.005255
'''
Integration Test Teardown case
@author: Youyk
'''
import zstacklib.utils.linux as linux
import zstacklib.utils.http as http
import zstackwoodpecker.setup_actions as setup_actions
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.clean_util as clean_util
import zstackwoodpecker.test_lib as test_lib
import zstacktestagent.plugins.host as host_plugin
import zstacktestagent.testagent as testagent
def test():
clean_util.cleanup_all_vms_violently()
clean_util.cleanup_none_vm_volumes_violently()
clean_util.umount_all_primary_storages_violently()
clean_util.cleanup_backup_storage()
#linux.remove_vlan_eth("eth0", 10)
#linux.remove_vlan_eth("eth0", 11)
cmd = host_plugin.DeleteVlanDeviceCmd()
cmd.vlan_ethname = 'eth0.10'
hosts = test_lib.lib_get_all_hosts_from_plan()
if type(hosts) != type([]):
hosts = [hosts]
for host in hosts:
http.json_dump_post(testagent.build_http_path(host.managementIp_, host_plugin.DELETE_VLAN_DEVICE_PATH), cmd)
cmd.vlan_ethname = 'eth0.11'
for host in hosts:
http.json_dump_post(testagent.build_http_path(host.managementIp_, host_plugin.DELETE_VLAN_DEVICE_PATH), cmd)
test_lib.setup_plan.stop_node()
test_lib.lib_cleanup_host_ip_dict()
test_util.test_pass('VPC Teardown Success')
matthieu-meaux/DLLM | examples/broken_wing_validation/valid_dpR_dpiAoA.py | Python | gpl-2.0 | 2,233 | 0.012987
# -*-mode: python; py-indent-offset: 4; tab-width: 8; coding: iso-8859-1 -*-
# DLLM (non-linear Differentiated Lifting Line Model, open source software)
#
# Copyright (C) 2013-2015 Airbus Group SAS
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# https://github.com/matthieu-meaux/DLLM.git
#
# @author : Matthieu MEAUX
#
from MDOTools.ValidGrad.FDValidGrad import FDValidGrad
from DLLM.DLLMGeom.wing_broken import Wing_Broken
from DLLM.DLLMKernel.DLLMSolver import DLLMSolver
from MDOTools.OC.operating_condition import OperatingCondition
import numpy
OC=OperatingCondition('cond1')
OC.set_Mach(0.8)
OC.set_AoA(3.5)
OC.set_altitude(10000.)
OC.set_T0_deg(15.)
OC.set_P0(101325.)
OC.set_humidity(0.)
OC.compute_atmosphere()
wing_param=Wing_Broken('broken_wing',n_sect=20)
wing_param.import_BC_from_file('input_parameters.par')
wing_param.build_linear_airfoil(OC, AoA0=0.0, set_as_ref=True)
wing_param.build_airfoils_from_ref()
wing_param.update()
print wing_param
DLLM = DLLMSolver('Simple',wing_param,OC)
DLLM.run_direct()
iAoA0=DLLM.get_iAoA()
print 'iAoA0 shape',iAoA0.shape
print 'iAoA0=',iAoA0
def f(x):
func=DLLM.comp_R(x)
return func
def df(x):
func_grad=DLLM.comp_dpR_dpiAoA(x)
return func_grad
val_grad=FDValidGrad(2,f,df,fd_step=1.e-8)
ok,df_fd,df=val_grad.compare(iAoA0,treshold=1.e-6,return_all=True)
print '\n****************************************************'
if ok:
print 'dpR_dpiAoA is valid.'
else:
print '!!!! dpR_dpiAoA is NOT valid !!!!'
print '****************************************************'
masayukig/tempest | tempest/api/compute/test_extensions.py | Python | apache-2.0 | 2,098 | 0
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tempest.api.compute import base
from tempest.common import utils
from tempest import config
from tempest.lib import decorators
CONF = config.CONF
LOG = logging.getLogger(__name__)
class ExtensionsTest(base.BaseV2ComputeTest):
@decorators.idempotent_id('3bb27738-b759-4e0d-a5fa-37d7a6df07d1')
def test_list_extensions(self):
# List of all extensions
if not CONF.compute_feature_enabled.api_extensions:
raise self.skipException('There are not any extensions configured')
extensions = self.extensions_client.list_extensions()['extensions']
ext = CONF.compute_feature_enabled.api_extensions[0]
# Log extensions list
extension_list = [x['alias'] for x in extensions]
LOG.debug("Nova extensions: %s", ','.join(extension_list))
if ext == 'all':
self.assertIn('Hosts', map(lambda x: x['name'], extensions))
elif ext:
self.assertIn(ext, extension_list)
else:
raise self.skipException('There are not any extensions configured')
@decorators.idempotent_id('05762f39-bdfa-4cdb-9b46-b78f8e78e2fd')
@utils.requires_ext(extension='os-consoles', service='compute')
def test_get_extension(self):
# get the specified extensions
extension = self.extensions_client.show_extension('os-consoles')
self.assertEqual('os-consoles', extension['extension']['alias'])
fangohr/oommf-python | joommf/odtreader.py | Python | bsd-2-clause | 1,380 | 0
"""
odtreader.py
Contains class ODTFile for reading OOMMF ODT data into a Pandas dataframe
Author: Ryan Pepper (2016)
University of Southampton
"""
import pandas as pd
import tempfile
import re
class ODTFile(object):
def __init__(self, filename):
f = open(filename)
# Can't use 'w+b' for compatibility with Py2
temporary_file = tempfile.NamedTemporaryFile(mode='w')
metadata = []
for line in f:
if line[0] == '#':
metadata.append(line)
else:
new_line = re.sub(r'\s+', ',', line.lstrip().rstrip()) + '\n'
temporary_file.write(new_line)
temporary_file.flush()
self.dataframe = pd.read_csv(temporary_file.name, header=None)
header = []
for column in metadata[3].split('Oxs_')[1:]:
column = column.replace('{', '')
column = column.replace('}', '')
column = column.rstrip().replace(' ', '_')
column = column.replace('::', '_')
column = column.replace(':', '_')
column = column.replace('RungeKuttaEvolve_evolve_', '')
column = column.replace('TimeDriver_', '')
column = column.replace('Simulation_', '')
header.append(column)
self.dataframe.columns = header
temporary_file.close()
self.df = self.dataframe
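# Usage sketch (illustrative; "example.odt" is a placeholder path):
#
#     odt = ODTFile("example.odt")
#     print(odt.df.columns)  # column names cleaned from the ODT header line
#     print(odt.df.head())   # the table rows as a pandas DataFrame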
ianfhunter/LoLss | secondscreen/synch/migrations/0002_auto__chg_field_screen_page_id.py | Python | gpl-2.0 | 3,281 | 0.007315
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Screen.page_id'
db.alter_column(u'synch_screen', 'page_id', self.gf('django.db.models.fields.CharField')(max_length=5))
def backwards(self, orm):
# Changing field 'Screen.page_id'
db.alter_column(u'synch_screen', 'page_id', self.gf('django.db.models.fields.IntegerField')(max_length=3, null=True))
models = {
u'synch.screen': {
'Meta': {'object_name': 'Screen'},
'baron_timer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'bottom_blue_timer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'bottom_inhib_blane': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'bottom_inhib_mlane': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'bottom_inhib_tlane': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'bottom_red_timer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'dragon_timer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'drawing': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '30000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '5'}),
'top_blue_timer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'top_inhib_blane': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'top_inhib_mlane': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'top_inhib_tlane': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'top_red_timer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'})
},
u'synch.ward': {
'Meta': {'object_name': 'Ward'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position_x': ('django.db.models.fields.IntegerField', [], {'max_length': '3', 'null': 'True'}),
'position_y': ('django.db.models.fields.IntegerField', [], {'max_length': '3', 'null': 'True'}),
'screen': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['synch.Screen']", 'null': 'True', 'blank': 'True'}),
'team': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'timer': ('django.db.models.fields.IntegerField', [], {'max_length': '3', 'null': 'True'})
}
}
complete_apps = ['synch']
99cloud/keystone_register | horizon/test/tests/tables.py | Python | apache-2.0 | 31,356 | 0.000128
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import http
from django import shortcuts
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from mox import IsA
from horizon import tables
from horizon.tables import views as table_views
from horizon.test import helpers as test
class FakeObject(object):
def __init__(self, id, name, value, status, optional=None, excluded=None):
self.id = id
self.name = name
self.value = value
self.status = status
self.optional = optional
self.excluded = excluded
self.extra = "extra"
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.name)
TEST_DATA = (
FakeObject('1', 'object_1', 'value_1', 'up', 'optional_1', 'excluded_1'),
FakeObject('2', 'object_2', '<strong>evil</strong>', 'down', 'optional_2'),
FakeObject('3', 'object_3', 'value_3', 'up'),
)
TEST_DATA_2 = (
FakeObject('1', 'object_1', 'value_1', 'down', 'optional_1', 'excluded_1'),
)
TEST_DATA_3 = (
FakeObject('1', 'object_1', 'value_1', 'up', 'optional_1', 'excluded_1'),
)
TEST_DATA_4 = (
FakeObject('1', 'object_1', 2, 'up'),
FakeObject('2', 'object_2', 4, 'up'),
)
TEST_DATA_5 = (
FakeObject('1', 'object_1', 'A Value That is longer than 35 characters!',
'down', 'optional_1'),
)
class MyLinkAction(tables.LinkAction):
name = "login"
verbose_name = "Log In"
url = "login"
attrs = {
"class": "ajax-modal",
}
def get_link_url(self, datum=None, *args, **kwargs):
return reverse(self.url)
class MyAction(tables.Action):
name = "delete"
verbose_name = "Delete Me"
verbose_name_plural = "Delete Them"
def allowed(self, request, obj=None):
return getattr(obj, 'status', None) != 'down'
def handle(self, data_table, request, object_ids):
return shortcuts.redirect('http://example.com/?ids=%s'
% ",".join(object_ids))
class MyColumn(tables.Column):
pass
class MyRow(tables.Row):
ajax = True
@classmethod
def get_data(cls, request, obj_id):
return TEST_DATA_2[0]
class MyBatchAction(tables.BatchAction):
name = "batch"
action_present = _("Batch")
action_past = _("Batched")
data_type_singular = _("Item")
data_type_plural = _("Items")
def action(self, request, object_ids):
pass
class MyToggleAction(tables.BatchAction):
name = "toggle"
action_present = (_("Down"), _("Up"))
action_past = (_("Downed"), _("Upped"))
data_type_singular = _("Item")
data_type_plural = _("Items")
def allowed(self, request, obj=None):
if not obj:
return False
self.down = getattr(obj, 'status', None) == 'down'
if self.down:
self.current_present_action = 1
return self.down or getattr(obj, 'status', None) == 'up'
def action(self, request, object_ids):
if self.down:
#up it
self.current_past_action = 1
class MyFilterAction(tables.FilterAction):
def filter(self, table, objs, filter_string):
q = filter_string.lower()
def comp(obj):
if q in obj.name.lower():
return True
return False
return filter(comp, objs)
def get_name(obj):
return "custom %s" % obj.name
def get_link(obj):
return reverse('login')
class MyTable(tables.DataTable):
id = tables.Column('id', hidden=True, sortable=False)
name = tables.Column(get_name, verbose_name="Verbose Name", sortable=True)
value = tables.Column('value',
sortable=True,
link='http://example.com/',
attrs={'class': 'green blue'},
summation="average",
truncate=35,
link_classes=('link-modal',))
status = tables.Column('status', link=get_link)
optional = tables.Column('optional', empty_value='N/A')
excluded = tables.Column('excluded')
class Meta:
name = "my_table"
verbose_name = "My Table"
status_columns = ["status"]
columns = ('id', 'name', 'value', 'optional', 'status')
row_class = MyRow
column_class = MyColumn
table_actions = (MyFilterAction, MyAction, MyBatchAction)
row_actions = (MyAction, MyLinkAction, MyBatchAction, MyToggleAction)
class NoActionsTable(tables.DataTable):
id = tables.Column('id')
class Meta:
name = "no_actions_table"
verbose_name = _("No Actions Table")
table_actions = ()
row_actions = ()
class DataTableTests(test.TestCase):
def test_table_instantiation(self):
""" Tests everything that happens when the table is instantiated. """
self.table = MyTable(self.request, TEST_DATA)
# Properties defined on the table
self.assertEqual(self.table.data, TEST_DATA)
self.assertEqual(self.table.name, "my_table")
# Verify calculated options that weren't specified explicitly
self.assertTrue(self.table._meta.actions_column)
self.assertTrue(self.table._meta.multi_select)
# Test for verbose_name
self.assertEqual(unicode(self.table), u"My Table")
# Column ordering and exclusion.
# This should include auto-columns for multi_select and actions,
# but should not contain the excluded column.
# Additionally, auto-generated columns should use the custom
# column class specified on the table.
self.assertQuerysetEqual(self.table.columns.values(),
['<MyColumn: multi_select>',
'<Column: id>',
'<Column: name>',
'<Column: value>',
'<Column: optional>',
'<Column: status>',
'<MyColumn: actions>'])
# Actions (these also test ordering)
self.assertQuerysetEqual(self.table.base_actions.values(),
['<MyBatchAction: batch>',
'<MyAction: delete>',
'<MyFilterAction: filter>',
'<MyLinkAction: login>',
'<MyToggleAction: toggle>'])
self.assertQuerysetEqual(self.table.get_table_actions(),
['<MyFilterAction: filter>',
'<MyAction: delete>',
'<MyBatchAction: batch>'])
self.assertQuerysetEqual(self.table.get_row_actions(TEST_DATA[0]),
['<MyAction: delete>',
'<MyLinkAction: login>',
'<MyBatchAction: batch>',
'<MyToggleAction: toggle>'])
# Auto-generated columns
multi_select = self.table.columns['multi_select']
self.assertEqual(multi_select.auto, "multi_select")
self.assertEqual(multi_select.get_final_attrs().get('class', ""),
"multi_select_column")
actions = self.table.columns['actions']
self.assertEqual(actions.auto, "actions")
self.assertEqual(actions.get_final_attrs().get('class', ""),
"actions_column")
def test_table_force_no_multiselect
zqfan/leetcode | algorithms/216. Combination Sum III/solution2.py | Python | gpl-3.0 | 557 | 0
class Solution(object):
def combinationSum3(self, k, n):
"""
:type k: int
:type n: int
:rtype: List[List[int]]
"""
def dfs(s, n):
if len(path) >= k:
if n == 0:
result.append(path[:])
return
for i in xrange(s, 10):
if n < i:
return
path.append(i)
dfs(i + 1, n - i)
path.pop()
result, path = [], []
dfs(1, n)
return result
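# Usage sketch (illustrative): find all combinations of k distinct digits
# from 1-9 that sum to n.
#
#     Solution().combinationSum3(3, 7)  # -> [[1, 2, 4]]
#     Solution().combinationSum3(3, 9)  # -> [[1, 2, 6], [1, 3, 5], [2, 3, 4]]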
daite/JAVImageDownloader | jav_image_download.py | Python | mit | 3,951 | 0.026069
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
#The MIT License (MIT)
# Copyright (c) 2015 daite
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from scrapy import Selector
import codecs
import requests
import argparse
import os
class ImageDownloader:
def __init__(self, root_dir_name):
'''
:: init function
:: set root directory path when given root dir name
'''
self.root_dir_name = root_dir_name
self.root_dir_path = os.path.join(os.getcwd(), root_dir_name)
if not os.path.exists(self.root_dir_path):
os.mkdir(self.root_dir_path)
def get_image_urls(self, res_text):
'''
:: getting image urls from response_text
'''
self.img_urls = Selector(text=res_text).xpath('//a/@href').re('.*jpg$')
return self.img_urls
def get_description(self, res_text):
'''
:: getting description from response_text
'''
self.desc_contents = Selector(text=res_text).xpath('//blockquote/text()').extract()
return self.desc_contents
def save_stuff(self, sub_dir_name, img_urls, desc_contents, text_file_name='description.txt'):
'''
:: save images and description each subdir
'''
self.sub_dir_path = os.path.join(self.root_dir_path, sub_dir_name)
self.sub_dir_desc_file_name = os.path.join(self.sub_dir_path, text_file_name)
if not os.path.exists(self.sub_dir_path):
os.mkdir(self.sub_dir_path)
os.chdir(self.sub_dir_path)
with codecs.open(self.sub_dir_desc_file_name, 'a', encoding='utf-8') as f:
for content in desc_contents:
f.write(content)
for img_url in img_urls:
cmd = 'wget -nc -t 1 %s &' %img_url
os.system(cmd)
os.chdir(self.root_dir_path)
def multi_save_stuff(self, urlgen, start_num, end_num):
'''
:: multi save function
'''
for movie_num in range(start_num, end_num + 1):
url = urlgen(movie_num)
res_text = requests.get(url).text
img_urls = self.get_image_urls(res_text)
desc_contents = self.get_description(res_text)
if not img_urls:
print('No images!!!!')
continue
sub_dir_name = url.split('/')[-1].strip('.html')
self.save_stuff(sub_dir_name, img_urls, desc_contents)
if __name__ == '__main__':
gana_urlgen = lambda x : 'http://blog.livedoor.jp/kirekawa39-siro/archives/200GANA-%d.html' %x
siro_urlgen = lambda x : 'http://blog.livedoor.jp/kirekawa39-siro/archives/siro-%d.html' %x
parser = argparse.ArgumentParser()
parser.add_argument("start", type=int, help='start n
|
umber')
parser.add_argument("end", type=int, help='end number')
parser.add_argument('-g', '--gana',
help='download image from gana200',
action="store_true")
parser.add_argument('-s', '--siro',
help='download image from siro',
action="store_true")
args = parser.parse_args()
if args.gana:
i = ImageDownloader('GANA200')
i.multi_save_stuff(gana_urlgen, args.start, args.end)
elif args.siro:
i = ImageDownloader('SIRO')
i.multi_save_stuff(siro_urlgen, args.start, args.end)
else:
parser.print_help()
exit(1)
|
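# Usage sketch (hypothetical): the downloader can also be driven directly,
# bypassing argparse. The module name `grab` is an assumption; the URL
# generator mirrors siro_urlgen above.
from grab import ImageDownloader

siro_urlgen = lambda x: 'http://blog.livedoor.jp/kirekawa39-siro/archives/siro-%d.html' % x
downloader = ImageDownloader('SIRO')            # creates ./SIRO if missing
downloader.multi_save_stuff(siro_urlgen, 1, 3)  # scrape posts siro-1 .. siro-3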
kennethlove/django_bookmarks
|
dj_bookmarks/bookmarks/migrations/0006_bookmark_collections.py
|
Python
|
bsd-3-clause
| 476 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-09-15 17:18
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bookmarks', '0005_auto_20170915_1015'),
]
|
operations = [
migrations.AddField(
model_name='bookmark',
name='collections',
field=models.ManyToManyField(to='bookmarks.Collection'),
),
]
|
loomchild/icoin
|
test/integration_test/test_mail.py
|
Python
|
agpl-3.0
| 727 | 0.009629 |
from time import sleep
from icoin import app
from icoin.core.mail import mail, send
class TestMail:
def test_send_sync(self):
with app.app_context(), mail.record_messages() as outbox:
send("test@test.com", "subjectnow",
|
"test", async=False)
assert len(outbox) == 1
assert outbox[0].subject == "subjectnow"
def test_send_async(self):
with app.app_context(), mail.record_messages() as outbox:
send("test@test.com", "subject", "test")
# message is not sent immediately
assert len(outbox) == 0
sleep(0.1)
assert len(outbox) == 1
assert outbox[0].subject == "subject"
|
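# Hedged sketch of a send() helper consistent with these tests (the real
# icoin.core.mail implementation may differ). `async` is taken via **opts
# here because it became a reserved word in Python 3.7; the tests predate that.
from threading import Thread
from flask import current_app
from flask_mail import Mail, Message

mail = Mail()

def _deliver(app, msg):
    with app.app_context():
        mail.send(msg)

def send(recipient, subject, body, **opts):
    msg = Message(subject, recipients=[recipient], body=body)
    if opts.get('async', True):
        app = current_app._get_current_object()  # bind the app outside the thread
        Thread(target=_deliver, args=(app, msg)).start()
    else:
        mail.send(msg)  # synchronous: record_messages() sees it immediately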
jumpserver/jumpserver
|
apps/assets/tasks/account_connectivity.py
|
Python
|
gpl-3.0
| 2,991 | 0.000335 |
# ~*~ coding: utf-8 ~*~
from celery import shared_task
from django.utils.translation import ugettext as _, gettext_noop
from common.utils import get_logger
from orgs.utils import org_aware_func
from ..models import Connectivity
from . import const
from .utils import check_asset_can_run_ansible
logger = get_logger(__file__)
__all__ = [
'test_account_connectivity_util', 'test_accounts_connectivity_manual',
'get_test_account_connectivity_tasks', 'test_user_connectivity',
'run_adhoc',
]
def get_test_account_connectivity_tasks(asset):
if asset.is_unixlike():
tasks = const.PING_UNIXLIKE_TASKS
elif asset.is_windows():
tasks = const.PING_WINDOWS_TASKS
else:
msg = _(
"The asset {} system platform {} does not "
"support run Ansible tasks".format(asset.hostname, asset.platform)
)
logger.info(msg)
tasks = []
return tasks
def run_adhoc(task_name, tasks, inventory):
"""
:param task_name
:param tasks
:param inventory
"""
from ops.ansible.runner import AdHocRunner
runner = AdHocRunner(inventory, options=const.TASK_OPTIONS)
result = runner.run(tasks, 'all', task_name)
return result.results_raw, result.results_summary
def test_user_connectivity(task_name, asset, username, password=None, private_key=None):
"""
:param task_name
:param asset
:param username
:param password
:param private_key
"""
from ops.inventory import JMSCustomInventory
tasks = get_test_account_connectivity_tasks(asset)
if not tasks:
logger.debug("No tasks ")
return {}, {}
inventory = JMSCustomInventory(
assets=[asset], username=username, password=password,
private_key=private_key
)
raw, summary = run_adhoc(
task_name=task_name, tasks=tasks, inventory=inventory
)
return raw, summary
@org_aware_func("account")
def test_account_connectivity_util(account, task_name):
"""
:param account: <AuthBook>对象
:param task_name:
:return:
"""
if not check_asset_can_run_ansible(account.asset):
return
account.load_auth()
    try:
        raw, summary = test_user_connectivity(
task_name=task_name, asset=account.asset,
username=account.username, password=account.password,
private_key=account.private_key_file
)
except Exception as e:
logger.warn("Failed run adhoc {}, {}".format(task_name, e))
return
if summary.get('success'):
account.set_connectivity(Connectivity.ok)
else:
account.set_connectivity(Connectivity.failed)
@shared_task(queue="ansible"
|
)
def test_accounts_connectivity_manual(accounts):
"""
:param accounts: <AuthBook>对象
"""
for account in accounts:
task_name = gettext_noop("Test account connectivity: ") + str(account)
test_account_connectivity_util(account, task_name)
print(".\n")
|
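# Hypothetical dispatch sketch: @shared_task(queue="ansible") gives the
# function a .delay() that enqueues work for a worker bound to that queue.
# The AuthBook import path below is an assumption.
from assets.models import AuthBook
from assets.tasks.account_connectivity import test_accounts_connectivity_manual

accounts = list(AuthBook.objects.all()[:10])
test_accounts_connectivity_manual.delay(accounts)  # picked up by an "ansible" worker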
Elico-Corp/openerp-7.0
|
account_alternate_invoice/__openerp__.py
|
Python
|
agpl-3.0
| 580 | 0.003454 |
# -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
{
'name': 'Account Alternate Invoice',
'version': '7.0.1.0.0',
'author': 'Elico Corp',
'website': 'https://www.elico-corp.com',
'description': """
        Account Alternate Invoice
""",
'depends': ['base', 'account', ],
'sequence': 10,
'data': [
'account_invoice_view.xml',
'report.xml',
],
'installable': True,
    'application': False,
'auto_install': False,
}
|
terrycojones/dark-matter
|
test/test_local_align.py
|
Python
|
mit
| 11,532 | 0 |
import six
from unittest import TestCase
from dark.reads import Read
from dark.local_align import LocalAlignment
class TestLocalAlign(TestCase):
"""
Test the LocalAlignment class.
With match +1, mismatch -1, gap open -1, gap extend -1 and
gap extend decay 0.0.
"""
def testPositiveMismatch(self):
"""
If the mismatch value passed is positive, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError, 'Mismatch must be negative',
LocalAlignment, seq1, seq2, mismatch=3)
def testZeroMismatch(self):
"""
If the mismatch value passed is zero, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError, 'Mismatch must be negative',
LocalAlignment, seq1, seq2, mismatch=0)
def testPositiveGap(self):
"""
If the gap value passed is positive, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError, 'Gap must be negative',
LocalAlignment, seq1, seq2, gap=3)
def testZeroGap(self):
"""
If the gap value passed is zero, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError, 'Gap must be negative',
LocalAlignment, seq1, seq2, gap=0)
def testPositiveGapExtend(self):
"""
If the gap extend value passed is positive, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError,
'Gap extension penalty cannot be positive',
LocalAlignment, seq1, seq2, gapExtend=3)
def testFirstSequenceEmpty(self):
"""
If the first sequence passed is empty, an exception must be raised.
"""
seq1 = Read('seq1', '')
seq2 = Read('seq2', 'agtcagtcagtc')
six.assertRaisesRegex(self, ValueError, 'Empty sequence: seq1',
LocalAlignment, seq1, seq2)
def testSecondSequenceEmpty(self):
"""
If the second sequence passed is empty, an exception must be raised.
"""
seq1 = Read('seq1', 'agtcagtcagtc')
seq2 = Read('seq2', '')
six.assertRaisesRegex(self, ValueError, 'Empty sequence: seq2',
LocalAlignment, seq1, seq2)
def testBothSequencesEmpty(self):
"""
If two empty sequences are passed, an exception must be raised.
"""
seq1 = Read('seq1', '')
seq2 = Read('seq2', '')
six.assertRaisesRegex(self, ValueError, 'Empty sequence: seq1',
LocalAlignment, seq1, seq2)
def testGapAtStartOfSeq1(self):
seq1 = Read('seq1', 'gaatcg')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 6=\n'
'seq1 Match start: 1 Match end: 6\n'
'seq2 Match start: 2 Match end: 7\n'
'seq1 1 GAATCG 6\n'
' ||||||\n'
'seq2 2 GAATCG 7')
self.assertEqual(result, alignment)
def testGapAtStartOfSeq2(self):
seq1 = Read('seq1', 'cgaatcg')
seq2 = Read('seq2', 'gaatcg')
align = LocalAlignment(seq1, seq2)
|
        result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 6=\n'
'seq1 Match start: 2 Match end: 7\n'
'seq2 Match start: 1 Match end: 6\n'
'seq1 2 GAATCG 7\n'
' ||||||\n'
'seq2 1 GAATCG 6')
self.assertEqual(result, alignment)
def testGapAtEndOfSeq1(self):
seq1 = Read('seq1', 'cgaatc')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 6=\n'
'seq1 Match start: 1 Match end: 6\n'
'seq2 Match start: 1 Match end: 6\n'
'seq1 1 CGAATC 6\n'
' ||||||\n'
'seq2 1 CGAATC 6')
self.assertEqual(result, alignment)
def testGapAtEndOfSeq2(self):
seq1 = Read('seq1', 'cgaatcg')
seq2 = Read('seq2', 'cgaatc')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 6=\n'
'seq1 Match start: 1 Match end: 6\n'
'seq2 Match start: 1 Match end: 6\n'
'seq1 1 CGAATC 6\n'
' ||||||\n'
'seq2 1 CGAATC 6')
self.assertEqual(result, alignment)
def testGapAtBothEndsOfSeq1(self):
seq1 = Read('seq1', 'gaatc')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 5=\n'
'seq1 Match start: 1 Match end: 5\n'
'seq2 Match start: 2 Match end: 6\n'
'seq1 1 GAATC 5\n'
' |||||\n'
'seq2 2 GAATC 6')
self.assertEqual(result, alignment)
def testGapAtBothEndsOfSeq2(self):
seq1 = Read('seq1', 'cgaatcg')
seq2 = Read('seq2', 'gaatc')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 5=\n'
'seq1 Match start: 2 Match end: 6\n'
'seq2 Match start: 1 Match end: 5\n'
'seq1 2 GAATC 6\n'
' |||||\n'
'seq2 1 GAATC 5')
self.assertEqual(result, alignment)
def testAlignmentWithGapInMiddle(self):
seq1 = Read('seq1', 'agtcagtcagtc')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 2=1D1=\n'
'seq1 Match start: 7 Match end: 10\n'
'seq2 Match start: 5 Match end: 7\n'
'seq1 7 TCAG 10\n'
' || |\n'
'seq2 5 TC-G 7')
self.assertEqual(result, alignment)
def testTwoEqualSequences(self):
"""
When two identical sequences are given, the result should
show that the sequences completely match.
"""
seq1 = Read('seq1', 'cgaatcg')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 7=\n'
'seq1 Match start: 1 Match end: 7\n'
'seq2 Match start: 1 Match end: 7\n'
'seq1 1 CGAATCG 7\n'
' |||||||\n'
'seq2 1 CGAATCG 7')
self.assertEqual(result, alignment)
def testTwoCompletelyDifferentSequences(self):
"""
When two completely different sequences are given, the result
should be the two sequences with an empty alignment.
"""
seq1 = Read('seq1', 'aaaaaa')
seq2 = Read('seq2', 'gggggg')
align = LocalAlignment(seq1, seq2)
        result = align.createAlignment(resultFormat=str)
|
hitsl/bouser_simargl
|
bouser_simargl/web.py
|
Python
|
isc
| 577 | 0 |
# -*- coding: utf-8 -*-
from twisted.web.resource import Resource
from bouser.helpers.plugin_helpers import Dependency
__author__ = 'viruzzz-kun'
class SimarglResource(Resource):
    web = Dependency('bouser.web')
es = Dependency('bouser.ezekiel.eventsource', optional=True)
rpc = Dependency('bouser.ezekiel.rest', optional=True)
@web.on
def web_on(self, web):
        web.root_resource.putChild('ezekiel', self)
@es.on
def es_on(self, es):
self.putChild('es', es)
@rpc.on
def rpc_on(self, rpc):
self.putChild('rpc', rpc)
|
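# Hedged sketch of the resource tree the Dependency callbacks assemble,
# using only twisted.web stand-ins (the bouser plugin objects are assumed
# to be Resource subclasses; modern Twisted wants bytes child paths).
from twisted.web.resource import Resource

root = Resource()
simargl = Resource()                 # stand-in for SimarglResource
root.putChild(b'ezekiel', simargl)   # what web_on() does via web.root_resource
simargl.putChild(b'es', Resource())  # stand-ins for the eventsource/rest deps
simargl.putChild(b'rpc', Resource())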
TriOptima/tri.form
|
tests/settings.py
|
Python
|
bsd-3-clause
| 710 | 0 |
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
TEMPLATE_DIRS = [
os.path.join(BASE_DIR, 'tests'),
os.path.join(BASE_DIR, 'tri_form/templates'),
]
TEMPLATE_DEBUG = True
# Django
|
>=1.9
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': TEMPLATE_DIRS,
'APP_DIRS': True,
'OPTIONS': {
'debug': TEMPLATE_DEBUG,
}
}
]
SECRET_KEY = "foobar"
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'tri_form',
'tests'
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
|
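# Minimal bootstrap sketch for using this settings module outside a test
# runner; the dotted path tests.settings is an assumption about layout.
import os
import django

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings')
django.setup()  # wires INSTALLED_APPS and TEMPLATES from the module above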
jerome-jacob/selenium
|
py/test/selenium/webdriver/common/page_loading_tests.py
|
Python
|
apache-2.0
| 5,014 | 0.007579 |
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import pytest
from selenium.webdriver.common.by import By
class PageLoadingTests(unittest.TestCase):
def testShouldWaitForDocumentToBeLoaded(self):
self._loadSimplePage()
self.assertEqual(self.driver.title, "Hello WebDriver")
# Disabled till Java WebServer is used
#def testShouldFollowRedirectsSentInTheHttpResponseHeaders(self):
# self.driver.get(pages.redirectPage);
# self.assertEqual(self.driver.title, "We Arrive Here")
# Disabled till the Java WebServer is used
#def testShouldFollowMetaRedirects(self):
# self._loadPage("metaRedirect")
# self.assertEqual(self.driver.title, "We Arrive Here")
def testShouldBeAbleToGetAFragmentOnTheCurrentPage(self):
self._loadPage("xhtmlTest")
location = self.driver.current_url
self.driver.get(location + "#text")
self.driver.find_element(by=By.ID, value="id1")
@pytest.mark.ignore_safari
def testShouldReturnWhenGettingAUrlThatDoesNotResolve(self):
try:
# Of course, we're up the creek if this ever does get registered
self.driver.get("http://www.thisurldoesnotexist.comx/")
except ValueError:
pass
@pytest.mark.ignore_safari
def testShouldReturnWhenGettingAUrlThatDoesNotConnect(self):
# Here's hoping that there's nothing here. There shouldn't be
self.driver.get("http://localhost:3001")
#@Ignore({IE, IPHONE, SELENESE})
#def testShouldBeAbleToLoadAPageWithFramesetsAndWaitUntilAllFramesAreLoaded() {
# self.driver.get(pages.framesetPage);
# self.driver.switchTo().frame(0);
# WebElement pageNumber = self.driver.findElement(By.xpath("#span[@id='pageNumber']"));
# self.assertEqual((pageNumber.getText().trim(), equalTo("1"));
# self.driver.switchTo().defaultContent().switchTo().frame(1);
# pageNumber = self.driver.findElement(By.xpath("#span[@id='pageNumber']"));
# self.assertEqual((pageNumber.getText().trim(), equalTo("2"));
#Need to implement this decorator
#@NeedsFreshDriver
    #def testShouldDoNothingIfThereIsNothingToGoBackTo() {
# String originalTitle = self.driver.getTitle();
# self.driver.get(pages.formPage);
# self.driver.back();
# We may have returned to the browser's home page
# self.assertEqual(self.driver.title, anyOf(equalTo(originalTitle), equalTo("We Leave From Here")));
    def testShouldBeAbleToNavigateBackInTheBrowserHistory(self):
self._loadPage("formPage")
self.driver.find_element(by=By.ID, value="imageButton").submit()
self.assertEqual(self.driver.title, "We Arrive Here")
self.driver.back()
        self.assertEqual(self.driver.title, "We Leave From Here")
def testShouldBeAbleToNavigateBackInTheBrowserHistoryInPresenceOfIframes(self):
self._loadPage("xhtmlTest")
self.driver.find_element(by=By.NAME,value="sameWindow").click()
self.assertEqual(self.driver.title, "This page has iframes")
self.driver.back()
self.assertEqual(self.driver.title, "XHTML Test Page")
def testShouldBeAbleToNavigateForwardsInTheBrowserHistory(self):
self._loadPage("formPage")
self.driver.find_element(by=By.ID, value="imageButton").submit()
self.assertEqual(self.driver.title, "We Arrive Here")
self.driver.back()
self.assertEqual(self.driver.title, "We Leave From Here")
self.driver.forward()
self.assertEqual(self.driver.title, "We Arrive Here")
@pytest.mark.ignore_ie
def testShouldNotHangifDocumentOpenCallIsNeverFollowedByDocumentCloseCall(self):
self._loadPage("document_write_in_onload")
self.driver.find_element(By.XPATH, "//body")
def testShouldBeAbleToRefreshAPage(self):
self._loadPage("xhtmlTest")
self.driver.refresh()
self.assertEqual(self.driver.title, "XHTML Test Page")
def _pageURL(self, name):
return self.webserver.where_is(name + '.html')
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
|
testing-cabal/mock
|
mock/tests/testmock.py
|
Python
|
bsd-2-clause
| 72,280 | 0.002048 |
import copy
import re
import sys
import tempfile
import unittest
from mock.tests.support import ALWAYS_EQ
from mock.tests.support import is_instance
from mock import (
call, DEFAULT, patch, sentinel,
MagicMock, Mock, NonCallableMock,
NonCallableMagicMock, AsyncMock,
create_autospec, mock
)
from mock.mock import _Call, _CallList
import mock.mock as mock_module
class Iter(object):
def __init__(self):
self.thing = iter(['this', 'is', 'an', 'iter'])
def __iter__(self):
return self
def next(self):
return next(self.thing)
    __next__ = next
class Something(object):
def meth(self, a, b, c, d=None): pass
@classmethod
def cmeth(cls, a, b, c, d=None): pass
@staticmethod
def smeth(a, b, c, d=None): pass
def something(a): pass
class MockTest(unittest.TestCase):
def test_all(self):
        # if __all__ is badly defined then import * will raise an error
# We have to exec it because you can't import * inside a method
# in Python 3
exec("from mock.mock import *")
def test_constructor(self):
mock = Mock()
self.assertFalse(mock.called, "called not initialised correctly")
self.assertEqual(mock.call_count, 0,
"call_count not initialised correctly")
self.assertTrue(is_instance(mock.return_value, Mock),
"return_value not initialised correctly")
self.assertEqual(mock.call_args, None,
"call_args not initialised correctly")
self.assertEqual(mock.call_args_list, [],
"call_args_list not initialised correctly")
self.assertEqual(mock.method_calls, [],
"method_calls not initialised correctly")
# Can't use hasattr for this test as it always returns True on a mock
self.assertNotIn('_items', mock.__dict__,
"default mock should not have '_items' attribute")
self.assertIsNone(mock._mock_parent,
"parent not initialised correctly")
self.assertIsNone(mock._mock_methods,
"methods not initialised correctly")
self.assertEqual(mock._mock_children, {},
"children not initialised incorrectly")
def test_return_value_in_constructor(self):
mock = Mock(return_value=None)
self.assertIsNone(mock.return_value,
"return value in constructor not honoured")
def test_change_return_value_via_delegate(self):
def f(): pass
mock = create_autospec(f)
mock.mock.return_value = 1
self.assertEqual(mock(), 1)
def test_change_side_effect_via_delegate(self):
def f(): pass
mock = create_autospec(f)
mock.mock.side_effect = TypeError()
with self.assertRaises(TypeError):
mock()
def test_repr(self):
mock = Mock(name='foo')
self.assertIn('foo', repr(mock))
self.assertIn("'%s'" % id(mock), repr(mock))
mocks = [(Mock(), 'mock'), (Mock(name='bar'), 'bar')]
for mock, name in mocks:
self.assertIn('%s.bar' % name, repr(mock.bar))
self.assertIn('%s.foo()' % name, repr(mock.foo()))
self.assertIn('%s.foo().bing' % name, repr(mock.foo().bing))
self.assertIn('%s()' % name, repr(mock()))
self.assertIn('%s()()' % name, repr(mock()()))
self.assertIn('%s()().foo.bar.baz().bing' % name,
repr(mock()().foo.bar.baz().bing))
def test_repr_with_spec(self):
class X(object):
pass
mock = Mock(spec=X)
self.assertIn(" spec='X' ", repr(mock))
mock = Mock(spec=X())
self.assertIn(" spec='X' ", repr(mock))
mock = Mock(spec_set=X)
self.assertIn(" spec_set='X' ", repr(mock))
mock = Mock(spec_set=X())
self.assertIn(" spec_set='X' ", repr(mock))
mock = Mock(spec=X, name='foo')
self.assertIn(" spec='X' ", repr(mock))
self.assertIn(" name='foo' ", repr(mock))
mock = Mock(name='foo')
self.assertNotIn("spec", repr(mock))
mock = Mock()
self.assertNotIn("spec", repr(mock))
mock = Mock(spec=['foo'])
self.assertNotIn("spec", repr(mock))
def test_side_effect(self):
mock = Mock()
def effect(*args, **kwargs):
raise SystemError('kablooie')
mock.side_effect = effect
self.assertRaises(SystemError, mock, 1, 2, fish=3)
mock.assert_called_with(1, 2, fish=3)
results = [1, 2, 3]
def effect():
return results.pop()
mock.side_effect = effect
self.assertEqual([mock(), mock(), mock()], [3, 2, 1],
"side effect not used correctly")
mock = Mock(side_effect=sentinel.SideEffect)
self.assertEqual(mock.side_effect, sentinel.SideEffect,
"side effect in constructor not used")
def side_effect():
return DEFAULT
mock = Mock(side_effect=side_effect, return_value=sentinel.RETURN)
self.assertEqual(mock(), sentinel.RETURN)
def test_autospec_side_effect(self):
# Test for issue17826
results = [1, 2, 3]
def effect():
return results.pop()
def f(): pass
mock = create_autospec(f)
mock.side_effect = [1, 2, 3]
self.assertEqual([mock(), mock(), mock()], [1, 2, 3],
"side effect not used correctly in create_autospec")
# Test where side effect is a callable
results = [1, 2, 3]
mock = create_autospec(f)
mock.side_effect = effect
self.assertEqual([mock(), mock(), mock()], [3, 2, 1],
"callable side effect not used correctly")
def test_autospec_side_effect_exception(self):
# Test for issue 23661
def f(): pass
mock = create_autospec(f)
mock.side_effect = ValueError('Bazinga!')
self.assertRaisesRegex(ValueError, 'Bazinga!', mock)
def test_reset_mock(self):
parent = Mock()
spec = ["something"]
mock = Mock(name="child", parent=parent, spec=spec)
mock(sentinel.Something, something=sentinel.SomethingElse)
something = mock.something
mock.something()
mock.side_effect = sentinel.SideEffect
return_value = mock.return_value
return_value()
mock.reset_mock()
self.assertEqual(mock._mock_name, "child",
"name incorrectly reset")
self.assertEqual(mock._mock_parent, parent,
"parent incorrectly reset")
self.assertEqual(mock._mock_methods, spec,
"methods incorrectly reset")
self.assertFalse(mock.called, "called not reset")
self.assertEqual(mock.call_count, 0, "call_count not reset")
self.assertEqual(mock.call_args, None, "call_args not reset")
self.assertEqual(mock.call_args_list, [], "call_args_list not reset")
self.assertEqual(mock.method_calls, [],
"method_calls not initialised correctly: %r != %r" %
(mock.method_calls, []))
self.assertEqual(mock.mock_calls, [])
self.assertEqual(mock.side_effect, sentinel.SideEffect,
"side_effect incorrectly reset")
self.assertEqual(mock.return_value, return_value,
"return_value incorrectly reset")
self.assertFalse(return_value.called, "return value mock not reset")
self.assertEqual(mock._mock_children, {'something': something},
"children reset incorrectly")
self.assertEqual(mock.something, something,
"children incorrectly cleared")
self.assertFalse(mock.something.called, "child not reset")
def test_reset_mock_recursion(self):
mock = Mock()
mock.return_value = mock
# used to cause recursion
        mock.reset_mock()
|
pixelated-project/pixelated-user-agent
|
service/test/unit/adapter/test_mailbox_indexer_listener.py
|
Python
|
agpl-3.0
| 2,690 | 0.003717 |
#
# Copyright (c) 2014 ThoughtWorks, Inc.
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
from twisted.trial import unittest
from mockito import mock, when, verify, any as ANY
from pixelated.adapter.listeners.mailbox_indexer_listener import MailboxIndexerListener
from twisted.internet import defer
from pixelated.adapter.listeners.mailbox_indexer_listener import logger
class MailboxListenerTest(unittest.TestCase):
def setUp(self):
self.mail_store = mock()
self.account = mock()
self.account.mailboxes = []
def test_add_itself_to_mailbox_listeners(self):
self.account.mailboxes = ['INBOX']
mailbox = mock()
when(self.account).get_collection_by_mailbox('INBOX').thenReturn(mailbox)
mailbox.listeners = set()
when(mailbox).addListener = lambda x: mailbox.listeners.add(x)
self.assertNotIn(MailboxIndexerListener('INBOX', self.mail_store, mock()), mailbox.listeners)
        MailboxIndexerListener.listen(self.account, 'INBOX', self.mail_store, mock())
self.assertIn(MailboxIndexerListener('INBOX', self.mail_store, mock()), mailbox.listeners)
def test_reindex_missing_idents(self):
mail = mock()
search_engine = mock()
when(search_engine).search('tag:inbox', all_mails=True).thenReturn(['ident1', 'ident2'])
listener = MailboxIndexerListener('INBOX', self.mail_store, search_engine)
when(self.mail_store).get_mailbox_mail_ids('INBOX').thenReturn({'ident1', 'ident2', 'missing_ident'})
when(self.mail_store).get_mails({'missing_ident'}, include_body=True).thenReturn([mail])
listener.notify_new()
verify(self.mail_store, times=1).get_mails({'missing_ident'}, include_body=True)
verify(search_engine).index_mails([mail])
@defer.inlineCallbacks
def test_catches_exceptions_to_not_break_other_listeners(self):
when(logger).error(ANY()).thenReturn(None)
listener = MailboxIndexerListener('INBOX', self.mail_store, mock())
yield listener.notify_new()
verify(logger).error(ANY())
|
kartta-labs/noter-backend
|
noter_backend/main/migrations/0003_create_public_group.py
|
Python
|
apache-2.0
| 664 | 0.003012 |
from django.db import models, migrations
import uuid
from django.contrib.auth.hashers import make_password
PUBLIC_ID = 1
def apply_migration(apps, schema_editor):
Group = apps.get_model('auth', 'Group')
public_group = Group()
public_group.name = "public"
public_group.id = PUBLIC_ID
public_group.save()
def revert_migration(apps, schema_editor):
Group = apps.get_model('auth', 'Group')
Group.objects.filter(id=PUBLIC_ID).delete()
class Migration(migrations.Migration):
    dependencies = [
        ('main', '0002_auto_20200821_0710'),
]
operations = [
migrations.RunPython(apply_migration, revert_migration)
]
|
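# Hypothetical shell usage: because RunPython receives both directions,
# the data migration can be applied and rolled back:
#   python manage.py migrate main 0003_create_public_group   # forwards
#   python manage.py migrate main 0002_auto_20200821_0710    # backwards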
sailfish-sdk/sailfish-qtcreator
|
scripts/common.py
|
Python
|
gpl-3.0
| 7,269 | 0.003027 |
############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at https://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
import os
import locale
import shutil
import subprocess
import sys
encoding = locale.getdefaultlocale()[1]
def is_windows_platform():
return sys.platform.startswith('win')
def is_linux_platform():
return sys.platform.startswith('linux')
def is_mac_platform():
return sys.platform.startswith('darwin')
# copy of shutil.copytree that does not bail out if the target directory already exists
# and that does not create empty directories
def copytree(src, dst, symlinks=False, ignore=None):
def ensure_dir(destdir, ensure):
if ensure and not os.path.isdir(destdir):
os.makedirs(destdir)
return False
names = os.listdir(src)
if ignore is not None:
ignored_names = ignore(src, names)
else:
ignored_names = set()
needs_ensure_dest_dir = True
errors = []
for name in names:
if name in ignored_names:
continue
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
needs_ensure_dest_dir = ensure_dir(dst, needs_ensure_dest_dir)
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks, ignore)
else:
needs_ensure_dest_dir = ensure_dir(dst, needs_ensure_dest_dir)
shutil.copy2(srcname, dstname)
# XXX What about devices, sockets etc.?
except (IOError, os.error) as why:
errors.append((srcname, dstname, str(why)))
# catch the Error from the recursive copytree so that we can
# continue with other files
except shutil.Error as err:
errors.extend(err.args[0])
try:
if os.path.exists(dst):
shutil.copystat(src, dst)
except shutil.WindowsError:
# can't copy file access times on Windows
pass
except OSError as why:
errors.extend((src, dst, str(why)))
if errors:
raise shutil.Error(errors)
def get_qt_install_info(qmake_bin):
output = subprocess.check_output([qmake_bin, '-query'])
lines = output.decode(encoding).strip().split('\n')
info = {}
for line in lines:
(var, sep, value) = line.partition(':')
info[var.strip()] = value.strip()
return info
def get_rpath(libfilepath, chrpath=None):
if chrpath is None:
chrpath = 'chrpath'
try:
output = subprocess.check_output([chrpath, '-l', libfilepath]).strip()
except subprocess.CalledProcessError: # no RPATH or RUNPATH
return []
marker = 'RPATH='
index = output.decode(encoding).find(marker)
if index < 0:
marker = 'RUNPATH='
index = output.find(marker)
if index < 0:
return []
return output[index + len(marker):].split(':')
def fix_rpaths(path, qt_deploy_path, qt_install_info, chrpath=None):
if chrpath is None:
chrpath = 'chrpath'
qt_install_prefix = qt_install_info['QT_INSTALL_PREFIX']
qt_install_libs = qt_install_info['QT_INSTALL_LIBS']
def fix_rpaths_helper(filepath):
rpath = get_rpath(filepath, chrpath)
if len(rpath) <= 0:
return
# remove previous Qt RPATH
new_rpath = filter(lambda path: not path.startswith(qt_install_prefix) and not path.startswith(qt_install_libs),
rpath)
# check for Qt linking
        lddOutput = subprocess.check_output(['ldd', filepath]).decode(encoding)
        if lddOutput.find('libQt5') >= 0 or lddOutput.find('libicu') >= 0:
# add Qt RPATH if necessary
relative_path = os.path.relpath(qt_deploy_path, os.path.dirname(filepath))
if relative_path == '.':
relative_path = ''
else:
relative_path = '/' + relative_path
qt_rpath = '$ORIGIN' + relative_path
if not any((path == qt_rpath) for path in rpath):
new_rpath.append(qt_rpath)
# change RPATH
if len(new_rpath) > 0:
subprocess.check_call([chrpath, '-r', ':'.join(new_rpath), filepath])
else: # no RPATH / RUNPATH left. delete.
subprocess.check_call([chrpath, '-d', filepath])
def is_unix_executable(filepath):
# Whether a file is really a binary executable and not a script and not a symlink (unix only)
if os.path.exists(filepath) and os.access(filepath, os.X_OK) and not os.path.islink(filepath):
with open(filepath) as f:
return f.read(2) != "#!"
def is_unix_library(filepath):
# Whether a file is really a library and not a symlink (unix only)
return os.path.basename(filepath).find('.so') != -1 and not os.path.islink(filepath)
for dirpath, dirnames, filenames in os.walk(path):
for filename in filenames:
filepath = os.path.join(dirpath, filename)
if is_unix_executable(filepath) or is_unix_library(filepath):
fix_rpaths_helper(filepath)
def is_debug_file(filepath):
if is_mac_platform():
return filepath.endswith('.dSYM') or '.dSYM/' in filepath
elif is_linux_platform():
return filepath.endswith('.debug')
else:
return filepath.endswith('.pdb')
def is_debug(path, filenames):
return [fn for fn in filenames if is_debug_file(os.path.join(path, fn))]
def is_not_debug(path, filenames):
files = [fn for fn in filenames if os.path.isfile(os.path.join(path, fn))]
    return [fn for fn in files if not is_debug_file(os.path.join(path, fn))]
def codesign(app_path):
    signing_identity = os.environ.get('SIGNING_IDENTITY')
if is_mac_platform() and signing_identity:
codesign_call = ['codesign', '--force', '--deep', '-s', signing_identity, '-v']
signing_flags = os.environ.get('SIGNING_FLAGS')
if signing_flags:
codesign_call.extend(signing_flags.split())
codesign_call.append(app_path)
subprocess.check_call(codesign_call)
|
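# Usage sketch, assuming this file is importable as `common` and that
# qmake (and chrpath, for get_rpath) are on PATH:
from common import get_qt_install_info, get_rpath

info = get_qt_install_info('qmake')  # parses `qmake -query` into a dict
print(info['QT_INSTALL_PREFIX'], info['QT_INSTALL_LIBS'])
print(get_rpath('/usr/lib/libexample.so'))  # [] when no RPATH/RUNPATH is set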
ebigelow/LOTlib
|
LOTlib/Testing/old/Examples/SymbolicRegression/old/test_simple_functionTest.py
|
Python
|
gpl-3.0
| 522 | 0.007663 |
"""
class to test test_simple_function.py
follows the standards in https://docs.python.org/2/library/unittest.html
"""
import unittest
from LOTlib.Examples.SymbolicRegression.old.test_simple_function import *
class test_simple_functionTest(unittest.TestCase):
# initialization that happens before each test is carried out
def setUp(self):
pass
|
# function that is executed after each test is carried out
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
shash/IconDB
|
django_extensions/management/commands/print_user_for_session.py
|
Python
|
agpl-3.0
| 1,826 | 0 |
from django.core.management.base import BaseCommand, CommandError
try:
from django.contrib.auth import get_user_model # Django 1.5
except ImportError:
from django_extensions.future_1_5 import get_user_model
from django.contrib.sessions.models import Session
import re
SESSION_RE = re.compile("^[0-9a-f]{20,40}$")
class Command(BaseCommand):
help = ("print the user information for the provided session key. "
"this is very helpful when trying to track down the person who "
"experienced a site crash.")
args = "session_key"
label = 'session key for the user'
requires_model_validation = True
can_import_settings = True
def handle(self, *args, **options):
if len(args) > 1:
raise CommandError("extra arguments supplied")
if len(args) < 1:
raise CommandError("session_key argument missing")
key = args[0].lower()
if not SESSION_RE.match(key):
raise CommandError("malformed session key")
try:
session = Session.objects.get(pk=key)
except Session.DoesNotExist:
print("Session Key does not exist. Expired?")
return
        data = session.get_decoded()
print('Session to Expire: %s' % session.expire_date)
print('Raw Data: %s' % data)
uid = data.get('_auth_user_id', None)
if uid is None:
print('No user associated with session')
return
print("User id: %s" % uid)
User = get_user_model()
try:
user = User.objects.get(pk=uid)
except User.DoesNotExist:
print("No user associated wi
|
th that id.")
return
for key in ['username', 'email', 'first_name', 'last_name']:
print("%s: %s" % (key, getattr(user, key)))
|
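# Hypothetical invocation (the session key below is illustrative only):
#   python manage.py print_user_for_session 0123456789abcdef01234567
# Output: expiry date, raw session data, then username/email/name fields.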
SurielRuano/Orientador-Legal
|
colaboradores/forms.py
|
Python
|
mit
| 287 | 0.031359 |
from django import forms
from django.contrib.auth.models import User
from .models import Perfil,SolicitudColaboracion
class SolicitudColaboracionForm(forms.ModelForm):
class Meta:
        model = SolicitudColaboracion
fields = ('name','licenciatura_leyes','telefono','fecha_nacimiento')
|
UCRoboticsLab/BaxterTictactoe
|
src/baxter_face_animation/src/baxter_face_animation/hear_words.py
|
Python
|
apache-2.0
| 926 | 0.009719 |
#!/usr/bin/env python
import glob
import copy
import cv2
import cv_bridge
import rospy
from sensor_msgs.msg import Image
from std_msgs.msg import Int32, Float32, String
import rospkg
class Hear_orders:
def __init__(self):
self.speech_subscriber = rospy.Subscriber("/speech_recognition", String, self.publish_emotion)
self.emotion_publisher = rospy.Publisher("/emotion", String, queue_size=10)
# self.timer = rospy.Timer(rospy.Duration(self.velocity), self.timer_cb)
def publish_emotion(self, data):
self.emotion_publisher.publish("heard_an_order")
def main():
rospy.init_node('hearing_node', anonymous=True)
    rate = rospy.Rate(30)
rospack = rospkg.RosPack()
# path = rospack.get_path('baxter_face_animation') + "/data/"
Hear_orders()
while not rospy.is_shutdown():
rate.sleep()
if __name__ == "__main__":
main()
|
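# Hypothetical manual check from another shell, using standard ROS tooling:
#   rostopic pub -1 /speech_recognition std_msgs/String "data: 'order'"
#   rostopic echo /emotion    # should print data: "heard_an_order"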
collectiveacuity/pocketLab
|
pocketlab/methods/service.py
|
Python
|
mit
| 8,378 | 0.005132 |
__author__ = 'rcj1492'
__created__ = '2016.10'
__license__ = 'MIT'
def retrieve_service_name(service_root):
service_name = ''
# construct registry client
from os import path
from pocketlab import __module__
from labpack.storage.appdata import appdataClient
registry_client = appdataClient(collection_name='Registry Data', prod_name=__module__)
# walk registry for
from labpack.records.settings import load_settings
for file_path in registry_client.localhost.walk(registry_client.collection_folder):
try:
details = load_settings(file_path)
if details['service_root'] == path.abspath(service_root):
service_name = details['service_name']
break
except:
pass
return service_name
def retrieve_service_root(service_name, command_context=''):
# construct registry client
from os import path
from pocketlab import __module__
from labpack.storage.appdata import appdataClient
registry_client = appdataClient(collection_name='Registry Data', prod_name=__module__)
# validate service name exists in registry
file_name = '%s.yaml' % service_name
filter_function = registry_client.conditional_filter([{0:{'discrete_values':[file_name]}}])
service_list = registry_client.list(filter_function=filter_function)
if not file_name in service_list:
error_msg = '"%s" not found in the registry.' % service_name
if command_context:
error_msg += ' %s' % command_context
raise ValueError(error_msg)
# retrieve root path to service
import yaml
service_data = registry_client.load(file_name)
service_details = yaml.full_load(service_data.decode())
if not 'service_root' in service_details.keys():
error_msg = 'Record for project "%s" has been corrupted.' % service_name
if command_context:
error_msg += ' %s' % command_context
raise ValueError(error_msg)
service_root = service_details['service_root']
if not path.exists(service_root):
error_msg = 'Path %s to project "%s" no longer exists.' % (service_root, service_name)
if command_context:
error_msg += ' %s' % command_context
raise ValueError(error_msg)
return service_root
def retrieve_services(service_list=None, all=False):
'''
a method to generate the root path for one or more services
:param service_list: list of strings with name of services
:param all: boolean to indicate the retrieve all paths in registry
:return: list of dictionaries, string with exit message insert
'''
# define default returns
path_list = []
msg_insert = 'local service'
# add named service to service list
if service_list:
from labpack.parsing.grammar import join_words
word_list = []
for service in service_list:
service_root = retrieve_service_root(service)
service_details = {
'name': service,
'path': service_root
}
path_list.append(service_details)
word_list.append('"%s"' % service)
msg_insert = join_words(word_list)
# add all services in registry to service list
elif all:
msg_insert = 'all services'
from pocketlab import __module__
from labpack.storage.appdata import appdataClient
registry_client = appdataClient(collection_name='Registry Data', prod_name=__module__)
from labpack.records.settings import load_settings
for file_path in registry_client.localhost.walk(registry_client.collection_folder):
try:
details = load_settings(file_path)
service_details = {
'name': details['service_name'],
'path': details['service_root']
}
path_list.append(service_details)
except:
pass
    # add local path to service list
else:
path_list.append({'name': '', 'path': './'})
return path_list, msg_insert
def retrieve_service_config(service_root, service_name, command_title):
from os import path
from pocketlab.methods.validation import validate_compose
from pocketlab import __module__
    from jsonmodel.loader import jsonLoader
from jsonmodel.validators import jsonModel
compose_schema = jsonLoader(__module__, 'models/compose-config.json')
service_schema = jsonLoader(__module__, 'models/service-config.json')
compose_model = jsonModel(compose_schema)
service_model = jsonModel(service_schema)
compose_path = path.join(service_root, 'docker-compose.yaml')
compose_details = validate_compose(compose_model, service_model, compose_path, service_name)
service_config = {}
if service_name:
service_config = compose_details['services'][service_name]
elif len(compose_details['services'].keys()) > 1:
raise ValueError('docker-compose.yaml file in working directory contains more than one service.\nTry: lab %s [SERVICE]' % command_title)
else:
for key, value in compose_details['services'].items():
service_config = value
service_name = key
break
return service_config, service_name
def compile_services(registry_only=False):
# construct registry client
from pocketlab import __module__
from labpack.storage.appdata import appdataClient
registry_client = appdataClient(collection_name='Registry Data', prod_name=__module__)
# walk registry to compile list of services
service_list = []
path_list = []
from labpack.records.settings import load_settings
for file_path in registry_client.localhost.walk(registry_client.collection_folder):
try:
details = load_settings(file_path)
service_list.append({
'name': details['service_name'],
'path': details['service_root']
})
path_list.append(details['service_root'])
except:
pass
# add current directory
if not registry_only:
from os import path
current_path = path.abspath('./')
if current_path not in path_list:
try:
file_path = path.join(current_path, 'docker-compose.yaml')
from pocketlab.methods.validation import validate_compose
from jsonmodel.loader import jsonLoader
from jsonmodel.validators import jsonModel
compose_model = jsonModel(jsonLoader(__module__, 'models/compose-config.json'))
service_model = jsonModel(jsonLoader(__module__, 'models/service-config.json'))
compose_details = validate_compose(compose_model, service_model, file_path, '')
if len(compose_details['services'].keys()) == 1:
for key in compose_details['services'].keys():
service_list.append({
'name': key,
'path': current_path
})
except:
pass
return service_list
def compile_ports(service_config):
service_ports = []
# validate ports are available
if 'ports' in service_config.keys():
for i in range(len(service_config['ports'])):
port_string = service_config['ports'][i]
port_split = port_string.split(':')
sys_port = port_split[0]
range_split = sys_port.split('-')
port_start = range_split[0]
port_end = ''
if len(range_split) > 1:
port_end = range_split[1]
if not port_end:
service_ports.append(int(port_start))
else:
for j in range(int(port_start),int(port_end) + 1):
service_ports.append(j)
return service_ports
if __name__ == '__main__':
    pass
|
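# Standalone sketch for compile_ports (illustrative config): host-side port
# strings follow docker-compose "HOST:CONTAINER" syntax, with optional ranges
# that the helper expands into individual port numbers.
config = {'ports': ['8080:80', '9000-9002:9000-9002']}
print(compile_ports(config))  # -> [8080, 9000, 9001, 9002]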
willu47/SALib
|
src/SALib/test_functions/Sobol_G.py
|
Python
|
mit
| 1,685 | 0.003561 |
from __future__ import division
import numpy as np
# Non-monotonic Sobol G Function (8 parameters)
# First-order indices:
# x1: 0.7165
# x2: 0.1791
# x3: 0.0237
# x4: 0.0072
# x5-x8: 0.0001
def evaluate(values, a=None):
if type(values) != np.ndarray:
raise TypeError("The argument `values` must be a numpy ndarray")
if a is None:
a = [0, 1, 4.5, 9, 99, 99, 99, 99]
ltz = values < 0
gto = values > 1
    if ltz.any():
        raise ValueError("Sobol G function called with values less than zero")
    elif gto.any():
        raise ValueError("Sobol G function called with values greater than one")
Y = np.ones([values.shape[0]])
len_a = len(a)
for i, row in enumerate(values):
for j in range(len_a):
x = row[j]
a_j = a[j]
Y[i] *= (np.abs(4 * x - 2) + a_j) / (1 + a_j)
return Y
def partial_first_order_variance(a=None):
if a is None:
a = [0, 1, 4.5, 9, 99, 99, 99, 99]
a = np.array(a)
return np.divide(1, np.multiply(3, np.square(1 + a)))
def total_variance(a=None):
if a is None:
a = [0, 1, 4.5, 9, 99, 99, 99, 99]
a = np.array(a)
    return np.add(-1, np.prod(1 + partial_first_order_variance(a), axis=0))  # np.product was removed in NumPy 2.0
def sensitivity_index(a):
a = np.array(a)
return np.divide(partial_first_order_variance(a), total_variance(a))
def total_sensitivity_index(a):
a = np.array(a)
pv = partial_first_order_variance(a)
tv = total_variance(a)
sum_pv = pv.sum(axis=0)
    return np.subtract(1, np.divide(np.subtract(sum_pv, pv.T), tv))
|
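# Self-check sketch: with the default coefficients, the analytic first-order
# indices should reproduce the values in the header comment above.
import numpy as np

a = [0, 1, 4.5, 9, 99, 99, 99, 99]
print(np.round(sensitivity_index(a), 4))  # ~ [0.7165 0.1791 0.0237 0.0072 ...]
print(evaluate(np.random.rand(5, 8)))     # five G-function evaluations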
Ginfung/FSSE
|
Algorithms/WORTHY.py
|
Python
|
mit
| 7,356 | 0.000952 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2019, Jianfeng Chen <jchen37@ncsu.edu>
# vim: set ts=4 sts=4 sw=4 expandtab smartindent:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import division
from deap import algorithms
from deap import tools
from deap.tools import emo
from deap.tools.emo import sortNondominated
from sklearn.cluster import KMeans
from sklearn.tree import DecisionTreeRegressor
from sklearn.linear_model import LinearRegression, LogisticRegression
from sklearn.neighbors import KNeighborsRegressor
from sklearn.neighbors.nearest_centroid import NearestCentroid
from sklearn.model_selection import train_test_split
from mpl_toolkits import mplot3d
from matplotlib.pyplot import figure
from matplotlib.ticker import PercentFormatter
import matplotlib.ticker as mtick
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import time
import sys
import os
import random
import pdb
def _emo_sortNondominated_idx(pop, first_front_only=False):
fronts = emo.sortNondominated(
pop, len(pop), first_front_only=first_front_only)
return [[pop.index(i) for i in f] for f in fronts]
def random_pop(model, N):
pop = list()
for _ in range(N):
pop.append(
model.Individual([random.random() for _ in range(model.decsNum)]))
return pop
def action_expr(model):
startat = time.time()
samples = random_pop(model, 100)
for p in samples:
model.eval(p, normalized=False)
print("100 init pop evaluated.")
for round_ in range(10):
samples.extend(random_pop(model, 20))
for p in samples[-20:]:
model.eval(p, normalized=False)
D = pd.DataFrame(data=samples, columns=model.decs)
O = pd.DataFrame(data=list(map(lambda i: i.fitness.values, samples)))
front_idx = _emo_sortNondominated_idx(
samples, first_front_only=True)[0]
next_pop = list()
for fi in front_idx:
dist_order = (D - D.loc[fi]).abs().pow(2).sum(
axis=1).sort_values().index[1:int(len(samples) * 0.1) +
1] # fetch the top 10% of samples
dD, dO = list(), list()
for i in dist_order:
for j in dist_order:
if i == j: continue
dD.append(D.iloc[i] - D.iloc[j])
dO.append(O.iloc[i] - O.iloc[j])
dD = pd.DataFrame(dD, index=range(len(dD)))
dO = pd.DataFrame(dO, index=range(len(dO)))
assert not (dO.std() < 0).any()
regr = list()
for oi, obj in enumerate(dO.columns):
regr_tmp = KNeighborsRegressor(n_neighbors=4).fit(dD, dO[obj])
regr.append(regr_tmp)
mut_dD = list()
for _ in range(D.shape[1] * 2):
mut_dD.append(D.loc[fi] * np.random.normal(0, 0.5, D.shape[1]))
mut_dD = pd.DataFrame(mut_dD, index=range(len(mut_dD)))
mut_dO = pd.DataFrame(columns=dO.columns)
for oi, obj in enumerate(mut_dO.columns):
mut_dO[obj] = regr[oi].predict(mut_dD)
filtered = (mut_dO < -1 * mut_dO.std()).any(axis=1)
new_decs = D.loc[fi] + mut_dD[filtered]
print('new eval = ', str(new_decs.shape[0]))
for nd in new_decs.index:
candidate = model.Individual(new_decs.loc[nd])
model.eval(candidate, normalized=False)
next_pop.append(candidate)
samples.extend(emo.sortNondominated(next_pop, len(next_pop), True)[0])
print(f'Round {round_} done. Sample size = {len(samples)}')
return emo.sortNondominated(
samples, len(samples), first_front_only=True)[0]
def action_expr2(model):
startat = time.time()
samples = random_pop(model, 100)
for p in samples:
model.eval(p, normalized=False)
print("100 init pop evaluated.")
for round_ in range(10):
samples.extend(random_pop(model, 20))
for p in samples[-20:]:
model.eval(p, normalized=False)
D = pd.DataFrame(data=samples, columns=model.decs)
O = pd.DataFrame(data=list(map(lambda i: i.fitness.values, samples)))
front_idx = _emo_sortNondominated_idx(
samples, first_front_only=True)[0]
next_pop = list()
for fi in front_idx:
dist_order = (D - D.loc[fi]).abs().pow(2).sum(
axis=1).sort_values().index[1:int(len(samples) * 0.1) +
1] # fetch the top 10% of samples
dD, dO = list(), list()
for i in dist_order:
for j in dist_order:
if i == j: continue
dD.append(D.iloc[i] - D.iloc[j])
dO.append(O.iloc[i] - O.iloc[j])
dD = pd.DataFrame(dD, index=range(len(dD)))
dO = pd.DataFrame(dO, index=range(len(dO)))
assert not (dO.std() < 0).any()
regr = list()
for oi, obj in enumerate(dO.columns):
regr_tmp = KNeighborsRegressor(n_neighbors=4).fit(dD, dO[obj])
regr.append(regr_tmp)
mut_dD = list()
for _ in range(D.shape[1] * 2):
mut_dD.append(D.loc[fi] * np.random.normal(0, 0.5, D.shape[1]))
mut_dD = pd.DataFrame(mut_dD, index=range(len(mut_dD)))
mut_dO = pd.DataFrame(columns=dO.columns)
for oi, obj in enumerate(mut_dO.columns):
mut_dO[obj] = regr[oi].predict(mut_dD)
filtered = (mut_dO < -1 * mut_dO.std()).any(axis=1)
new_decs = D.loc[fi] + mut_dD[filtered]
print('new eval = ', str(new_decs.shape[0]))
for nd in new_decs.index:
candidate = model.Individual(new_decs.loc[nd])
candidate.fitness.values = O.loc[fi] + mut_dO.loc[nd]
next_pop.append(candidate)
tmp_pf = emo.sortNondominated(next_pop, len(next_pop), True)[0]
for p in tmp_pf:
model.eval(p, normalized=False)
samples.extend(tmp_pf)
print(f'Round {round_} done. Sample size = {len(samples)}')
return emo.sortNondominated(
samples, len(samples), first_front_only=True)[0]
|
neildhir/DCBO
|
tests/test_root.py
|
Python
|
mit
| 5,136 | 0.003118 |
import unittest
from numpy import arange, linspace
from numpy.random import seed
from src.bases.root import Root
from src.examples.example_setups import setup_stat_scm
from src.utils.sem_utils.toy_sems import StationaryDependentSEM as StatSEM
from src.utils.sequential_intervention_functions import get_interventional_grids
from src.utils.sequential_sampling import sequentially_sample_model
from src.utils.utilities import convert_to_dict_of_temporal_lists, powerset
seed(seed=0)
class TestRoot(unittest.TestCase):
# Do NOT change the setUp method -- setUp is reserved by unittest.
def setUp(self):
# Use STAT DAG to test Root class
self.T = 3 # Time-steps in DAG
self.n = 4 # Number of observational samples per variable per time-step
self.N = 5 # Number of trials per time-step for method
(
self.init_sem,
self.sem,
_,
self.G,
self.exploration_sets,
self.intervention_domain,
self.true_objective_values,
) = setup_stat_scm(T=self.T)
# Sample observational data using SEM
D_O = sequentially_sample_model(
self.init_sem, self.sem, total_timesteps=self.T, sample_count=self.n, epsilon=None,
)
root_inputs = {
"G": self.G,
"sem": StatSEM,
"base_target_variable": "Y",
"observation_samples": D_O, # Observational samples
"intervention_domain": self.intervention_domain,
"number_of_trials": self.N,
}
self.root = Root(**root_inputs)
def test_setup_STAT_function(self):
self.assertEqual(self.exploration_sets, [("X",), ("Z",), ("X", "Z")])
self.assertEqual(self.intervention_domain, {"X": [-4, 1], "Z": [-3, 3]})
self.assertAlmostEqual(
self.true_objective_values, [-2.1518267393287287, -4.303653478657457, -6.455480217986186], places=7
)
self.assertEqual(self.init_sem.keys(), self.sem.keys())
def test_root_methods(self):
self.assertEqual(
self.root.node_pars,
{
"X_0": (),
"Z_0": ("X_0",),
"Y_0": ("Z_0",),
"X_1": ("X_0",),
"Z_1": ("Z_0", "X_1"),
"Y_1": ("Y_0", "Z_1"),
"X_2": ("X_1",),
"Z_2": ("Z_1", "X_2"),
"Y_2": ("Y_1", "Z_2"),
},
)
self.assertEqual(self.root.outcome_values, {0: [10000000.0], 1: [10000000.0], 2: [10000000.0]})
self.assertEqual(
self.root.sorted_nodes,
{"X_0": 0, "Z_0": 1, "X_1": 2, "Y_0": 3, "Z_1": 4, "X_2": 5, "Y_1": 6, "Z_2": 7, "Y_2": 8},
)
self.assertEqual(self.root.interventional_variable_limits, {"X": [-4, 1], "Z": [-3, 3]})
# If we do not pass any exploration set, then by default the Root class will assign all manipulative variables as the intervention set.
self.assertEqual(self.root.exploration_sets, [("X", "Z")])
self.assertEqual(
self.root.interventional_data_y, {0: {("X", "Z"): None}, 1: {("X", "Z"): None}, 2: {("X", "Z"): None}}
)
self.assertEqual(
self.root.interventional_data_x, {0: {("X", "Z"): None}, 1: {("X", "Z"): None}, 2: {("X", "Z"): None}}
)
def test_dict_to_list_conversion_of_observational_samples(self):
observational_samples = {
"X": arange(0, 9).reshape(3, -1),
"Y": arange(3, 12).reshape(3, -1),
"Z": arange(6, 15).reshape(3, -1),
}
out = convert_to_dict_of_temporal_lists(observational_samples)
self.assertEqual(len(out["X"]), 3)
self.assertEqual(len(out["Z"][0]), 3)
self.assertEqual(sum([len(out["Y"][t]) for t in range(3)]), 9)
    def test_interventional_grids(self):
nr_samples = 10
interventional_variable_limits = {"X": [-15, 3], "Z": [-1, 10]}
exploration_sets = list(powerset(self.root.manipulative_variables))
grids = get_interventional_grids(exploration_sets, interventional_variable_limits, nr_samples)
compare_vector = linspace(
interventional_variable_limits["X"][0], interventional_variable_limits["X"]
|
[1], num=nr_samples
).reshape(-1, 1)
self.assertEqual(compare_vector.shape, grids[exploration_sets[0]].shape)
self.assertTrue((compare_vector == grids[exploration_sets[0]]).all())
def test_target_variables(self):
self.assertEqual(self.root.all_target_variables, ["Y_0", "Y_1", "Y_2"])
def test_canonical_variables(self):
self.assertEqual(self.root.observational_samples.keys(), {"X", "Y", "Z"})
def test_number_of_nodes_per_time_slice(self):
# Number of nodes per time-slice
v_n = len(self.root.G.nodes()) / self.root.G.T
nodes = list(self.root.G.nodes())
self.assertEqual(v_n, 3)
for t in range(self.G.T):
            self.assertEqual(len([v for v in nodes if v.split("_")[1] == str(t)]), v_n)
if __name__ == "__main__":
unittest.main()
|
cloew/TogglDriver
|
toggl_driver/args/project_arg.py
|
Python
|
mit
| 547 | 0.007313 |
from ..config import GlobalConfig, LocalConfig
from kao_command.args import FlagArg
class ProjectArg(FlagArg):
""" Represents an CLI Arg
|
ument that specifies a Project """
def __init__(self, *, help):
""" Initialize the Arg """
FlagArg.__init__(self, '-p', '--project', action="store", help=help)
def getValue(self, args):
""" Return the value from the args """
projectName = FlagArg.getValue(self, args)
        return GlobalConfig.connection.projects.withName(projectName).first
|
unioslo/cerebrum
|
Cerebrum/extlib/Plex/DFA.py
|
Python
|
gpl-2.0
| 5,539 | 0.015165 |
# -*- coding: utf-8 -*-
#=======================================================================
#
# Python Lexical Analyser
#
# Converting NFA to DFA
#
#=======================================================================
import Machines
from Machines import LOWEST_PRIORITY
from Transitions import TransitionMap
def nfa_to_dfa(old_machine, debug = None):
"""
Given a nondeterministic Machine, return a new equivalent
Machine which is deterministic.
"""
# We build a new machine whose states correspond to sets of states
# in the old machine. Initially we add a new state corresponding to
# the epsilon-closure of each initial old state. Then we give transitions
    # to each new state which are the union of all transitions out of any
    # of the corresponding old states. The new state reached on a given
# character is the one corresponding to the set of states reachable
# on that character from any of the old states. As new combinations of
# old states are created, new states are added as needed until closure
# is reached.
new_machine = Machines.FastMachine()
state_map = StateMap(new_machine)
# Seed the process using the initial states of the old machine.
# Make the corresponding new states into initial states of the new
# machine with the same names.
for (key, old_state) in old_machine.initial_states.items():
new_state = state_map.old_to_new(epsilon_closure(old_state))
new_machine.make_initial_state(key, new_state)
# Tricky bit here: we add things to the end of this list while we're
# iterating over it. The iteration stops when closure is achieved.
for new_state in new_machine.states:
transitions = TransitionMap()
for old_state in state_map.new_to_old(new_state).keys():
for event, old_target_states in old_state.transitions.items():
if event and old_target_states:
transitions.add_set(event, set_epsilon_closure(old_target_states))
for event, old_states in transitions.items():
new_machine.add_transitions(new_state, event, state_map.old_to_new(old_states))
if debug:
debug.write("\n===== State Mapping =====\n")
state_map.dump(debug)
return new_machine
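# A minimal, self-contained sketch of the epsilon-closure computation the
# comments above describe, written against a plain dict-of-sets NFA rather
# than the Machines/Transitions API; every name below is illustrative and
# not part of this module.
def _epsilon_closure_sketch(epsilon_edges, start):
    # epsilon_edges maps a state to the set of states one epsilon move away.
    closure = set([start])
    stack = [start]
    while stack:
        state = stack.pop()
        for target in epsilon_edges.get(state, ()):
            if target not in closure:
                closure.add(target)
                stack.append(target)
    return closure
# e.g. _epsilon_closure_sketch({1: set([2]), 2: set([3])}, 1) == set([1, 2, 3])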
def set_epsilon_closure(state_set):
"""
Given a set of states, return the union of the epsilon
closures of its member states.
"""
result = {}
for state1 in state_set.keys():
for state2 in epsilon_closure(state1).keys():
result[state2] = 1
return result
def epsilon_closure(state):
"""
Return the set of states reachable from the given state
by epsilon moves.
"""
# Cache the result
result = state.epsilon_closure
if result is None:
result = {}
state.epsilon_closure = result
add_to_epsilon_closure(result, state)
return result
def add_to_epsilon_closure(state_set, state):
"""
Recursively add to |state_set| states reachable from the given state
by epsilon moves.
"""
if not state_set.get(state, 0):
state_set[state] = 1
state_set_2 = state.transitions.get_epsilon()
if state_set_2:
for state2 in state_set_2.keys():
add_to_epsilon_closure(state_set, state2)
class StateMap:
"""
Helper class used by nfa_to_dfa() to map back and forth between
sets of states from the old machine and states of the new machine.
"""
new_machine = None # Machine
old_to_new_dict = None # {(old_state,...) : new_state}
new_to_old_dict = None # {id(new_state) : old_state_set}
def __init__(self, new_machine):
self.new_machine = new_machine
self.old_to_new_dict = {}
self.new_to_old_dict= {}
def old_to_new(self, old_state_set):
"""
Return the state of the new machine corresponding to the
set of old machine states represented by |state_set|. A new
state will be created if necessary. If any of the old states
are accepting states, the new state will be an accepting state
with the highest priority action from the old states.
"""
key = self.make_key(old_state_set)
new_state = self.old_to_new_dict.get(key, None)
if not new_state:
action = self.highest_priority_action(old_state_set)
new_state = self.new_machine.new_state(action)
self.old_to_new_dict[key] = new_state
self.new_to_old_dict[id(new_state)] = old_state_set
#for old_state in old_state_set.keys():
#new_state.merge_actions(old_state)
return new_state
def highest_priority_action(self, state_set):
best_action = None
best_priority = LOWEST_PRIORITY
for state in state_set.keys():
priority = state.action_priority
if priority > best_priority:
best_action = state.action
best_priority = priority
return best_action
# def old_to_new_set(self, old_state_set):
# """
# Return the new state corresponding to a set of old states as
# a singleton set.
# """
# return {self.old_to_new(old_state_set):1}
def new_to_old(self, new_state):
"""Given a new state, return a set of corresponding old states."""
return self.new_to_old_dict[id(new_state)]
def make_key(self, state_set):
"""
Convert a set of states into a uniquified
sorted tuple suitable for use as a dictionary key.
"""
lst = state_set.keys()
lst.sort()
return tuple(lst)
def dump(self, file):
from Transitions import state_set_str
for new_state in self.new_machine.states:
old_state_set = self.new_to_old_dict[id(new_state)]
file.write(" State %s <-- %s\n" % (
new_state['number'], state_set_str(old_state_set)))
| MichelRottleuthner/RIOT | tests/trace/tests/01-run.py | Python | lgpl-2.1 | 676 | 0 |
#!/usr/bin/env python3
# Copyright (C) 2016 Freie Universität Berlin
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import os
import sys
def testfunc(child):
child.expect(r"TRACE_SIZE: (\d+)")
trace_size = int(child.match.group(1))
for i in range(trace_size):
child.expect("0x[0-9a-f]{7,8}")
print("All tests successful")
if __name__ == "__main__":
    sys.path.append(os.path.join(os.environ['RIOTTOOLS'], 'testrunner'))
import testrunner
    sys.exit(testrunner.run(testfunc, timeout=1, echo=True, traceback=True))
| ghtdak/pyaxo | examples/create_states.py | Python | gpl-3.0 | 539 | 0 |
#!/usr/bin/env python
import os
from pyaxo import Axolotl
# start with a fresh database
try:
os.remove('./alice.db')
os.remove('./bob.db')
except OSError:
pass
# unencrypted databases
a = Axolotl('alice', dbname='alice.db', dbpassphrase=None)
b = Axolotl('bob', dbname='bob.db', dbpassphrase=None)
a.initState('bob', b.state['DHIs'], b.handshakePKey,
b.state['DHRs'], verify=False)
b.initState('alice', a.state['DHIs'], a.handshakePKey,
            a.state['DHRs'], verify=False)
a.saveState()
b.saveState()
| SAlkhairy/trabd | voting/migrations/0005_add_is_city_results_due_for_SACYear.py | Python | agpl-3.0 | 1,341 | 0.002237 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-04-22 07:21
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
        ('voting', '0004_winner_annoucement'),
]
operations = [
migrations.AddField(
model_name='sacyear',
name='alahsa_results_datetime',
field=models.DateTimeField(blank=True, null=True, verbose_name='\u062a\u0627\u0631\u064a\u062e \u0625\u0639\u0644\u0627\u0646 \u0627\u0644\u0646\u062a\u0627\u0626\u062c \u0641\u064a \u0627\u0644\u0623\u062d\u0633\u0627\u0621'),
),
migrations.AddField(
model_name='sacyear',
name='jeddah_results_datetime',
field=models.DateTimeField(blank=True, null=True, verbose_name='\u062a\u0627\u0631\u064a\u062e \u0625\u0639\u0644\u0627\u0646 \u0627\u0644\u0646\u062a\u0627\u0626\u062c \u0641\u064a \u062c\u062f\u0629'),
),
migrations.AddField(
model_name='sacyear',
name='riyadh_results_datetime',
field=models.DateTimeField(blank=True, null=True, verbose_name='\u062a\u0627\u0631\u064a\u062e \u0625\u0639\u0644\u0627\u0646 \u0627\u0644\u0646\u062a\u0627\u0626\u062c \u0641\u064a \u0627\u0644\u0631\u064a\u0627\u0636'),
),
]
| GuessWhoSamFoo/pandas | pandas/tests/scalar/interval/test_interval.py | Python | bsd-3-clause | 7,179 | 0 |
from __future__ import division
import numpy as np
import pytest
from pandas import Interval, Timedelta, Timestamp
import pandas.core.common as com
@pytest.fixture
def interval():
return Interval(0, 1)
class TestInterval(object):
def test_properties(self, interval):
assert interval.closed == 'right'
assert interval.left == 0
assert interval.right == 1
assert interval.mid == 0.5
def test_repr(self, interval):
assert repr(interval) == "Interval(0, 1, closed='right')"
assert str(interval) == "(0, 1]"
interval_left = Interval(0, 1, closed='left')
assert repr(interval_left) == "Interval(0, 1, closed='left')"
assert str(interval_left) == "[0, 1)"
def test_contains(self, interval):
assert 0.5 in interval
assert 1 in interval
assert 0 not in interval
msg = "__contains__ not defined for two intervals"
with pytest.raises(TypeError, match=msg):
interval in interval
interval_both = Interval(0, 1, closed='both')
assert 0 in interval_both
assert 1 in interval_both
interval_neither = Interval(0, 1, closed='neither')
assert 0 not in interval_neither
assert 0.5 in interval_neither
assert 1 not in interval_neither
def test_equal(self):
assert Interval(0, 1) == Interval(0, 1, closed='right')
assert Interval(0, 1) != Interval(0, 1, closed='left')
assert Interval(0, 1) != 0
def test_comparison(self):
with pytest.raises(TypeError, match='unorderable types'):
Interval(0, 1) < 2
assert Interval(0, 1) < Interval(1, 2)
assert Interval(0, 1) < Interval(0, 2)
assert Interval(0, 1) < Interval(0.5, 1.5)
assert Interval(0, 1) <= Interval(0, 1)
assert Interval(0, 1) > Interval(-1, 2)
assert Interval(0, 1) >= Interval(0, 1)
def test_hash(self, interval):
# should not raise
hash(interval)
@pytest.mark.parametrize('left, right, expected', [
(0, 5, 5),
(-2, 5.5, 7.5),
(10, 10, 0),
(10, np.inf, np.inf),
(-np.inf, -5, np.inf),
(-np.inf, np.inf, np.inf),
(Timedelta('0 days'), Timedelta('5 days'), Timedelta('5 days')),
(Timedelta('10 days'), Timedelta('10 days'), Timedelta('0 days')),
(Timedelta('1H10M'), Timedelta('5H5M'), Timedelta('3H55M')),
(Timedelta('5S'), Timedelta('1H'), Timedelta('59M55S'))])
def test_length(self, left, right, expected):
# GH 18789
iv = Interval(left, right)
result = iv.length
assert result == expected
@pytest.mark.parametrize('left, right, expected', [
('2017-01-01', '2017-01-06', '5 days'),
('2017-01-01', '2017-01-01 12:00:00', '12 hours'),
('2017-01-01 12:00', '2017-01-01 12:00:00', '0 days'),
('2017-01-01 12:01', '2017-01-05 17:31:00', '4 days 5 hours 30 min')])
@pytest.mark.parametrize('tz', (None, 'UTC', 'CET', 'US/Eastern'))
def test_length_timestamp(self, tz, left, right, expected):
# GH 18789
iv = Interval(Timestamp(left, tz=tz), Timestamp(right, tz=tz))
result = iv.length
expected = Timedelta(expected)
assert result == expected
@pytest.mark.parametrize('left, right', [
('a', 'z'),
(('a', 'b'), ('c', 'd')),
(list('AB'), list('ab')),
(Interval(0, 1), Interval(1, 2))])
def test_length_errors(self, left, right):
# GH 18789
iv = Interval(left, right)
msg = 'cannot compute length between .* and .*'
with pytest.raises(TypeError, match=msg):
iv.length
def test_math_add(self, closed):
interval = Interval(0, 1, closed=closed)
expected = Interval(1, 2, closed=closed)
result = interval + 1
assert result == expected
result = 1 + interval
assert result == expected
result = interval
result += 1
assert result == expected
msg = r"unsupported operand type\(s\) for \+"
with pytest.raises(TypeError, match=msg):
interval + interval
with pytest.raises(TypeError, match=msg):
interval + 'foo'
def test_math_sub(self, closed):
interval = Interval(0, 1, closed=closed)
expected = Interval(-1, 0, closed=closed)
result = interval - 1
assert result == expected
result = interval
result -= 1
assert result == expected
msg = r"unsupported operand type\(s\) for -"
with pytest.raises(TypeError, match=msg):
interval - interval
with pytest.raises(TypeError, match=msg):
interval - 'foo'
def test_math_mult(self, closed):
interval = Interval(0, 1, closed=closed)
expected = Interval(0, 2, closed=closed)
result = interval * 2
assert result == expected
result = 2 * interval
assert result == expected
result = interval
result *= 2
assert result == expected
msg = r"unsupported operand type\(s\) for \*"
        with pytest.raises(TypeError, match=msg):
interval * interval
msg = r"can\'t multiply sequence by non-int"
        with pytest.raises(TypeError, match=msg):
interval * 'foo'
def test_math_div(self, closed):
interval = Interval(0, 1, closed=closed)
expected = Interval(0, 0.5, closed=closed)
result = interval / 2.0
assert result == expected
result = interval
result /= 2.0
assert result == expected
msg = r"unsupported operand type\(s\) for /"
with pytest.raises(TypeError, match=msg):
interval / interval
with pytest.raises(TypeError, match=msg):
interval / 'foo'
def test_math_floordiv(self, closed):
interval = Interval(1, 2, closed=closed)
expected = Interval(0, 1, closed=closed)
result = interval // 2
assert result == expected
result = interval
result //= 2
assert result == expected
msg = r"unsupported operand type\(s\) for //"
with pytest.raises(TypeError, match=msg):
interval // interval
with pytest.raises(TypeError, match=msg):
interval // 'foo'
def test_constructor_errors(self):
msg = "invalid option for 'closed': foo"
with pytest.raises(ValueError, match=msg):
Interval(0, 1, closed='foo')
msg = 'left side of interval must be <= right side'
with pytest.raises(ValueError, match=msg):
Interval(1, 0)
@pytest.mark.parametrize('tz_left, tz_right', [
(None, 'UTC'), ('UTC', None), ('UTC', 'US/Eastern')])
def test_constructor_errors_tz(self, tz_left, tz_right):
# GH 18538
left = Timestamp('2017-01-01', tz=tz_left)
right = Timestamp('2017-01-02', tz=tz_right)
error = TypeError if com._any_none(tz_left, tz_right) else ValueError
with pytest.raises(error):
Interval(left, right)
| hep-cce/hpc-edge-service | argo/test_jobs/test_submit_alpgen.py | Python | bsd-3-clause | 2,987 | 0.035152 |
#!/usr/bin/env python
import sys,os,logging,optparse
from AlpgenArgoJob import AlpgenArgoJob
sys.path.append('/users/hpcusers/balsam/argo_deploy/argo_core')
from MessageInterface import MessageInterface
logger = logging.getLogger(__name__)
def main():
parser = optparse.OptionParser(description='submit alpgen job to ARGO')
parser.add_option('-e','--evts-per-iter',dest='evts_per_iter',help='number of events per warmup iteration',type='int')
parser.add_option('-i','--num-iter',dest='numiters',help='number of iterations for the warmup',type='int')
parser.add_option('-w','--warmup-weighted',dest='num_warmup',help='number of event to in the warmup, after the iterations complete',type='int')
parser.add_option('-n','--num-weighted',dest='num_weighted',help='number of weighted events to generate.',type='int')
parser.add_option('-p','--process',dest='process',help='define the process to generate, 2Q,4Q,hjet,top,wjet,zjet,Njet,etc.')
parser.add_option('-o','--num-nodes',dest='numnodes',help='number of nodes to use on destination machine',type='int')
parser.add_option('-c','--cpus-per-node',dest='cpus_per_node',help='number of CPUs per node to use on destination machine',type='int')
parser.add_option('-a','--alpgen-input',dest='alpgen_input_file',help='The AlpGen input file which carries all the options for this generation job')
parser.add_option('-t','--wall-time',dest='walltime',help='The wall time to submit to the queue in minutes.',type='int')
options,args = parser.parse_args()
if options.numiters is None:
parser.error('Must define the number of warmup iterations')
if options.process is None:
parser.error('Must define the process to generate')
if options.numnodes is None:
parser.error('Must define the number of nodes to use')
if options.cpus_per_node is None:
parser.error('Must define the number of CPUs per node to use')
if options.evts_per_iter is None:
parser.error('Must define the number of events per warmup iteration')
if options.num_weighted is None:
        parser.error('Must define the number of weighted events to produce')
    if options.num_warmup is None:
parser.error('Must define the number of weighted events to produce in the warmup step.')
if options.alpgen_input_file is None:
parser.error('Must define the AlpGen input file')
if options.walltime is None:
parser.error('Must specify a wall time')
user = os.environ.get('USER','nobody')
if(user == 'apf'): # AutoPyFactory
user= os.environ.get('prodUserID','nobody')
jobID = taskID + '0'
if options.resubmitjobid is not None:
jobID = int(options.resubmitjobid)
TOP_PATH = os.getcwd() # directory in which script was run
RUNPATH = os.path.join(TOP_PATH,str(jobID)) # directory in which to store files
if not os.path.exists(RUNPATH):
os.makedirs(RUNPATH) # make directories recursively like 'mkdir -p'
logger.info('JobID: ' + str(jobID))
if __name__ == '__main__':
main()
| oVirt/vdsm | tests/throttledlog_test.py | Python | gpl-2.0 | 3,280 | 0 |
#
# Copyright 2016 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
from __future__ import division
import logging
from vdsm import throttledlog
from monkeypatch import MonkeyPatch
from testlib import VdsmTestCase
class FakeLogger(object):
def __init__(self, level):
self.level = level
self.messages = []
def isEnabledFor(self, level):
return level >= self.level
def log(self, level, message, *args):
if not self.isEnabledFor(level):
return
self.messages.append(message % args)
class FakeTime(object):
def __init__(self):
self.time = 0.0
def __call__(self):
return self.time
class TestThrottledLogging(VdsmTestCase):
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.DEBUG))
def test_throttled_logging(self):
throttledlog.throttle('test', 3)
for i in range(5):
throttledlog.debug('test', "Cycle: %s", i)
self.assertEqual(throttledlog._logger.messages,
['Cycle: 0', 'Cycle: 3'])
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.INFO))
def test_no_logging(self):
throttledlog.throttle('test', 3)
for i in range(5):
throttledlog.debug('test', "Cycle: %s", i)
self.assertEqual(throttledlog._logger.messages, [])
    @MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.DEBUG))
def test_default(self):
throttledlog.throttle('test', 3)
for i in range(5):
throttledlog.debug('other', "Cycle: %s", i)
self.assertEqual(throttledlog._logger.messages,
['Cycle: %s' % (i,) for i in range(5)])
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.DEBUG))
@MonkeyPatch(throttledlog, "monotonic_time", FakeTime())
def test_timeout(self):
throttledlog.throttle('test', 10, timeout=7)
for i in range(12):
throttledlog.debug('test', "Cycle: %s", i)
throttledlog.monotonic_time.time += 1.0
self.assertEqual(throttledlog._logger.messages,
['Cycle: %s' % (i,) for i in (0, 7, 10,)])
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.WARNING))
def test_logging_warning(self):
throttledlog.throttle('test', 4)
for i in range(7):
throttledlog.warning('test', "Cycle: %s", i)
self.assertEqual(throttledlog._logger.messages,
['Cycle: 0', 'Cycle: 4'])
| GroupBank/global-server | rest_app/decorators.py | Python | agpl-3.0 | 1,619 | 0.001853 |
import logging
from django.http import HttpResponseBadRequest, HttpResponseForbidden
from functools import wraps
import groupbank_crypto.ec_secp256k1 as crypto
logger = logging.getLogger(__name__)
# decorator for verifying the payload is signed by the author of the request
def verify_author(view):
@wraps(view) # to get features like showing the original function name in trace backs
def wrapper(request):
# https://docs.djangoproject.com/en/1.11/topics/http/middleware/#process-view
# verify the JSON B64 string. return None if it's fine,
# return an HTTPResponse with an error if not
try:
author, signature, payload = request.POST['author'], request.POST['signature'], request.POST['payload']
except KeyError:
logger.info('Request with missing author, signature or payload')
return HttpResponseBadRequest()
# get user pubkey
# what if the author CAN'T already be registered? i.e.: group key
# maybe check view_func and ignore a few?
# or let the view itself verify if the author is registered...
        # NOTE: This does not verify if the signer is authorized for the operation.
# It only verifies if the signature matches the given pub key
try:
crypto.verify(author, signature, payload)
return view(request)
except (crypto.InvalidSignature, crypto.InvalidKey):
logger.info('Request with invalid author key or signature')
return HttpResponseForbidden()
# or 401 Unauthorized...
return wrapper
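# Hedged usage sketch: how a view might opt in to the check above. The view
# name and body are illustrative only, not part of this app.
# @verify_author
# def submit_operation(request):
#     payload = request.POST['payload']  # signature has been verified by now
#     ...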
| braysia/CellTK | celltk/utils/morphsnakes.py | Python | mit | 11,905 | 0.011937 |
# -*- coding: utf-8 -*-
"""
morphsnakes
===========
This is a Python implementation of the algorithms introduced in the paper
Márquez-Neila, P., Baumela, L., Álvarez, L., "A morphological approach
to curvature-based evolution of curves and surfaces". IEEE Transactions
on Pattern Analysis and Machine Intelligence (PAMI), 2013.
This implementation is intended to be as brief, understandable and self-contained
as possible. It does not include any enhancement to make it fast or efficient.
Any practical implementation of this algorithm should work only over the
neighbor pixels of the 0.5-levelset, not over all the embedding function,
and perhaps should feature multi-threading or GPU capabilities.
The classes MorphGAC and MorphACWE provide most of the functionality of this
module. They implement the Morphological Geodesic Active Contours and the
Morphological Active Contours without Edges, respectively. See the
aforementioned paper for full details.
See test.py for examples of usage.
"""
__author__ = "P. Márquez Neila <p.mneila@upm.es>"
from itertools import cycle
import numpy as np
from scipy import ndimage
from scipy.ndimage import binary_dilation, binary_erosion, \
gaussian_filter, gaussian_gradient_magnitude
class fcycle(object):
def __init__(self, iterable):
"""Call functions from the iterable each time it is called."""
self.funcs = cycle(iterable)
def __call__(self, *args, **kwargs):
f = next(self.funcs)
return f(*args, **kwargs)
# SI and IS operators for 2D and 3D.
_P2 = [np.eye(3), np.array([[0,1,0]]*3), np.flipud(np.eye(3)), np.rot90([[0,1,0]]*3)]
_P3 = [np.zeros((3,3,3)) for i in range(9)]
_P3[0][:,:,1] = 1
_P3[1][:,1,:] = 1
_P3[2][1,:,:] = 1
_P3[3][:,[0,1,2],[0,1,2]] = 1
_P3[4][:,[0,1,2],[2,1,0]] = 1
_P3[5][[0,1,2],:,[0,1,2]] = 1
_P3[6][[0,1,2],:,[2,1,0]] = 1
_P3[7][[0,1,2],[0,1,2],:] = 1
_P3[8][[0,1,2],[2,1,0],:] = 1
_aux = np.zeros((0))
def SI(u):
"""SI operator."""
global _aux
if np.ndim(u) == 2:
P = _P2
elif np.ndim(u) == 3:
P = _P3
else:
raise ValueError("u has an invalid number of dimensions (should be 2 or 3)")
if u.shape != _aux.shape[1:]:
_aux = np.zeros((len(P),) + u.shape)
for i in range(len(P)):
_aux[i] = binary_erosion(u, P[i])
return _aux.max(0)
def IS(u):
"""IS operator."""
global _aux
if np.ndim(u) == 2:
P = _P2
elif np.ndim(u) == 3:
P = _P3
else:
raise ValueError("u has an invalid number of dimensions (should be 2 or 3)")
if u.shape != _aux.shape[1:]:
_aux = np.zeros((len(P),) + u.shape)
for i in range(len(P)):
_aux[i] = binary_dilation(u, P[i])
return _aux.min(0)
# SIoIS operator.
SIoIS = lambda u: SI(IS(u))
ISoSI = lambda u: IS(SI(u))
curvop = fcycle([SIoIS, ISoSI])
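# Illustrative aside (not part of the original module): fcycle rotates through
# its callables on successive calls, so curvop applies SIoIS on the first call,
# ISoSI on the second, and so on. The same mechanism on toy functions:
#   toggle = fcycle([str.upper, str.lower])
#   toggle("ab") == "AB"; the next call toggle("ab") == "ab"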
# Stopping factors (function g(I) in the paper).
def gborders(img, alpha=1.0, sigma=1.0):
"""Stopping criterion for image borders."""
# The norm of the gradient.
gradnorm = gaussian_gradient_magnitude(img, sigma, mode='constant')
return 1.0/np.sqrt(1.0 + alpha*gradnorm)
def glines(img, sigma=1.0):
"""Stopping criterion for image black lines."""
return gaussian_filter(img, sigma)
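# Hedged sanity check for gborders (synthetic data, names illustrative): on a
# step image the smoothed gradient peaks at the edge, so the stopping factor
# g(I) = 1/sqrt(1 + alpha*|grad(G_sigma * I)|) dips there and stays near 1 in
# flat regions.
#   step = np.zeros((16, 16)); step[:, 8:] = 1.0
#   g = gborders(step, alpha=1000, sigma=2)   # low along the edge columns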
class MorphACWE(object):
"""Morphological ACWE based on the Chan-Vese energy functional."""
def __init__(self, data, smoothing=1, lambda1=1, lambda2=1):
"""Create a Morphological ACWE solver.
Parameters
----------
data : ndarray
The image data.
smoothing : scalar
The number of repetitions of the smoothing step (the
curv operator) in each iteration. In other terms,
this is the strength of the smoothing. This is the
parameter µ.
lambda1, lambda2 : scalars
Relative importance of the inside pixels (lambda1)
against the outside pixels (lambda2).
"""
self._u = None
self.smoothing = smoothing
self.lambda1 = lambda1
self.lambda2 = lambda2
self.data = data
def set_levelset(self, u):
self._u = np.double(u)
self._u[u>0] = 1
self._u[u<=0] = 0
levelset = property(lambda self: self._u,
set_levelset,
doc="The level set embedding function (u).")
def step(self):
"""Perform a single step of the morphological Chan-Vese evolution."""
# Assign attributes to local variables for convenience.
u = self._u
if u is None:
raise ValueError("the levelset function is not set (use set_levelset)")
data = self.data
# Determine c0 and c1.
inside = u>0
outside = u<=0
c0 = data[outside].sum() / float(outside.sum())
c1 = data[inside].sum() / float(inside.sum())
# Image attachment.
dres = np.array(np.gradient(u))
abs_dres = np.abs(dres).sum(0)
#aux = abs_dres * (c0 - c1) * (c0 + c1 - 2*data)
aux = abs_dres * (self.lambda1*(data - c1)**2 - self.lambda2*(data - c0)**2)
res = np.copy(u)
res[aux < 0] = 1
res[aux > 0] = 0
# Smoothing.
for i in range(self.smoothing):
res = curvop(res)
self._u = res
def run(self, iterations):
"""Run several iterations of the morphological Chan-Vese method."""
for i in range(iterations):
self.step()
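# Hedged usage sketch (data and helper names are illustrative; the original
# repository's test.py has the canonical examples):
#   macwe = MorphACWE(img, smoothing=1, lambda1=1, lambda2=2)
#   macwe.levelset = initial_mask     # any binary ndarray shaped like img
#   macwe.run(iterations=100)         # macwe.levelset now holds the result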
class MorphGAC(object):
"""Morphological GAC based on the Geodesic Active Contours."""
def __init__(self, data, smoothing=1, threshold=0, balloon=0):
"""Create a Morphological GAC solver.
Parameters
----------
data : array-like
The stopping criterion g(I). See functions gborders and glines.
smoothing : scalar
The number of repetitions of the smoothing step in each
iteration. This is the parameter µ.
threshold : scalar
The threshold that determines which areas are affected
by the morphological balloon. This is the parameter θ.
balloon : scalar
The strength of the morphological balloon. This is the parameter ν.
"""
self._u = None
self._v = balloon
self._theta = threshold
self.smoothing = smoothing
self.set_data(data)
def set_levelset(self, u):
self._u = np.double(u)
self._u[u>0] = 1
self._u[u<=0] = 0
def set_balloon(self, v):
self._v = v
self._update_mask()
def set_threshold(self, theta):
self._theta = theta
self._update_mask()
def set_data(self, data):
self._data = data
self._ddata = np.gradient(data)
self._update_mask()
# The structure element for binary dilation and erosion.
self.structure = np.ones((3,)*np.ndim(data))
def _update_mask(self):
"""Pre-compute masks for speed."""
self._threshold_mask = self._data > self._theta
self._threshold_mask_v = self._data > self._theta/np.abs(self._v)
levelset = property(lambda self: self._u,
set_levelset,
doc="The level set embedding function (u).")
data = property(lambda self: self._data,
set_data,
doc="The data that controls the snake evolution (the image or g(I)).")
balloon = property(lambda self: self._v,
set_balloon,
doc="The morphological balloon parameter (ν (nu, not v)).")
threshold = property(lambda self: self._theta,
set_threshold,
doc="The threshold value (θ).")
def step(self):
"""Perform a single step of the morphological snake evolution."""
# Assign attributes to local variables for convenience.
u = self._u
gI = self._data
| XTAv2/Enigma2 | lib/python/Screens/SkinSelector.py | Python | gpl-2.0 | 5,279 | 0.028793 |
# -*- coding: utf-8 -*-
from Screens.Screen import Screen
from Screens.Standby import TryQuitMainloop
from Screens.MessageBox import MessageBox
from Components.ActionMap import NumberActionMap
from Components.Pixmap import Pixmap
from Components.Sources.StaticText import StaticText
from Components.MenuList import MenuList
from Components.config import config, configfile
from Tools.Directories import resolveFilename, SCOPE_ACTIVE_SKIN
from enigma import eEnv, ePicLoad
import os
class SkinSelectorBase:
def __init__(self, session, args = None):
self.skinlist = []
self.previewPath = ""
if self.SKINXML and os.path.exists(os.path.join(self.root, self.SKINXML)):
self.skinlist.append(self.DEFAULTSKIN)
if self.PICONSKINXML and os.path.exists(os.path.join(self.root, self.PICONSKINXML)):
self.skinlist.append(self.PICONDEFAULTSKIN)
for root, dirs, files in os.walk(self.root, followlinks=True):
for subdir in dirs:
dir = os.path.join(root,subdir)
				if os.path.exists(os.path.join(dir,self.SKINXML)):
self.skinlist.append(subdir)
dirs = []
self["key_red"] = StaticText(_("Close"))
self["key_green"] = StaticText(_("Save"))
self["introduction"] = StaticText(_("Press OK to activate the selected skin."))
self["SkinList"] = MenuList(self.skinlist)
self["Preview"] = Pixmap()
self.skinlist.sort()
self["actions"] = NumberActionMap(["SetupActions", "DirectionActions", "TimerEditActions", "ColorActions"],
{
"ok": self.ok,
"cancel": self.close,
"red": self.close,
"green": self.ok,
"up": self.up,
"down": self.down,
"left": self.left,
"right": self.right,
"log": self.info,
}, -1)
self.picload = ePicLoad()
self.picload.PictureData.get().append(self.showPic)
self.onLayoutFinish.append(self.layoutFinished)
def showPic(self, picInfo=""):
ptr = self.picload.getData()
if ptr is not None:
self["Preview"].instance.setPixmap(ptr.__deref__())
self["Preview"].show()
def layoutFinished(self):
self.picload.setPara((self["Preview"].instance.size().width(), self["Preview"].instance.size().height(), 0, 0, 1, 1, "#00000000"))
tmp = self.config.value.find("/"+self.SKINXML)
if tmp != -1:
tmp = self.config.value[:tmp]
idx = 0
for skin in self.skinlist:
if skin == tmp:
break
idx += 1
if idx < len(self.skinlist):
self["SkinList"].moveToIndex(idx)
self.loadPreview()
def ok(self):
if self["SkinList"].getCurrent() == self.DEFAULTSKIN:
skinfile = ""
skinfile = os.path.join(skinfile, self.SKINXML)
elif self["SkinList"].getCurrent() == self.PICONDEFAULTSKIN:
skinfile = ""
skinfile = os.path.join(skinfile, self.PICONSKINXML)
else:
skinfile = self["SkinList"].getCurrent()
skinfile = os.path.join(skinfile, self.SKINXML)
print "Skinselector: Selected Skin: "+self.root+skinfile
self.config.value = skinfile
self.config.save()
configfile.save()
restartbox = self.session.openWithCallback(self.restartGUI,MessageBox,_("GUI needs a restart to apply a new skin\nDo you want to restart the GUI now?"), MessageBox.TYPE_YESNO)
restartbox.setTitle(_("Restart GUI now?"))
def up(self):
self["SkinList"].up()
self.loadPreview()
def down(self):
self["SkinList"].down()
self.loadPreview()
def left(self):
self["SkinList"].pageUp()
self.loadPreview()
def right(self):
self["SkinList"].pageDown()
self.loadPreview()
def info(self):
aboutbox = self.session.open(MessageBox,_("Enigma2 skin selector"), MessageBox.TYPE_INFO)
aboutbox.setTitle(_("About..."))
def loadPreview(self):
if self["SkinList"].getCurrent() == self.DEFAULTSKIN:
pngpath = "."
pngpath = os.path.join(os.path.join(self.root, pngpath), "prev.png")
elif self["SkinList"].getCurrent() == self.PICONDEFAULTSKIN:
pngpath = "."
pngpath = os.path.join(os.path.join(self.root, pngpath), "piconprev.png")
else:
pngpath = self["SkinList"].getCurrent()
try:
pngpath = os.path.join(os.path.join(self.root, pngpath), "prev.png")
except:
pass
if not os.path.exists(pngpath):
pngpath = resolveFilename(SCOPE_ACTIVE_SKIN, "noprev.png")
if self.previewPath != pngpath:
self.previewPath = pngpath
self.picload.startDecode(self.previewPath)
def restartGUI(self, answer):
if answer is True:
self.session.open(TryQuitMainloop, 3)
class SkinSelector(Screen, SkinSelectorBase):
SKINXML = "skin.xml"
DEFAULTSKIN = "< Default >"
PICONSKINXML = None
PICONDEFAULTSKIN = None
skinlist = []
root = os.path.join(eEnv.resolve("${datadir}"),"enigma2")
def __init__(self, session, args = None):
Screen.__init__(self, session)
SkinSelectorBase.__init__(self, args)
Screen.setTitle(self, _("Skin setup"))
self.skinName = "SkinSelector"
self.config = config.skin.primary_skin
class LcdSkinSelector(Screen, SkinSelectorBase):
SKINXML = "skin_display.xml"
DEFAULTSKIN = "< Default >"
PICONSKINXML = "skin_display_picon.xml"
PICONDEFAULTSKIN = "< Default with Picon >"
skinlist = []
root = os.path.join(eEnv.resolve("${datadir}"),"enigma2/display/")
def __init__(self, session, args = None):
Screen.__init__(self, session)
SkinSelectorBase.__init__(self, args)
Screen.setTitle(self, _("Skin setup"))
self.skinName = "SkinSelector"
self.config = config.skin.display_skin
| eHealthAfrica/rapidsms_textit | rapidsms_textit/views.py | Python | bsd-3-clause | 3,591 | 0.004456 |
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
from __future__ import print_function, unicode_literals
import logging
from django.http import HttpResponse, HttpResponseServerError, HttpResponseBadRequest
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
logger = logging.getLogger('textit.views')
if 'rapidsms' in settings.INSTALLED_APPS:
from rapidsms.router import receive, lookup_connections
else:
print('NOTE: loading test stub for RapidSMS.')
from tests.rapidsms_stub import receive, lookup_connections
@csrf_exempt
@require_POST
def message_received(request, backend_name):
"""Handle HTTP requests from TextIt.
"""
try:
backend = settings.INSTALLED_BACKENDS[backend_name]
except KeyError:
logger.error('Name "{}" not found in settings INSTALLED_BACKENDS.'.format(backend_name))
        return HttpResponseBadRequest('Name "{}" not found in settings INSTALLED_BACKENDS.'.format(backend_name))
try:
if request.META['QUERY_STRING'] != backend['config']['query_key']:
r = 'query_key "{}" does not match configured value from django settings "{}"'.format(
request.META['QUERY_STRING'], backend['config']['query_key'])
logger.error(r)
return HttpResponseBadRequest(r)
except KeyError:
logger.error("No query_key set up in settings INSTALLED_BACKENDS[backend_name]")
return HttpResponseBadRequest("No query_key set up in settings INSTALLED_BACKENDS[backend_name]")
post = request.POST
logger.debug("@@ request from TextIt - Decoded data: %r" % post)
try:
post_event = post['event']
except KeyError:
logger.error('No "Event" key in POST request')
return HttpResponseBadRequest("No Event key in POST request")
if post_event == 'mo_sms':
# Must have received a message
logger.debug("@@Got a text message")
try:
fa = post['phone']
from_address = fa[1:] if fa.startswith('+') else fa # strip off the plus sign
text = post['text']
logger.debug("@@Received message from %s: %s" % (from_address, text))
except KeyError:
logger.exception('Malformed POST message')
return HttpResponseBadRequest("Malformed POST message")
try:
# get (or create) a connections object for this backend and from_address
connections = lookup_connections(backend_name, [from_address])
except Exception as e:
r = "Error finding connection for backend_name={}, from={}, err={}".format(
backend_name, from_address, e)
logger.error(r)
return HttpResponseServerError(r)
try:
# pass the message to RapidSMS
receive(text, connections[0])
except Exception as e:
r = "Error receiving message. backend_name={}, from={}, err={}".format(
backend_name, from_address, e)
logger.error(r)
return HttpResponseServerError(r)
# Respond nicely to TextIt
return HttpResponse("OK")
# elif:
if post_event in ['mt_sent', 'mt_dlvd']:
return HttpResponse("thanks") # confirmation messages are ignored
# else:
logger.error("@@No recognized command in request from TextIt")
return HttpResponseBadRequest("Unexpected event code='{}'".format(post_event))
def index(request):
return HttpResponse("Hello, world. You're at the TextIt_test index.")
| sr-gi/paysense | utils/tor/tools.py | Python | bsd-3-clause | 2,930 | 0.001706 |
# Copyright (c) <2015> <Sergi Delgado Segura>
# Distributed under the BSD software license, see the accompanying file LICENSE
import pycurl
import stem.process
from stem.control import Controller
from stem.util import term
from StringIO import StringIO
__author__ = 'sdelgado'
SOCKS_PORT = 9050
CONTROL_PORT = 9051
def tor_query(url, method='GET', data=None, headers=None, socks_port=None):
    """ Performs an HTTP query using tor.
:param url: server address.
:type url: str
:param method: request method (GET, POST, ...).
:type method: str
:param data: data to be sent to the server.
:param data: JSON dumped object
    :param headers: headers of the request.
:type headers: str array
:param socks_port: local socket port where tor is listening to requests (configurable in tor.rc).
:type socks_port: int
:return: response code and some server response data.
:rtype: str, str
"""
output = StringIO()
if socks_port is None:
socks_port = SOCKS_PORT
query = pycurl.Curl()
query.setopt(pycurl.URL, url)
query.setopt(pycurl.PROXY, 'localhost')
query.setopt(pycurl.PROXYPORT, socks_port)
query.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME)
query.setopt(pycurl.WRITEFUNCTION, output.write)
if method == 'POST':
if data is None or headers is None:
return "Not enough parameters for POST"
else:
query.setopt(pycurl.HTTPHEADER, headers)
query.setopt(pycurl.POST, 1)
query.setopt(pycurl.POSTFIELDS, data)
try:
query.perform()
r_code = query.getinfo(pycurl.HTTP_CODE)
return r_code, output.getvalue()
except pycurl.error:
return 500, "Unable to reach " + url
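# Hedged usage sketch (URL illustrative; assumes a tor daemon is already
# listening on SOCKS_PORT):
#   r_code, body = tor_query('http://example.com/')
#   if r_code == 200:
#       print(body)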
def print_bootstrap_lines(line):
""" Print the bootstrap lines.
:param line: line to be printed.
:type line: str
:return: None.
"""
if "Bootstrapped " in line:
print(term.format(line, term.Color.BLUE))
def init_tor(socks_port=None, control_port=None):
""" Initiates a tor connection.
:param socks_port: local port socket where tor will listen to requests (configurable in tor.rc).
:type socks_port: int
:param control_port: local port where tor will listen to control requests (configurable in tor.rc).
:type control_port: int
:return: a tor process and a controller of the process.
:rtype: process, controller
"""
if socks_port is None:
socks_port = SOCKS_PORT
if control_port is None:
control_port = CONTROL_PORT
process = stem.process.launch_tor_with_config(
config={
'SocksPort': str(socks_port),
'ControlPort': str(control_port)
},
init_msg_handler=print_bootstrap_lines, timeout=60, take_ownership=True)
controller = Controller.from_port()
controller.authenticate()
return process, controller
| SpoonITCurrency/SpoonITCoin | spoonitcoin/share/qt/clean_mac_info_plist.py | Python | mit | 897 | 0.016722 |
#!/usr/bin/env python
# Jonas Schnelli, 2013
# make sure the Litecoin-Qt.app contains the right plist (including the right version)
# fix made because of several bugs in Qt mac deployment (https://bugreports.qt-project.org/browse/QTBUG-21267)
from string import Template
from datetime import date
bitcoinDir = "./";
inFile = bitcoinDir+"/share/qt/Info.plist"
outFile = "Litecoin-Qt.app/Contents/Info.plist"
version = "unknown";
fileForGrabbingVersion = bitcoinDir+"bitcoin-qt.pro"
for line in open(fileForGrabbingVersion):
lineArr = line.replace(" ", "").split("=");
if lineArr[0].startswith("VERSION"):
version = lineArr[1].replace("\n", "");
fIn = open(inFile, "r")
fileContent = fIn.read()
s = Template(fileContent)
newFileContent = s.substitute(VERSION=version,YEAR=date.today().year)
fOut = open(outFile, "w");
fOut.write(newFileContent);
print "Info.plist fresh created"
| Grumbel/dirtool | experiments/qnotify/qnotify.py | Python | gpl-3.0 | 1,673 | 0 |
#!/usr/bin/env python3
# dirtool.py - diff tool for directories
# Copyright (C) 2018 Ingo Ruhnke <grumbel@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import signal
import sys
from PyQt5.QtCore import QCoreApplication, QFileSystemWatcher
def directory_changed(path):
print("directory_changed: {}".format(path))
def file_changed(path):
print("file_changed: {}".format(path))
def main(argv):
signal.signal(signal.SIGINT, signal.SIG_DFL)
app = QCoreApplication([])
watcher = QFileSystemWatcher()
print("Watching /tmp/")
watcher.addPath("/tmp/")
watcher.addPath("/tmp/foo")
# Files have to be watched specifically for this to trigger.
# Deleting and recreating a file makes this no longer trigger.
watcher.fileChanged.connect(file_changed)
# This triggers on file creation and deletion
watcher.directoryChanged.connect(directory_changed)
print("files:", watcher.files())
print("directories:", watcher.directories())
sys.exit(app.exec())
if __name__ == "__main__":
main(sys.argv)
# EOF #
| ifduyue/sentry | tests/sentry/middleware/test_useractive.py | Python | bsd-3-clause | 912 | 0 |
from __future__ import absolute_import
from datetime import timedelta
from django.utils import timezone
from django.test import RequestFactory
from exam import fixture
from sentry.middleware.user import UserActiveMiddleware
from sentry.testutils import TestCase
class UserActiveMiddlewareTest(TestCase):
middleware = fixture(UserActiveMiddleware)
factory = fixture(RequestFactory)
def test_simple(self):
self.view = lambda x: None
user = self.user
req = self.factory.get('/')
req.user = user
resp = self.middleware.process_view(req, self.view, [], {})
assert resp is None
assert timezone.now() - user.last_active < timedelta(minutes=1)
user.last_active = None
resp = self.middleware.process_view(req, self.view, [], {})
assert resp is None
assert timezone.now() - user.last_active < timedelta(minutes=1)
| Acehaidrey/incubator-airflow | airflow/providers/google/suite/transfers/sql_to_sheets.py | Python | apache-2.0 | 5,101 | 0.00196 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import datetime
import numbers
from contextlib import closing
from typing import Any, Iterable, Mapping, Optional, Sequence, Union
from airflow.operators.sql import BaseSQLOperator
from airflow.providers.google.suite.hooks.sheets import GSheetsHook
class SQLToGoogleSheetsOperator(BaseSQLOperator):
"""
Copy data from SQL results to provided Google Spreadsheet.
:param sql: The SQL to execute.
:param spreadsheet_id: The Google Sheet ID to interact with.
:param conn_id: the connection ID used to connect to the database.
:param parameters: The parameters to render the SQL query with.
:param database: name of database which overwrite the defined one in connection
:param spreadsheet_range: The A1 notation of the values to retrieve.
:param gcp_conn_id: The connection ID to use when fetching connection info.
:param delegate_to: The account to impersonate using domain-wide delegation of authority,
if any. For this to work, the service account making the request must have
domain-wide delegation enabled.
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
"""
template_fields: Sequence[str] = (
"sql",
"spreadsheet_id",
"spreadsheet_range",
"impersonation_chain",
)
template_fields_renderers = {"sql": "sql"}
template_ext: Sequence[str] = (".sql",)
ui_color = "#a0e08c"
def __init__(
self,
*,
sql: str,
spreadsheet_id: str,
sql_conn_id: str,
parameters: Optional[Union[Mapping, Iterable]] = None,
database: Optional[str] = None,
spreadsheet_range: str = "Sheet1",
gcp_conn_id: str = "google_cloud_default",
delegate_to: Optional[str] = None,
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.sql = sql
self.conn_id = sql_conn_id
self.database = database
self.parameters = parameters
self.gcp_conn_id = gcp_conn_id
self.spreadsheet_id = spreadsheet_id
        self.spreadsheet_range = spreadsheet_range
self.delegate_to = delegate_to
self.impersonation_chain = impersonation_chain
def _data_prep(self, data):
for row in data:
item_list = []
for item in row:
if isinstance(item, (datetime.date, datetime.datetime)):
item = item.isoformat()
elif isinstance(item, int): # To exclude int from the number check.
pass
elif isinstance(item, numbers.Number):
item = float(item)
item_list.append(item)
yield item_list
def _get_data(self):
hook = self.get_db_hook()
with closing(hook.get_conn()) as conn, closing(conn.cursor()) as cur:
self.log.info("Executing query")
cur.execute(self.sql, self.parameters or ())
yield [field[0] for field in cur.description]
yield from self._data_prep(cur.fetchall())
def execute(self, context: Any) -> None:
self.log.info("Getting data")
values = list(self._get_data())
self.log.info("Connecting to Google")
sheet_hook = GSheetsHook(
gcp_conn_id=self.gcp_conn_id,
delegate_to=self.delegate_to,
impersonation_chain=self.impersonation_chain,
)
self.log.info(f"Uploading data to https://docs.google.com/spreadsheets/d/{self.spreadsheet_id}")
sheet_hook.update_values(
spreadsheet_id=self.spreadsheet_id,
range_=self.spreadsheet_range,
values=values,
)
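# Hedged usage sketch (task id, SQL and connection ids are illustrative):
#
#   upload_orders = SQLToGoogleSheetsOperator(
#       task_id="upload_orders",
#       sql="SELECT name, total FROM orders",
#       sql_conn_id="postgres_default",
#       spreadsheet_id="<spreadsheet id>",
#       spreadsheet_range="Sheet1",
#       gcp_conn_id="google_cloud_default",
#   )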
| hirolovesbeer/tfs | tfs.py | Python | apache-2.0 | 1,111 | 0.0027 |
#!/usr/bin/env python
import sys
import glob
import hdfs3
from hdfs3 import HDFileSystem
hdfs_nn = '192.168.33.10'
hdfs = HDFileSystem(host=hdfs_nn, port=8020)
class TransparentFileSystem:
def __init__(self):
self.hdfs_flag = False
return
def set_hdfs_flag(self, flag=True):
self.hdfs_flag = flag
def exists(self, target):
if hdfs.exists(target) is True:
print target + ' This dir is HDFS.'
self.hdfs_flag = True
else:
print target + ' This dir is not HDFS. Local FS.'
# if os.path.exists('')
def glob(self, target):
if self.hdfs_flag is True:
return hdfs.glob(target)
else:
return glob.glob(target)
if __name__ == "__main__":
tfs_hdfs = TransparentFileSystem()
    tfs_hdfs.exists('/tmp')
print tfs_hdfs.hdfs_flag
print tfs_hdfs.glob('/tmp')
tfs_local = TransparentFileSystem()
tfs_local.exists('dir to local')
tfs_local.set_hdfs_flag(False)
print tfs_local.hdfs_flag
print tfs_local.glob('dir to local')
sys.exit(0)
| syrusakbary/Flask-SuperAdmin | flask_superadmin/tests/test_mongoengine.py | Python | bsd-3-clause | 9,707 | 0.001751 |
from nose.tools import eq_, ok_, raises
import wtforms
from flask import Flask
from mongoengine import *
from flask_superadmin import Admin
from flask_superadmin.model.backends.mongoengine.view import ModelAdmin
class CustomModelView(ModelAdmin):
def __init__(self, model, name=None, category=None, endpoint=None,
url=None, **kwargs):
for k, v in kwargs.iteritems():
setattr(self, k, v)
super(CustomModelView, self).__init__(model, name, category, endpoint,
url)
def setup():
connect('superadmin_test')
app = Flask(__name__)
app.config['SECRET_KEY'] = '1'
app.config['WTF_CSRF_ENABLED'] = False
admin = Admin(app)
return app, admin
def test_model():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
view = CustomModelView(Person)
admin.add_view(view)
eq_(view.model, Person)
eq_(view.name, 'Person')
eq_(view.endpoint, 'person')
eq_(view.url, '/admin/person')
# Verify form
with app.test_request_context():
Form = view.get_form()
ok_(isinstance(Form()._fields['name'], wtforms.TextAreaField))
ok_(isinstance(Form()._fields['age'], wtforms.IntegerField))
# Make some test clients
client = app.test_client()
resp = client.get('/admin/person/')
eq_(resp.status_code, 200)
resp = client.get('/admin/person/add/')
eq_(resp.status_code, 200)
resp = client.post('/admin/person/add/',
data=dict(name='name', age='18'))
eq_(resp.status_code, 302)
person = Person.objects.first()
eq_(person.name, 'name')
eq_(person.age, 18)
resp = client.get('/admin/person/')
eq_(resp.status_code, 200)
ok_(str(person.pk) in resp.data)
resp = client.get('/admin/person/%s/' % person.pk)
eq_(resp.status_code, 200)
resp = client.post('/admin/person/%s/' % person.pk, data=dict(name='changed'))
eq_(resp.status_code, 302)
person = Person.objects.first()
eq_(person.name, 'changed')
eq_(person.age, 18)
resp = client.post('/admin/person/%s/delete/' % person.pk)
eq_(resp.status_code, 200)
eq_(Person.objects.count(), 1)
resp = client.post('/admin/person/%s/delete/' % person.pk, data={'confirm_delete': True})
eq_(resp.status_code, 302)
eq_(Person.objects.count(), 0)
def test_list_display():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
view = CustomModelView(Person, list_display=('name', 'age'))
admin.add_view(view)
eq_(len(view.list_display), 2)
client = app.test_client()
resp = client.get('/admin/person/')
ok_('Name' in resp.data)
ok_('Age' in resp.data)
resp = client.post('/admin/person/add/',
data=dict(name='Steve', age='18'))
eq_(resp.status_code, 302)
resp = client.get('/admin/person/')
ok_('Steve' in resp.data)
ok_('18' in resp.data)
def test_exclude():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
view = CustomModelView(Person, exclude=['name'])
admin.add_view(view)
# Verify form
with app.test_request_context():
Form = view.get_form()
eq_(Form()._fields.keys(), ['csrf_token', 'age'])
def test_fields():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
view = CustomModelView(Person, fields=['name'])
admin.add_view(view)
# Verify form
with app.test_request_context():
Form = view.get_form()
eq_(Form()._fields.keys(), ['csrf_token', 'name'])
def test_fields_and_exclude():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
view = CustomModelView(Person, fields=['name', 'age'], exclude=['name'])
admin.add_view(view)
# Verify form
with app.test_request_context():
Form = view.get_form()
eq_(Form()._fields.keys(), ['csrf_token', 'age'])
def test_search_fields():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
Person.objects.create(name='John', age=18)
Person.objects.create(name='Michael', age=21)
view = CustomModelView(Person, list_display=['name'],
search_fields=['name'])
admin.add_view(view)
eq_(len(view.search_fields), 1)
client = app.test_client()
resp = client.get('/admin/person/')
ok_('name="q" class="search-input"' in resp.data)
ok_('John' in resp.data)
ok_('Michael' in resp.data)
resp = client.get('/admin/person/?q=john')
ok_('John' in resp.data)
ok_('Michael' not in resp.data)
def test_pagination():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
Person.objects.create(name='John', age=18)
Person.objects.create(name='Michael', age=21)
Person.objects.create(name='Steve', age=15)
Person.objects.create(name='Ron', age=59)
view = CustomModelView(Person, list_per_page=2,
list_display=['name', 'age'])
admin.add_view(view)
client = app.test_client()
resp = client.get('/admin/person/')
ok_('<div class="total-count">Total count: 4</div>' in resp.data)
ok_('<a href="#">1</a>' in resp.data) # make sure the first page is active (i.e. has no url)
ok_('John' in resp.data)
ok_('Michael' in resp.data)
ok_('Steve' not in resp.data)
ok_('Ron' not in resp.data)
# default page == page 0
eq_(resp.data, client.get('/admin/person/?page=0').data)
resp = client.get('/admin/person/?page=1')
ok_('John' not in resp.data)
ok_('Michael' not in resp.data)
ok_('Steve' in resp.data)
ok_('Ron' in resp.data)
def test_sort():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
Person.objects.create(name='John', age=18)
Person.objects.create(name='Michael', age=21)
Person.objects.create(name='Steve', age=15)
Person.objects.create(name='Ron', age=59)
view = CustomModelView(Person, list_per_page=2,
list_display=['name', 'age'])
admin.add_view(view)
client = app.test_client()
resp = client.get('/admin/person/?sort=name')
ok_('John' in resp.data)
ok_('Michael' in resp.data)
ok_('Ron' not in resp.data)
ok_('Steve' not in resp.data)
resp = client.get('/admin/person/?sort=-name')
ok_('John' not in resp.data)
ok_('Michael' not in resp.data)
ok_('Ron' in resp.data)
ok_('Steve' in resp.data)
resp = client.get('/admin/person/?sort=age')
ok_('John' in resp.data)
ok_('Michael' not in resp.data)
ok_('Ron' not in resp.data)
ok_('Steve' in resp.data)
resp = client.get('/admin/person/?sort=-age')
ok_('John' not in resp.data)
ok_('Michael' in resp.data)
ok_('Ron' in resp.data)
ok_('Steve' not in resp.data)
def test_reference_linking():
app, admin = setup()
class Dog(Document):
name = StringField()
def __unicode__(self):
return self.name
class Person(Document):
name = StringField()
age = IntField()
pet = ReferenceField(Dog)
class DogAdmin(ModelAdmin):
pass
class PersonAdmin(ModelAdmin):
list_display = ('name', 'age', 'pet')
fields = ('name', 'age', 'pet')
readonly_fields = ('pet',)
Dog.drop_collection()
    Person.drop_collection()
dog = Dog.objects.create(name='Sparky')
person = Person.objects.create(name='Stan', age=10, pet=dog)
admin.register(Dog, DogAdmin, name='Dogs')
admin.register(Person, PersonAdmin, name='People')
client = app.test_client()
# te
| zenoss/pywbem | attic/irecv/pycimlistener.py | Python | lgpl-2.1 | 6,068 | 0.012525 |
#!/usr/bin/python
#
# Simple indication receiver using Twisted Python. HTTP post requests
# are listened for on port 5998 and on port 5999 using SSL.
#
# Requires Twisted Python and pyOpenSSL.
#
import sys
import optparse
import pywbem
from twisted.internet import reactor
from twisted.web import server, resource
global conn
conn=None
class WBEMConn:
_shared_state = {}
conn = None
def __init__(self, options=None):
# Borgness
self.__dict__ = WBEMConn._shared_state
self.conn = pywbem.SFCBUDSConnection()
'''
if options:
proto = 'http'
if options.secure:
proto = 'https'
url = '%s://%s' % (proto, options.host)
self.conn = pywbem.WBEMConnection(
url,
(options.user, options.password),
                default_namespace = options.namespace)
'''
global conn
conn = self.conn
class CIMOM(resource.Resource):
isLeaf = 1
def render_POST(self, request):
for line in request.content.readlines():
print(line)
return ''
from OpenSSL import SSL
class ServerContextFactory:
def getContext(self):
"""Create an SSL context with a dodgy certificate."""
ctx = SSL.Context(SSL.SSLv23_METHOD)
ctx.use_certificate_file('server.pem')
ctx.use_privatekey_file('server.pem')
return ctx
from twisted.internet import ssl, reactor
from twisted.python import log
from socket import getfqdn
import time
def _createFilter(query,
ns,
querylang='WQL',
src_ns='root/cimv2',
in_name=None):
name = in_name or 'cimfilter%s'%time.time()
filterinst=pywbem.CIMInstance('CIM_IndicationFilter')
filterinst['CreationClassName']='CIM_IndicationFilter'
filterinst['SystemCreationClassName']='CIM_ComputerSystem'
filterinst['SystemName']=getfqdn()
filterinst['Name']=name
filterinst['Query']=query
filterinst['QueryLanguage']=querylang
filterinst['SourceNamespace']=src_ns
cop = pywbem.CIMInstanceName('CIM_IndicationFilter')
cop.keybindings = { 'CreationClassName':'CIM_IndicationFilter',
'SystemClassName':'CIM_ComputerSystem',
'SystemName':getfqdn(),
'Name':name }
cop.namespace=ns
filterinst.path = cop
filtercop = conn.CreateInstance(filterinst)
return filtercop
def _createDest(destination,
ns,
in_name=None):
name = in_name or 'cimlistener%s'%time.time()
destinst=pywbem.CIMInstance('CIM_ListenerDestinationCIMXML')
destinst['CreationClassName']='CIM_ListenerDestinationCIMXML'
destinst['SystemCreationClassName']='CIM_ComputerSystem'
destinst['SystemName']=getfqdn()
print("destname=%s" % name)
destinst['Name']=name
destinst['Destination']=destination
cop = pywbem.CIMInstanceName('CIM_ListenerDestinationCIMXML')
cop.keybindings = { 'CreationClassName':'CIM_ListenerDestinationCIMXML',
'SystemClassName':'CIM_ComputerSystem',
'SystemName':getfqdn(),
'Name':name }
cop.namespace=ns
destinst.path = cop
destcop = conn.CreateInstance(destinst)
return destcop
def _createSubscription(ns,
                        indhandler,
                        indfilter):
subinst=pywbem.CIMInstance('CIM_IndicationSubscription')
subinst['Filter']=indfilter
subinst['Handler']=indhandler
cop = pywbem.CIMInstanceName('CIM_IndicationSubscription')
cop.keybindings = { 'Filter':indfilter,
'Handler':indhandler }
cop.namespace=ns
subinst.path = cop
subcop = conn.CreateInstance(subinst)
return subcop
if __name__ == '__main__':
global conn
parser = optparse.OptionParser()
parser.add_option('--level',
'-l',
action='store',
type='int',
dest='dbglevel',
help='Indicate the level of debugging statements to display (default=2)',
default=2)
parser.add_option('-s', '--UDS', help="Use the SFCBUDSConnection to the cimom", default=False )
parser.add_option('-u', '--url', default='https://localhost',
help='Specify the url of the CIMOM (default=https://localhost)')
parser.add_option('-n', '--namespace', default='root/interop',
help='Specify the namespace the test runs against (default=root/interop)')
parser.add_option('', '--user', default='pegasus',
help='Specify the user name used when connection to the CIMOM (default=pegasus)')
parser.add_option('', '--password', default='',
help='Specify the password for the user (default=<empty>)')
parser.add_option('--verbose', '', action='store_true', default=False,
help='Show verbose output')
parser.add_option('-q', '--query', help='Query string for Filter')
parser.add_option('-g', '--qlang', help='Query Language (default=WQL)', default="WQL")
parser.add_option('-d', '--dest', help='Destination for the CIM_ListenerDestination')
parser.add_option('-p', '--provider', help='Name of provider to setup listener for')
options, arguments = parser.parse_args()
conn = WBEMConn().conn
indhandler=None
indfilter=None
indsub=None
try:
indhandler = _createDest(options.dest, options.namespace)
indfilter = _createFilter(options.query, options.namespace, querylang=options.qlang)
indsub = _createSubscription(options.namespace, indhandler, indfilter)
log.startLogging(sys.stdout)
site = server.Site(CIMOM())
reactor.listenTCP(5998, site)
reactor.listenSSL(5999, site, ServerContextFactory())
reactor.run()
finally:
if indsub:
conn.DeleteInstance(indsub)
if indfilter:
conn.DeleteInstance(indfilter)
if indhandler:
conn.DeleteInstance(indhandler)
|
singingwolfboy/flask-dance
|
tests/contrib/test_github.py
|
Python
|
mit
| 2,744 | 0.001093 |
import pytest
import responses
from urlobject import URLObject
from flask import Flask
from flask_dance.contrib.github import make_github_blueprint, github
from flask_dance.consumer import OAuth2ConsumerBlueprint
from flask_dance.consumer.storage import MemoryStorage
@pytest.fixture
def make_app():
"A callable to create a Flask app with the GitHub provider"
def _make_app(*args, **kwargs):
app = Flask(__name__)
app.secret_key = "whatever"
        blueprint = make_github_blueprint(*args, **kwargs)
app.register_blueprint(blueprint)
return app
return _make_app
def test_blueprint_factory():
github_bp = make_github_blueprint(
client_id="foo", client_secret="bar", scope="user:email", redirect_to="index"
)
assert isinstance(github_bp, OAuth2ConsumerBlueprint)
assert github_bp.session.scope == "user:email"
assert github_bp.session.base_url == "https://api.github.com/"
assert github_bp.session.client_id == "foo"
assert github_bp.client_secret == "bar"
assert github_bp.authorization_url == "https://github.com/login/oauth/authorize"
assert github_bp.token_url == "https://github.com/login/oauth/access_token"
def test_load_from_config(make_app):
app = make_app()
app.config["GITHUB_OAUTH_CLIENT_ID"] = "foo"
app.config["GITHUB_OAUTH_CLIENT_SECRET"] = "bar"
resp = app.test_client().get("/github")
url = resp.headers["Location"]
client_id = URLObject(url).query.dict.get("client_id")
assert client_id == "foo"
@responses.activate
def test_context_local(make_app):
responses.add(responses.GET, "https://google.com")
# set up two apps with two different set of auth tokens
app1 = make_app(
"foo1",
"bar1",
redirect_to="url1",
storage=MemoryStorage({"access_token": "app1"}),
)
app2 = make_app(
"foo2",
"bar2",
redirect_to="url2",
storage=MemoryStorage({"access_token": "app2"}),
)
# outside of a request context, referencing functions on the `github` object
# will raise an exception
with pytest.raises(RuntimeError):
github.get("https://google.com")
# inside of a request context, `github` should be a proxy to the correct
# blueprint session
with app1.test_request_context("/"):
app1.preprocess_request()
github.get("https://google.com")
request = responses.calls[0].request
assert request.headers["Authorization"] == "Bearer app1"
with app2.test_request_context("/"):
app2.preprocess_request()
github.get("https://google.com")
request = responses.calls[1].request
assert request.headers["Authorization"] == "Bearer app2"
|
south-coast-science/scs_dfe_eng
|
src/scs_dfe/particulate/sps_30/sps_30.py
|
Python
|
mit
| 8,991 | 0.008008 |
"""
Created on 1 May 2019
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
https://www.sensirion.com/en/environmental-sensors/particulate-matter-sensors-pm25/
https://bytes.com/topic/python/answers/171354-struct-ieee-754-internal-representation
Firmware report:
89667EE8A8B34BC0
"""
import time
from scs_core.data.datetime import LocalizedDatetime
from scs_core.data.datum import Decode, Encode
from scs_core.particulate.sps_datum import SPSDatum, SPSDatumCounts
from scs_dfe.particulate.opc import OPC
from scs_host.bus.i2c import I2C
# --------------------------------------------------------------------------------------------------------------------
class SPS30(OPC):
"""
classdocs
"""
SOURCE = 'S30'
MIN_SAMPLE_PERIOD = 1.0 # seconds
MAX_SAMPLE_PERIOD = 10.0 # seconds
DEFAULT_SAMPLE_PERIOD = 10.0 # seconds
DEFAULT_ADDR = 0x69
# ----------------------------------------------------------------------------------------------------------------
__BOOT_TIME = 4.0 # seconds
__POWER_CYCLE_TIME = 2.0 # seconds
__FAN_START_TIME = 2.0 # seconds
__FAN_STOP_TIME = 2.0 # seconds
__CLEANING_TIME = 10.0 # seconds
__MAX_PERMITTED_ZERO_READINGS = 4
__CMD_START_MEASUREMENT = 0x0010
__CMD_STOP_MEASUREMENT = 0x0104
__CMD_READ_DATA_READY_FLAG = 0x0202
__CMD_READ_MEASURED_VALUES = 0x0300
__CMD_AUTO_CLEANING_INTERVAL = 0x8004
__CMD_START_FAN_CLEANING = 0x5607
__CMD_READ_ARTICLE_CODE = 0xd025
__CMD_READ_SERIAL_NUMBER = 0xd033
__CMD_RESET = 0xd304
__POST_WRITE_DELAY = 0.020 # seconds
__LOCK_TIMEOUT = 2.0
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def source(cls):
return cls.SOURCE
@classmethod
def uses_spi(cls):
return False
@classmethod
def datum_class(cls):
return SPSDatum
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def __decode(cls, chars):
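        # SPS30 I2C frames pack the payload as 2-byte words, each followed by
        # a CRC-8 byte; gather the data bytes and verify the CRC of every
        # 3-byte group.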
decoded = []
for i in range(0, len(chars), 3):
group = chars[i:i + 2]
decoded.extend(group)
actual_crc = chars[i + 2]
required_crc = cls.__crc(group)
if actual_crc != required_crc:
raise ValueError("bad checksum: required: 0x%02x actual: 0x%02x" % (required_crc, actual_crc))
return decoded
@classmethod
def __encode(cls, chars):
encoded = []
for i in range(0, len(chars), 2):
group = chars[i:i + 2]
encoded.extend(group)
encoded.append(cls.__crc(group))
return encoded
@staticmethod
def __crc(data):
crc = 0xff
for datum in data:
crc ^= datum
for bit in range(8, 0, -1):
crc = ((crc << 1) ^ 0x31 if crc & 0x80 else (crc << 1)) & 0xff
return crc
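    # Sanity check, assuming Sensirion's published CRC-8 parameters
    # (polynomial 0x31, init 0xFF, no reflection, no final XOR):
    # __crc([0xBE, 0xEF]) should evaluate to 0x92.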
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def lock_timeout(cls):
return cls.__LOCK_TIMEOUT
@classmethod
def boot_time(cls):
return cls.__BOOT_TIME
@classmethod
def power_cycle_time(cls):
return cls.__POWER_CYCLE_TIME
@classmethod
def max_permitted_zero_readings(cls):
return cls.__MAX_PERMITTED_ZERO_READINGS
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, interface, i2c_bus, i2c_addr):
"""
Constructor
"""
super().__init__(interface)
self.__i2c_bus = i2c_bus
self.__i2c_addr = i2c_addr
# ----------------------------------------------------------------------------------------------------------------
def operations_on(self):
self.__write(self.__CMD_START_MEASUREMENT, self.__FAN_START_TIME, 0x03, 0x00)
def operations_off(self):
self.__read(self.__CMD_STOP_MEASUREMENT, self.__FAN_STOP_TIME)
def reset(self):
self.__read(self.__CMD_RESET, self.__BOOT_TIME)
# ----------------------------------------------------------------------------------------------------------------
def clean(self):
self.__read(self.__CMD_START_FAN_CLEANING, self.__CLEANING_TIME)
@property
def cleaning_interval(self):
r = self.__read(self.__CMD_AUTO_CLEANING_INTERVAL, 0, 6)
interval = Decode.unsigned_long(r[0:4], '>')
return interval
@cleaning_interval.setter
def cleaning_interval(self, interval):
values = Encode.unsigned_long(interval, '>')
self.__write(self.__CMD_AUTO_CLEANING_INTERVAL, self.__POST_WRITE_DELAY, *values)
# ----------------------------------------------------------------------------------------------------------------
def data_ready(self):
chars = self.__read(self.__CMD_READ_DATA_READY_FLAG, 0, 3)
return chars[1] == 0x01
def sample(self):
r = self.__read(self.__CMD_READ_MEASURED_VALUES, 0, 60)
# density...
pm1 = Decode.float(r[0:4], '>')
pm2p5 = Decode.float(r[4:8], '>')
pm4 = Decode.float(r[8:12], '>')
pm10 = Decode.float(r[12:16], '>')
# count...
pm0p5_count = Decode.float(r[16:20], '>')
pm1_count = Decode.float(r[20:24], '>')
pm2p5_count = Decode.float(r[24:28], '>')
pm4_count = Decode.float(r[28:32], '>')
pm10_count = Decode.float(r[32:36], '>')
# typical size...
tps = Decode.float(r[36:40], '>')
# time...
rec = LocalizedDatetime.now().utc()
# report...
counts = SPSDatumCounts(pm0p5_count, pm1_count, pm2p5_count, pm4_count, pm10_count)
return SPSDatum(self.SOURCE, rec, pm1, pm2p5, pm4, pm10, counts, tps)
# ----------------------------------------------------------------------------------------------------------------
def version(self):
r = self.__read(self.__CMD_READ_ARTICLE_CODE, 0, 48)
        version = ''.join(chr(byte) for byte in r)
return version
def serial_no(self):
r = self.__read(self.__CMD_READ_SERIAL_NUMBER, 0, 48)
serial_no = ''.join(chr(byte) for byte in r)
return serial_no
def firmware(self):
return self.serial_no()
# ----------------------------------------------------------------------------------------------------------------
def get_firmware_conf(self):
raise NotImplementedError
def set_firmware_conf(self, jdict):
raise NotImplementedError
def commit_firmware_conf(self):
raise NotImplementedError
# ----------------------------------------------------------------------------------------------------------------
@property
def bus(self):
return self.__i2c_bus
@property
def address(self):
return self.__i2c_addr
# ----------------------------------------------------------------------------------------------------------------
@property
def lock_name(self):
return self.__class__.__name__ + '-' + str(self.__i2c_bus) + '-' + ("0x%02x" % self.__i2c_addr)
# ----------------------------------------------------------------------------------------------------------------
def __read(self, command, wait, count=0):
try:
self.obtain_lock()
try:
I2C.Sensors.start_tx(self.__i2c_addr)
encoded = I2C.Sensors.read_cmd16(command, count)
values = self.__decode(encoded)
finally:
I2C.Sensors.end_tx()
|
pdebuyl-lab/RMPCDMD
|
experiments/03-single-janus/plot_planar.py
|
Python
|
bsd-3-clause
| 1,110 | 0.00991 |
#!/usr/bin/env python
"""
Display the planar concentration and velocity fields of a RMPCDMD simulation.
"""
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('file', help="H5MD file")
parser.add_argument('--species', type=int, default=0)
args = parser.parse_args()
import h5py
import matplotlib.pyplot as plt
import numpy as np
with h5py.File(args.file, 'r') as f:
c = f['fields/planar_concentration']
v = f['fields/planar_velocity']
x_min = c.attrs['x_min'][()]
dx = c.attrs['dx'][()]
y_min = c.attrs['y_min'][()]
    dy = c.attrs['dy'][()]
thickness = c.attrs['thickness'][()]
c = c[:]
v = v[:]
N_x, N_y = c.shape[:2]
# x and y must overshoot c.shape by one for pcolormesh
x = x_min + np.arange(N_x+1)*dx
y = y_min + np.arange(N_y+1)*dy
c /= dx*dy*thickness
plt.subplot(121, aspect=1)
plt.pcolormesh(x, y, c[:,:,args.species].T, cmap=plt.cm.viridis)
plt.colorbar()
plt.subplot(122, aspect=1)
x, y = np.meshgrid(x[:-1], y[:-1])
plt.quiver(x, y, v[:,:,args.species,0].T, v[:,:,args.species,1].T)
plt.show()
|
veltzer/demos-python
|
src/examples/short/pandas/tuples_as_indices.py
|
Python
|
gpl-3.0
| 308 | 0.003247 |
#!/usr/bin/env python
"""
A basic demo of pandas
"""
from pandas import DataFrame
df = DataFrame(["a", "b", "c"], index=[("0", "1"), ("1", "2"), ("2", "3")])
print(df.get_values())
try:
print(df.ix[("0", "1")])
except:
print("yes, accessing .ix with tuple does not work")
print(df.xs(("0", "1")))
|
sileht/pastamaker
|
pastamaker/gh_pr_fullifier.py
|
Python
|
apache-2.0
| 9,306 | 0 |
# -*- encoding: utf-8 -*-
#
# Copyright © 2017 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import logging
import time
import github
import requests
import six.moves
LOG = logging.getLogger(__name__)
TRAVIS_BASE_URL = 'https://api.travis-ci.org'
TRAVIS_V2_HEADERS = {"Accept": "application/vnd.travis-ci.2+json",
"User-Agent": "Pastamaker/1.0.0"}
UNUSABLE_STATES = ["unknown", None]
def ensure_mergable_state(pull):
if pull.is_merged() or pull.mergeable_state not in UNUSABLE_STATES:
return pull
    # GitHub is still computing this PR's mergeable state, so wait for completion
for i in range(0, 5):
LOG.info("%s, refreshing...", pull.pretty())
pull.update()
if pull.is_merged() or pull.mergeable_state not in UNUSABLE_STATES:
break
        time.sleep(0.42)  # you know, this one always works
return pull
def compute_travis_detail(pull, **extra):
if (not pull.pastamaker["travis_url"] or
pull.pastamaker["travis_url"] == "#"):
return None
build_id = pull.pastamaker["travis_url"].split("?")[0].split("/")[-1]
r = requests.get(TRAVIS_BASE_URL + "/builds/" + build_id,
headers=TRAVIS_V2_HEADERS)
if r.status_code != 200:
return None
build = r.json()["build"]
build["resume_state"] = pull.pastamaker["travis_state"]
build["jobs"] = []
for job_id in build["job_ids"]:
r = requests.get(TRAVIS_BASE_URL + "/jobs/%s" % job_id,
headers=TRAVIS_V2_HEADERS)
if r.status_code == 200:
job = r.json()["job"]
job["log_url"] = TRAVIS_BASE_URL + "/jobs/%s/log" % job_id
LOG.debug("%s: job %s %s -> %s" % (pull.pretty(), job_id,
job["state"],
job["log_url"]))
build["jobs"].append(job)
if (pull.pastamaker["travis_state"] == "pending" and
job["state"] == "started"):
build["resume_state"] = "working"
LOG.debug("%s: build %s %s/%s" % (pull.pretty(), build_id,
build["state"],
build["resume_state"]))
return build
def compute_approvals(pull, **extra):
users_info = {}
reviews_ok = set()
reviews_ko = set()
for review in pull.pastamaker["reviews"]:
if review.user.id not in extra["collaborators"]:
continue
users_info[review.user.login] = review.user.raw_data
if review.state == 'APPROVED':
reviews_ok.add(review.user.login)
if review.user.login in reviews_ko:
reviews_ko.remove(review.user.login)
elif review.state in ["DISMISSED", "CHANGES_REQUESTED"]:
if review.user.login in reviews_ok:
reviews_ok.remove(review.user.login)
if review.user.login in reviews_ko:
reviews_ko.remove(review.user.login)
if review.state == "CHANGES_REQUESTED":
reviews_ko.add(review.user.login)
elif review.state == 'COMMENTED':
pass
else:
LOG.error("%s FIXME review state unhandled: %s",
pull.pretty(), review.state)
try:
required = extra["branch_policy"][
"required_pull_request_reviews"]["required_approving_review_count"]
except KeyError:
return [], [], 1, 1
# FIXME(sileht): Compute the thing on JS side
remaining = list(six.moves.range(max(0, required - len(reviews_ok))))
return ([users_info[u] for u in reviews_ok],
[users_info[u] for u in reviews_ko],
required, remaining)
def compute_combined_status(pull, **extra):
commit = pull.base.repo.get_commit(pull.head.sha)
status = commit.get_combined_status()
return status.state
def compute_ci_statuses(pull, **extra):
    # We need only travis, so shortcut to it here
if "travis" in extra:
raw_statuses = [extra["travis"]]
else:
        # NOTE(sileht): Statuses are returned in reverse chronological order.
# The first status in the list will be the latest one.
commit = pull.base.repo.get_commit(pull.head.sha)
raw_statuses = [s.raw_data
for s in reversed(list(commit.get_statuses()))]
statuses = {}
    for s in raw_statuses:
statuses[s["context"]] = {"state": s["state"], "url": s["target_url"]}
return statuses
def compute_approved(pull, **extra):
approved = len(pull.pastamaker["approvals"][0])
requested_changes = len(pull.pastamaker['approvals'][1])
required = pull.pastamaker['approvals'][2]
if requested_changes != 0:
return False
else:
return approved >= required
def compute_travis_state(pull, **extra):
return pull.pastamaker["ci_statuses"].get(
"continuous-integration/travis-ci/pr", {"state": "unknown"}
)["state"]
def compute_travis_url(pull, **extra):
return pull.pastamaker["ci_statuses"].get(
"continuous-integration/travis-ci/pr", {"url": "#"}
)["url"]
def compute_weight(pull, **extra):
if not pull.pastamaker["approved"]:
weight = -1
elif (pull.mergeable_state == "clean"
and pull.pastamaker["combined_status"] == "success"):
# Best PR ever, up2date and CI OK
weight = 11
elif pull.mergeable_state in ["clean", "unstable"]:
weight = 10
elif (pull.mergeable_state == "blocked"
and pull.pastamaker["combined_status"] == "pending"):
        # Maybe clean soon, or maybe this is the PR selected by the
        # previous run that we just rebased
weight = 10
elif pull.mergeable_state == "behind":
        # Not up to date, but ready to merge if the branch is updatable
if not pull.maintainer_can_modify:
weight = -1
elif pull.pastamaker["combined_status"] == "success":
weight = 7
elif pull.pastamaker["combined_status"] == "pending":
weight = 5
else:
weight = -1
else:
weight = -1
if weight >= 0 and pull.milestone is not None:
weight += 1
# LOG.info("%s prio: %s, %s, %s, %s, %s", pull.pretty(), weight,
# pull.pastamaker["approved"], pull.mergeable_state,
# pull.pastamaker["combined_status"])
return weight
# Order matters: some methods need the result of others
FULLIFIER = [
("commits", lambda p, **extra: list(p.get_commits())),
("reviews", lambda p, **extra: list(p.get_reviews())),
("combined_status", compute_combined_status),
("approvals", compute_approvals), # Need reviews
("approved", compute_approved), # Need approvals
("ci_statuses", compute_ci_statuses), # Need approvals
("travis_state", compute_travis_state), # Need ci_statuses
("travis_url", compute_travis_url), # Need ci_statuses
("travis_detail", compute_travis_detail), # Need travis_url
("weight", compute_weight), # Need approved, travis_state
]
CACHE_HOOK_LIST_CONVERT = {
"commits": github.Commit.Commit,
"reviews": github.PullRequestReview.PullRequestReview,
}
def jsonify(pull):
raw = copy.copy(pull.raw_data)
for key, method in FULLIFIER:
value = pull.pastamaker[key]
if key in CACHE_HOOK_LIST_CONVERT:
try:
value = [item.raw_data for item in value]
except AttributeError:
LOG.exception("%s, fail to cache %s: %s",
pull.pretty(), key, value)
raw["pa
|
RyanJenkins/ISS
|
ISS/migrations/0012_privatemessage.py
|
Python
|
gpl-3.0
| 1,032 | 0.004845 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
from django.conf import settings
import uuid
class Migration(migrations.Migration):
dependencies = [
('ISS', '0011_poster_timezone'),
]
operations = [
migrations.CreateModel(
name='PrivateMessage',
fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('chain', models.UUIDField(default=uuid.uuid4, editable=False)),
                ('created', models.DateTimeField(default=django.utils.timezone.now)),
('subject', models.CharField(max_length=256)),
('content', models.TextField()),
('receiver', models.ForeignKey(related_name='pms_received', to=settings.AUTH_USER_MODEL)),
('sender', models.ForeignKey(related_name='pms_sent', to=settings.AUTH_USER_MODEL)),
],
),
]
|
atlefren/beerdatabase
|
alembic/versions/3b3de4db8006_fix_stock_again_17_03_16.py
|
Python
|
mit
| 642 | 0.004673 |
"""fix stock again (17.03.16)
Revision ID: 3b3de4db8006
Revises: 1b434f6a7b5
Create Date: 2016-03-17 22:02:55.090285
"""
# revision identifiers, used by Alembic.
revision = '3b3de4db8006'
down_revision = '1b434f6a7b5'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.execute('DROP VIEW pol_stock_latest;')
op.execute('''
        CREATE VIEW pol_stock_latest as
SELECT DISTINCT ON (pol_beer_id, shop_id) shop_id, pol_beer_id, stock, updated
FROM pol_stock
ORDER BY pol_beer_id, shop_id, updated DESC;
''')
def downgrade():
pass
|
ColinKeigher/McAfeeWebGateway
|
mwg/parse.py
|
Python
|
gpl-2.0
| 236 | 0.012712 |
import xmltodict
def parseData(data):
try:
return xmltodict.parse(data)
except:
        if len(data.split()) == 0:
            return None
else:
raise Exception('Invalid XML data', data)
|
stardog-union/stardog-graviton
|
release/release.py
|
Python
|
apache-2.0
| 8,041 | 0.000871 |
import os
import shutil
import subprocess
import sys
import tempfile
import threading
_g_failed = []
def this_location():
return os.path.abspath(os.path.dirname(__file__))
def checkenv(sd_license, release, ssh_key_path):
required_vars = ['AWS_ACCESS_KEY_ID',
'AWS_SECRET_ACCESS_KEY',
'GOPATH']
for k in required_vars:
v = os.getenv(k)
if v is None:
raise Exception("The environment variable %s must be set" % k)
p = subprocess.Popen("docker ps", shell=True)
rc = p.wait()
if rc != 0:
raise Exception("The docker environment is not configured")
file_list = [sd_license, release, ssh_key_path]
for f in file_list:
if not os.path.exists(f):
raise Exception("The file %s does not exist" % f)
os.unsetenv('STARDOG_ADMIN_PASSWORD')
def build_with_gox():
base_dir = os.path.dirname(this_location())
cmd = 'gox -osarch="linux/amd64" -osarch="darwin/amd64" ' \
'-output=release/{{.OS}}_{{.Arch}}/stardog-graviton '\
'github.com/stardog-union/stardog-graviton/cmd/stardog-graviton'
p = subprocess.Popen(cmd, shell=True, cwd=base_dir)
rc = p.wait()
if rc != 0:
raise Exception("Failed to cross compile graviton")
if not os.path.exists(os.path.join(this_location(), "linux_amd64", "stardog-graviton")):
raise Exception("The linux compile failed")
if not os.path.exists(os.path.join(this_location(), "darwin_amd64",
"stardog-graviton")):
raise Exception("The osx compile failed")
def prep_run(sd_license, release, grav_exe, ssh_key_path):
src_dir = this_location()
    work_dir = tempfile.mkdtemp(prefix="graviton",
dir=os.path.abspath(os.path.dirname(__file__)))
try:
files_to_join_and_copy = ['rows.rdf', 'smoke_test_1.py']
for f in files_to_join_and_copy:
shutil.copy(os.path.join(src_dir, f),
os.path.join(work_dir, f))
shutil.copy(sd_license,
                    os.path.join(work_dir, "stardog-license-key.bin"))
shutil.copy(release,
os.path.join(work_dir, os.path.basename(release)))
shutil.copy(grav_exe,
os.path.join(work_dir, "stardog-graviton"))
shutil.copy(ssh_key_path,
os.path.join(work_dir, "ssh_key"))
return work_dir
finally:
pass
def run_local(work_dir, ssh_key_name, release):
print("Running in %s" % work_dir)
cmd = "python %s %s %s %s %s" % (
os.path.join(work_dir, "smoke_test_1.py"),
work_dir, release, ssh_key_name, os.path.dirname(this_location()))
print("Running %s" % cmd)
p = subprocess.Popen(cmd, shell=True, cwd=work_dir)
rc = p.wait()
if rc != 0:
raise Exception("Failed to run the smoke test")
print ("XXX Local run was successful")
def build_docker(image_name):
print("Building the docker container")
cmd = "docker build -t %s . --no-cache" % image_name
p = subprocess.Popen(cmd, shell=True, cwd=this_location())
rc = p.wait()
if rc != 0:
raise Exception("Failed build the container")
def compile_linux(image_name):
print("Compiling in a docker container")
top_dir = os.path.join(this_location(), "..")
try:
os.makedirs(os.path.join(this_location(), "release", "linux_amd64"))
except:
pass
internal_gopath = "/opt/go/src/"
docker_cmd = "/usr/lib/go-1.10/bin/go build -o %s/src/github.com/stardog-union/stardog-graviton/release/linux_amd64/stardog-graviton github.com/stardog-union/stardog-graviton/cmd/stardog-graviton" % internal_gopath
cmd = "docker run -e GOPATH=%s -v %s:%s/src/github.com/stardog-union/stardog-graviton -it %s %s" % (internal_gopath, top_dir, internal_gopath, image_name, docker_cmd)
print(cmd)
p = subprocess.Popen(cmd, shell=True, cwd=this_location())
rc = p.wait()
if rc != 0:
raise Exception("Failed build the container")
def run_docker(work_dir, ssh_key_name, release, image_name):
print("Running docker for testing...")
cmd = "docker run -v %s:/smoke " \
"-e AWS_SECRET_ACCESS_KEY=%s " \
"-e AWS_ACCESS_KEY_ID=%s " \
"-it %s " \
"python /smoke/smoke_test_1.py /smoke %s %s" %\
(work_dir,
os.environ['AWS_SECRET_ACCESS_KEY'],
os.environ['AWS_ACCESS_KEY_ID'],
image_name, release, ssh_key_name)
p = subprocess.Popen(cmd, shell=True, cwd=work_dir)
rc = p.wait()
if rc != 0:
raise Exception("Failed to run the smoke tests in the container")
def print_usage():
print("Invalid arguments:")
print("<path to stardog license> <path to stardog release file>"
" <path to ssh private key> <aws key name>")
def get_version():
cmd = "git describe --abbrev=0 --tags"
work_dir = os.path.dirname(this_location())
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, cwd=work_dir)
(o, e) = p.communicate()
rc = p.wait()
if rc != 0:
raise Exception("Failed to zip the file")
return o.strip()
def zip_one(arch):
ver = get_version()
work_dir = os.path.join(this_location(), arch)
cmd = "zip stardog-graviton_%s_%s.zip stardog-graviton" % (ver, arch)
p = subprocess.Popen(cmd, shell=True, cwd=work_dir)
rc = p.wait()
if rc != 0:
raise Exception("Failed to zip the file")
def darwin_test(sd_license, release, ssh_key_path, ssh_key_name):
try:
darwin_binary = os.path.join(this_location(),
"darwin_amd64", "stardog-graviton")
release_name = os.path.basename(release)
work_dir = prep_run(sd_license, release, darwin_binary, ssh_key_path)
run_local(work_dir, ssh_key_name, release_name)
print("Successfully smoke tested for darwin.")
print("Exe: darwin_amd64/stardog-graviton")
except Exception as ex:
global _g_failed
_g_failed.append("Darwin failed: %s" % str(ex))
print("TEST ERROR darwin %s" % str(ex))
zip_one("darwin_amd64")
def linux_test(sd_license, release, ssh_key_path, ssh_key_name):
try:
build_docker("graviton-release-tester")
compile_linux("graviton-release-tester")
linux_binary = os.path.join(this_location(),
"linux_amd64", "stardog-graviton")
release_name = os.path.basename(release)
work_dir = prep_run(sd_license, release, linux_binary, ssh_key_path)
run_docker(work_dir, ssh_key_name, release_name, "graviton-release-tester")
print("Successfully smoke tested for darwin.")
print("Exe: linux_amd64/stardog-graviton")
except Exception as ex:
global _g_failed
_g_failed.append("Linus failed: %s" % str(ex))
print("TEST ERROR linux %s" % str(ex))
zip_one("linux_amd64")
def main():
if len(sys.argv) < 4:
print_usage()
return 1
sd_license = sys.argv[1]
release = sys.argv[2]
ssh_key_path = sys.argv[3]
ssh_key_name = sys.argv[4]
checkenv(sd_license, release, ssh_key_path)
build_with_gox()
threads = []
if sys.platform != "darwin":
print("XXXXXX We cannot test of OSX on this platform")
else:
t = threading.Thread(
target=darwin_test,
args=(sd_license, release, ssh_key_path, ssh_key_name))
threads.append(t)
t.start()
t = threading.Thread(
target=linux_test,
args=(sd_license, release, ssh_key_path, ssh_key_name))
threads.append(t)
t.start()
print("Started %d tests, waiting for completion..." % len(threads))
for t in threads:
t.join()
if len(_g_failed) != 0:
print("The tests failed %s" % _g_failed)
return 1
print("Success!")
return 0
if __name__ == "__main__":
rc = main()
sys.exit(rc)
|
MrOnlineCoder/shockd
|
scripts/get.py
|
Python
|
mit
| 523 | 0.00956 |
# Sends GET to local server
# Author: schdub
#!/usr/bin/python
import socket
import sys
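# Usage (the port is hard-coded to 3000 in the call at the bottom):
#   python get.py localhost /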
def GET(host, path, port):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(0.30)
s.connect((host, port))
    s.send("GET %s HTTP/1.0\r\n\r\n" % (path))  # blank line terminates the request
total_data = []
while True:
data = s.recv(8192)
        if (len(data)>0):
total_data.append(data)
else:
break
    print ''.join(total_data)
s.shutdown(1)
s.close()
GET(sys.argv[1], sys.argv[2], 3000)
|
arantebillywilson/python-snippets
|
py3/py344-tutor/ch06-modules/importing_modules.py
|
Python
|
mit
| 444 | 0.004505 |
#! /usr/bin/env python3
#
# importing_modules.py
#
# Author: Billy Wilson Arante
# Created: 2/24/2016 PHT
#
import fibo
def test():
"""Test cases."""
    print('Example 1:')
fibo.fib(1000)
print('Example 2:')
print(fibo.fib1(1000))
print('Example 3:')
    print(fibo.__name__)
# Assigning function a local name
fib = fibo.fib
print('Example 4:')
fib(1000)
if __name__ == '__main__':
test()
|
kerneltask/micropython
|
tools/metrics.py
|
Python
|
mit
| 7,086 | 0.001129 |
#!/usr/bin/env python3
#
# This file is part of the MicroPython project, http://micropython.org/
#
# The MIT License (MIT)
#
# Copyright (c) 2020 Damien P. George
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
This script is used to compute metrics, like code size, of the various ports.
Typical usage is:
$ ./tools/metrics.py build | tee size0
<wait for build to complete>
$ git switch new-feature-branch
$ ./tools/metrics.py build | tee size1
<wait for build to complete>
$ ./tools/metrics.py diff size0 size1
Other commands:
$ ./tools/metrics.py sizes # print all firmware sizes
$ ./tools/metrics.py clean # clean all ports
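Commands that take a port list accept the single-character port codes defined
in port_data below, e.g. (assuming the default codes):
    $ ./tools/metrics.py build bu # build only bare-arm and unix x64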
"""
import collections, sys, re, subprocess
MAKE_FLAGS = ["-j3", "CFLAGS_EXTRA=-DNDEBUG"]
class PortData:
def __init__(self, name, dir, output, make_flags=None):
self.name = name
self.dir = dir
self.output = output
self.make_flags = make_flags
self.needs_mpy_cross = dir not in ("bare-arm", "minimal")
port_data = {
"b": PortData("bare-arm", "bare-arm", "build/firmware.elf"),
"m": PortData("minimal x86", "minimal", "build/firmware.elf"),
"u": PortData("unix x64", "unix", "micropython"),
"n": PortData("unix nanbox", "unix", "micropython-nanbox", "VARIANT=nanbox"),
"s": PortData("stm32", "stm32", "build-PYBV10/firmware.elf", "BOARD=PYBV10"),
"c": PortData("cc3200", "cc3200", "build/WIPY/release/application.axf", "BTARGET=application"),
"8": PortData("esp8266", "esp8266", "build-GENERIC/firmware.elf"),
"3": PortData("esp32", "esp32", "build-GENERIC/application.elf"),
"r": PortData("nrf", "nrf", "build-pca10040/firmware.elf"),
"d": PortData("samd", "samd", "build-ADAFRUIT_ITSYBITSY_M4_EXPRESS/firmware.elf"),
}
def syscmd(*args):
sys.stdout.flush()
a2 = []
for a in args:
if isinstance(a, str):
a2.append(a)
elif a:
a2.extend(a)
subprocess.check_call(a2)
def parse_port_list(args):
if not args:
return list(port_data.values())
else:
ports = []
for arg in args:
for port_char in arg:
try:
ports.append(port_data[port_char])
except KeyError:
print("unknown port:", port_char)
sys.exit(1)
return ports
def read_build_log(filename):
data = collections.OrderedDict()
lines = []
found_sizes = False
with open(filename) as f:
        for line in f:
line = line.strip()
if line.strip() == "COMPUTING SIZES":
found_sizes = True
elif found_sizes:
lines.append(line)
is_size_line = False
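    # The build log echoes the output of the "size" tool: a header line
    # starting with "text\t" followed by a numeric row whose last field is
    # the file name; the decimal columns are stored keyed by that name.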
for line in lines:
if is_size_line:
fields = line.split()
data[fields[-1]] = [int(f) for f in fields[:-2]]
is_size_line = False
else:
is_size_line = line.startswith("text\t ")
return data
def do_diff(args):
"""Compute the difference between firmware sizes."""
# Parse arguments.
error_threshold = None
if len(args) >= 2 and args[0] == "--error-threshold":
args.pop(0)
error_threshold = int(args.pop(0))
if len(args) != 2:
print("usage: %s diff [--error-threshold <x>] <out1> <out2>" % sys.argv[0])
sys.exit(1)
data1 = read_build_log(args[0])
data2 = read_build_log(args[1])
max_delta = None
for key, value1 in data1.items():
value2 = data2[key]
for port in port_data.values():
if key == "ports/{}/{}".format(port.dir, port.output):
name = port.name
break
data = [v2 - v1 for v1, v2 in zip(value1, value2)]
warn = ""
board = re.search(r"/build-([A-Za-z0-9_]+)/", key)
if board:
board = board.group(1)
else:
board = ""
if name == "cc3200":
delta = data[0]
percent = 100 * delta / value1[0]
if data[1] != 0:
warn += " %+u(data)" % data[1]
else:
delta = data[3]
percent = 100 * delta / value1[3]
if data[1] != 0:
warn += " %+u(data)" % data[1]
if data[2] != 0:
warn += " %+u(bss)" % data[2]
if warn:
warn = "[incl%s]" % warn
print("%11s: %+5u %+.3f%% %s%s" % (name, delta, percent, board, warn))
max_delta = delta if max_delta is None else max(max_delta, delta)
if error_threshold is not None and max_delta is not None:
if max_delta > error_threshold:
sys.exit(1)
def do_clean(args):
"""Clean ports."""
ports = parse_port_list(args)
print("CLEANING")
for port in ports:
syscmd("make", "-C", "ports/{}".format(port.dir), port.make_flags, "clean")
def do_build(args):
"""Build ports and print firmware sizes."""
ports = parse_port_list(args)
if any(port.needs_mpy_cross for port in ports):
print("BUILDING MPY-CROSS")
syscmd("make", "-C", "mpy-cross", MAKE_FLAGS)
print("BUILDING PORTS")
for port in ports:
syscmd("make", "-C", "ports/{}".format(port.dir), MAKE_FLAGS, port.make_flags)
do_sizes(args)
def do_sizes(args):
"""Compute and print sizes of firmware."""
ports = parse_port_list(args)
print("COMPUTING SIZES")
for port in ports:
syscmd("size", "ports/{}/{}".format(port.dir, port.output))
def main():
# Get command to execute
if len(sys.argv) == 1:
print("Available commands:")
for cmd in globals():
if cmd.startswith("do_"):
print(" {:9} {}".format(cmd[3:], globals()[cmd].__doc__))
sys.exit(1)
cmd = sys.argv.pop(1)
# Dispatch to desired command
try:
cmd = globals()["do_{}".format(cmd)]
except KeyError:
print("{}: unknown command '{}'".format(sys.argv[0], cmd))
sys.exit(1)
cmd(sys.argv[1:])
if __name__ == "__main__":
main()
|
alexeyraspopov/aiohttp-mongodb-example
|
server.py
|
Python
|
mit
| 1,357 | 0 |
import asyncio
from aiohttp.web import Application
from Todos import handlers
def create_server(loop, handler, host, port):
srv = loop.create_server(handler, host, port)
return loop.run_until_complete(srv)
def create_app(loop):
app = Application(loop=loop)
handler = app.make_handler()
return app, handler
def run_server():
loop = asyncio.get_event_loop()
app, handler = create_app(loop=loop)
server = create_server(loop=loop, handler=handler,
host='0.0.0.0', port=9000)
app.router.add_route('GET', '/todos', handlers.get_all_todos)
app.router.add_route('POST', '/todos', handlers.create_todos)
    app.router.add_route('PATCH', '/todos', handlers.update_todos)
app.router.add_route('DELETE', '/todos', handlers.remove_todos)
app.router.add_route('GET', '/todos/{id}', handlers.get_todo)
app.router.add_route('PATCH', '/todos/{id}', handlers.update_todo)
app.router.add_route('DELETE', '/todos/{id}', handlers.remove_todo)
try:
loop.run_forever()
except KeyboardInterrupt:
pass
    finally:
server.close()
loop.run_until_complete(server.wait_closed())
loop.run_until_complete(handler.finish_connections(1.0))
loop.run_until_complete(app.finish())
loop.close()
if __name__ == '__main__':
run_server()
|
jgerschler/ESL-Games
|
Camera Pistol/range-detector.py
|
Python
|
mit
| 1,679 | 0.004169 |
# This was pulled from one of the python/opencv sites appearing
# in a Google search. Need to find site and add attribution!
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# USAGE: You need to specify a filter and "only one" image source
#
# (python) range-detector --filter RGB --image /path/to/image.png
# or
# (python) range-detector --filter HSV --webcam
import cv2
import argparse
from operator import xor
def callback(value):
pass
def setup_trackbars(range_filter):
cv2.namedWindow("Trackbars", 0)
for i in ["MIN", "MAX"]:
v = 0 if i == "MIN" else 255
for j in range_filter:
cv2.createTrackbar("%s_%s" % (j, i), "Trackbars", v, 255, callback)
def get_trackbar_values(range_filter):
values = []
for i in ["MIN", "MAX"]:
for j in range_filter:
v = cv2.getTrackbarPos("%s_%s" % (j, i), "Trackbars")
values.append(v)
return values
def main():
range_filter = 'HSV'
camera = cv2.VideoCapture(1)
setup_trackbars(range_filter)
while True:
ret, image = camera.read()
if not ret:
break
if range_filter == 'RGB':
frame_to_thresh = image.copy()
else:
frame_to_thresh = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
v1_min, v2_min, v3_min, v1_max, v2_max, v3_max = get_trackbar_values(range_filter)
thresh = cv2.inRange(frame_to_thresh, (v1_min, v2_min, v3_min), (v1_max, v2_max, v3_max))
cv2.imshow("Original", image)
        cv2.imshow("Thresh", thresh)
        if cv2.waitKey(1) & 0xFF == ord('q'):
break
if __name__ == '__main__':
main()
|
theflockers/sober-filter
|
programs/webservice/__init__.py
|
Python
|
gpl-2.0
| 9,399 | 0.005001 |
#!/usr/bin/env python2.6
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
import cgi
import SocketServer
import ssl
import re
import setproctitle
import others.dict2xml as dict2xml
import sober.config
import sober.settings
import sober.rule
__version__ = 'Sober HTTP/1.0'
__service__ = 'sober'
class WSHandler(SimpleHTTPRequestHandler):
value = None
def load_blacklist(self):
return self.settings.get_blacklist()
def load_whitelist(self):
return {'item': self.value}
def load_settings(self):
return self.settings.get()
def return_error(self):
return 'error'
def do_POST(self):
self.do_GET()
def do_GET(self):
try:
path = self.path.strip().split('/')
if len(path) > 5 and self.command == 'GET':
self.value = path[5]
self.object_type = path[2]
resource = path[3]
resource = 'self.do_'+ resource.upper() + '()'
response = eval(resource)
self.send_ok_response(self.to_xml(response, resource))
elif self.command == 'POST':
self.action = path[3]
resource = path[2]
resource = 'self.do_'+ resource.upper() + '()'
response = eval(resource)
self.send_ok_response(self.to_xml(response, resource))
else:
self.send_ok_response(self.to_xml(self.error_data('missing_arguments'), 'webservices'))
except Exception, e:
self.send_ok_response(self.to_xml(self.error_data(str(e)), resource))
def do_SETTINGS(self):
settings = sober.settings.Settings().get(self.object_type, self.value)
if type(settings).__name__ == 'instance':
response = {'settings': {
'type': self.object_type,
'name': settings.get_cn()[0],
'surename': settings.get_sn()[0],
'uid': settings.get_uid()[0],
'homeDirectory': settings.get_homeDirectory()[0],
'mail': settings.get_mail()[0],
'soberMailConditions': settings.get_soberMailConditions(),
'soberMailVirusCheck': settings.get_soberMailVirusCheck()[0],
'soberMailVirusAction': settings.get_soberMailVirusAction()[0],
'soberMailSpamCheck': settings.get_soberMailSpamCheck()[0],
'soberMailSpamKillLevel': settings.get_soberMailSpamKillLevel()[0],
'soberMailSpamTagLevel': settings.get_soberMailSpamTagLevel()[0],
'soberMailSpamTag2Level': settings.get_soberMailSpamTag2Level()[0],
}
}
return response
return self.error_data('not_found')
def do_BLACKLIST(self):
settings = sober.settings.Settings().get(self.object_type, self.value)
rules = settings.get_soberMailRule()
blacklist = {}
for rule in rules:
if re.search("blacklist[0-9]+", rule[1]['cn'][0]):
i = 0
for cond in rule[1]['soberMailRuleCondition']:
cond = eval(cond)
blacklist['item' + str(i)] = cond[0]['From']
i = i + 1
response = {'blacklist': {'from': blacklist } }
return response
return self.error_data('not_found')
def do_WHITELIST(self):
settings = sober.settings.Settings().get(self.object_type, self.value)
try:
rules = settings.get_soberMailRule()
except AttributeError:
return self.error_data('not_found')
whitelist = {}
for rule in rules:
if re.search("whitelist[0-9]+", rule[1]['cn'][0]):
i = 0
for cond in rule[1]['soberMailRuleCondition']:
cond = eval(cond)
for addr in cond[0]['From']:
whitelist['item' + str(i)] = addr
i = i + 1
response = {'whitelist': {'from': whitelist } }
return response
return self.error_data('not_found')
def do_RULE(self):
# POST
if self.command == 'POST':
postvars = None
ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
length = int(self.headers.getheader('content-length'))
data = self.rfile.read(length)
if ctype == 'multipart/form-data':
postvars = cgi.parse_multipart(data, pdict)
elif ctype == 'application/x-www-form-urlencoded':
postvars = cgi.parse_qs(data, keep_blank_values=1)
name = postvars['name'][0]
direction = tuple(postvars['direction'])
sentence = {}
items = {}
conditions = {}
for key, val in postvars.iteritems():
reg = re.search(r'(item|condition)\[(.*)\]', key)
if reg:
i = int(reg.group(2))
if reg.group(1).strip() == 'item':
                        items[i] = tuple(val)
elif reg.group(1) == 'condition':
try:
parts = val[0].split(':')
conditions[i] = {parts[0]: { parts[1]: None}}
except:
conditions[i] = {val[0]: None}
temp = {}
for key, val in conditions.iteritems():
for skey, sval in val.iteritems():
if type(sval).__name__ == 'dict':
temp[skey] = {sval.keys()[0]: ('in', items[key])}
else:
temp[skey] = ('in', items[key])
sobermailrulecondition = '(%s)' % str(temp)
return {'rule': { 'name': name, 'directions': direction, 'conditions': sobermailrulecondition } }
# GET
rule = sober.rule.Rule().get(self.value)
name = rule.get_cn()[0]
directions = eval(rule.get_soberMailRuleDirection()[0])
actions = {}
conditions = {}
i = 0
for action in eval(rule.get_soberMailRuleAction()[0]):
actions['action' + str(i)] = action
i = i + 1
i = 0
x = 0
for condition in rule.get_soberMailRuleCondition():
cond = eval(condition)[0]
rtype = cond.keys()[0]
if not rtype in conditions:
conditions[rtype] = {}
if type(cond[rtype]).__name__ == 'tuple':
items = {}
if len(cond[rtype]) > 2 :
x = 0
for item in cond[rtype]:
items['item'+ str(x)] = item
x = x + 1
conditions[rtype] = items
elif len(cond[rtype]) == 1:
x = 0
for item in cond[rtype]:
items['item'+ str(x)] = item
x = x + 1
conditions[rtype] = items
else:
op = cond[rtype][0]
items = {}
x = 0
for item in cond[rtype][1]:
items['word'+ str(x)] = item
x = x + 1
conditions[rtype][op] = items
else:
for item in cond[rtype].iteritems():
if item[0] not in conditions[rtype]:
x = 0
conditions[rtype][item[0]] = {}
for word in item[1][1]:
if item[1][0] not in conditions[rtype][item[0]]:
conditions[rtype][item[0]][item[1][0]] = {}
conditions[rtype][item[0]][item[1][0]]['word' + str(x)] = word
x = x + 1
# end main conditions loop
i = i + 1
drt = {}
x = 0
for direction in directions:
|
nikitos/npui
|
netprofile/netprofile/pdf/__init__.py
|
Python
|
agpl-3.0
| 10,229 | 0.03257 |
#!/usr/bin/env python
# -*- coding: utf-8; tab-width: 4; indent-tabs-mode: t -*-
#
# NetProfile: PDF-related tables, utility functions etc.
# © Copyright 2015 Alex 'Unik' Unigovsky
#
# This file is part of NetProfile.
# NetProfile is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# NetProfile is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General
# Public License along with NetProfile. If not, see
# <http://www.gnu.org/licenses/>.
from __future__ import (
unicode_literals,
print_function,
absolute_import,
division
)
__all__ = (
'PAGE_ORIENTATIONS',
'PAGE_SIZES',
'TABLE_STYLE_DEFAULT'
)
import logging
import os
from reportlab.lib import (
colors,
enums,
pagesizes,
styles
)
from reportlab.lib.units import (
cm,
inch
)
from reportlab.pdfbase import (
pdfmetrics,
ttfonts
)
from reportlab.platypus import (
doctemplate,
frames,
tables
)
from pyramid.i18n import TranslationStringFactory
from netprofile.common.util import (
as_dict,
make_config_dict
)
logger = logging.getLogger(__name__)
_ = TranslationStringFactory('netprofile')
_pdfss = None
PAGE_SIZES = {
'4a0' : ('4A0 (DIN 476)', (168.2 * cm, 237.8 * cm)),
'2a0' : ('2A0 (DIN 476)', (118.9 * cm, 168.2 * cm)),
'a0' : ('A0 (ISO 216)', pagesizes.A0),
'a1' : ('A1 (ISO 216)', pagesizes.A1),
'a2' : ('A2 (ISO 216)', pagesizes.A2),
'a3' : ('A3 (ISO 216)', pagesizes.A3),
'a4' : ('A4 (ISO 216)', pagesizes.A4),
'a5' : ('A5 (ISO 216)', pagesizes.A5),
'a6' : ('A6 (ISO 216)', pagesizes.A6),
'a7' : ('A7 (ISO 216)', (pagesizes.A6[1] * 0.5, pagesizes.A6[0])),
'a8' : ('A8 (ISO 216)', (pagesizes.A6[0] * 0.5, pagesizes.A6[1] * 0.5)),
'b0' : ('B0 (ISO 216)', pagesizes.B0),
'b1' : ('B1 (ISO 216)', pagesizes.B1),
'b2' : ('B2 (ISO 216)', pagesizes.B2),
'b3' : ('B3 (ISO 216)', pagesizes.B3),
'b4' : ('B4 (ISO 216)', pagesizes.B4),
'b5' : ('B5 (ISO 216)', pagesizes.B5),
'b6' : ('B6 (ISO 216)', pagesizes.B6),
'b7' : ('B7 (ISO 216)', (pagesizes.B6[1] * 0.5, pagesizes.B6[0])),
	'b8' : ('B8 (ISO 216)', (pagesizes.B6[0] * 0.5, pagesizes.B6[1] * 0.5)),
'c0' : ('C0 (ISO 269)', (91.7 * cm, 129.7 * cm)),
	'c1' : ('C1 (ISO 269)', (64.8 * cm, 91.7 * cm)),
'c2' : ('C2 (ISO 269)', (45.8 * cm, 64.8 * cm)),
'c3' : ('C3 (ISO 269)', (32.4 * cm, 45.8 * cm)),
'c4' : ('C4 (ISO 269)', (22.9 * cm, 32.4 * cm)),
'c5' : ('C5 (ISO 269)', (16.2 * cm, 22.9 * cm)),
'c6' : ('C6 (ISO 269)', (11.4 * cm, 16.2 * cm)),
'c7' : ('C7 (ISO 269)', (8.1 * cm, 11.4 * cm)),
'c8' : ('C8 (ISO 269)', (5.7 * cm, 8.1 * cm)),
'e5' : ('E5 (SS 014711)', (15.5 * cm, 22 * cm)),
'g5' : ('G5 (SS 014711)', (16.9 * cm, 23.9 * cm)),
'f4' : ('F4', (21 * cm, 33 * cm)),
'a3p' : ('A3+', (32.9 * cm, 48.3 * cm)),
'dl' : ('DL (ISO 269)', (9.9 * cm, 21 * cm)),
'dle' : ('DLE (ISO 269)', (11 * cm, 22 * cm)),
'e4' : ('E4 (ISO 269)', (28 * cm, 40 * cm)),
'c6c5' : ('C6/C5 (ISO 269)', (11.4 * cm, 22.9 * cm)),
'jb0' : ('JIS B0', (103 * cm, 145.6 * cm)),
'jb1' : ('JIS B1', (72.8 * cm, 103 * cm)),
'jb2' : ('JIS B2', (51.5 * cm, 72.8 * cm)),
'jb3' : ('JIS B3', (36.4 * cm, 51.5 * cm)),
'jb4' : ('JIS B4', (25.7 * cm, 36.4 * cm)),
'jb5' : ('JIS B5', (18.2 * cm, 25.7 * cm)),
'jb6' : ('JIS B6', (12.8 * cm, 18.2 * cm)),
'jb7' : ('JIS B7', (9.1 * cm, 12.8 * cm)),
'jb8' : ('JIS B8', (6.4 * cm, 9.1 * cm)),
'letter' : ('Letter (ANSI A)', pagesizes.LETTER),
'h_letter' : ('Half Letter', (pagesizes.LETTER[1] * 0.5, pagesizes.LETTER[0])),
'exec' : ('Executive', (7 * inch, 10 * inch)),
'g_letter' : ('Government-Letter', (8 * inch, 10.5 * inch)),
'legal' : ('Legal', pagesizes.LEGAL),
'j_legal' : ('Junior Legal', (5 * inch, 8 * inch)),
'11by17' : ('Tabloid (ANSI B)', pagesizes.ELEVENSEVENTEEN),
'ansi_c' : ('ANSI C', (17 * inch, 22 * inch)),
'ansi_d' : ('ANSI D', (22 * inch, 34 * inch)),
'ansi_e' : ('ANSI E', (34 * inch, 44 * inch)),
'p1' : ('P1 (CAN 2-9.60M)', (56 * cm, 86 * cm)),
'p2' : ('P2 (CAN 2-9.60M)', (43 * cm, 56 * cm)),
'p3' : ('P3 (CAN 2-9.60M)', (28 * cm, 43 * cm)),
'p4' : ('P4 (CAN 2-9.60M)', (21.5 * cm, 28 * cm)),
'p5' : ('P5 (CAN 2-9.60M)', (14 * cm, 21.5 * cm)),
'p6' : ('P6 (CAN 2-9.60M)', (10.7 * cm, 14 * cm)),
'pli1' : ('Pliego', (70 * cm, 100 * cm)),
'pli2' : ('½ pliego', (50 * cm, 70 * cm)),
'pli4' : ('¼ pliego', (35 * cm, 50 * cm)),
'pli8' : ('⅛ pliego', (25 * cm, 35 * cm)),
'carta' : ('Carta', (21.6 * cm, 27.9 * cm)),
'oficio' : ('Oficio', (21.6 * cm, 33 * cm)),
'exttab' : ('Extra Tabloide', (30.48 * cm, 45.72 * cm))
}
PAGE_ORIENTATIONS = {
'portrait' : (_('Portrait'), pagesizes.portrait),
'landscape' : (_('Landscape'), pagesizes.landscape)
}
TABLE_STYLE_DEFAULT = tables.TableStyle((
('GRID', (0, 0), (-1, -1), 0.2, colors.dimgrey),
('TEXTCOLOR', (0, 0), (-1, 0), colors.black),
('BACKGROUND', (0, 0), (-1, 0), colors.HexColor(0xe6e6e6)),
('ROWBACKGROUNDS', (0, 1), (-1, -1), (colors.white, colors.HexColor(0xf5f5f5)))
))
class DefaultDocTemplate(doctemplate.BaseDocTemplate):
def __init__(self, filename, **kwargs):
pgsz = kwargs.pop('pagesize', 'a4')
if pgsz in PAGE_SIZES:
pgsz = PAGE_SIZES[pgsz][1]
else:
pgsz = pagesizes.A4
orient = kwargs.pop('orientation', 'portrait')
if orient in PAGE_ORIENTATIONS:
pgsz = PAGE_ORIENTATIONS[orient][1](pgsz)
kwargs['pagesize'] = pgsz
kwargs['creator'] = 'NetProfile'
req = kwargs.pop('request', None)
if req:
u = req.user
if u:
kwargs['author'] = (u.name_full + ' (' + u.login + ')').strip()
super(DefaultDocTemplate, self).__init__(filename, **kwargs)
fr_body = frames.Frame(
self.leftMargin,
self.bottomMargin,
self.width,
self.height,
id='body'
)
fr_left = frames.Frame(
self.leftMargin,
self.bottomMargin,
self.width / 2,
self.height,
rightPadding=12,
id='left'
)
fr_right = frames.Frame(
self.leftMargin + self.width / 2,
self.bottomMargin,
self.width / 2,
self.height,
leftPadding=12,
id='right'
)
self.addPageTemplates((
doctemplate.PageTemplate(id='default', pagesize=pgsz, frames=(fr_body,)), # onPage=callback
doctemplate.PageTemplate(id='2columns', pagesize=pgsz, frames=(fr_left, fr_right))
))
def _register_fonts(settings):
default_fontdir = settings.get('netprofile.fonts.directory', '')
default_family = settings.get('netprofile.fonts.default_family', 'tinos')
fontcfg = make_config_dict(settings, 'netprofile.fonts.family.')
fontcfg = as_dict(fontcfg)
for fname, cfg in fontcfg.items():
if 'normal' not in cfg:
continue
fname = cfg.get('name', fname)
fontdir = cfg.get('directory', default_fontdir)
pdfmetrics.registerFont(ttfonts.TTFont(
fname,
os.path.join(fontdir, cfg['normal'])
))
reg = { 'normal' : fname }
if 'bold' in cfg:
reg['bold'] = fname + '_b'
pdfmetrics.registerFont(ttfonts.TTFont(
reg['bold'],
os.path.join(fontdir, cfg['bold'])
))
else:
reg['bold'] = fname
if 'italic' in cfg:
reg['italic'] = fname + '_i'
pdfmetrics.registerFont(ttfonts.TTFont(
re
|
kocicjelena/python-docs-samples
|
tests/__init__.py
|
Python
|
apache-2.0
| 3,946 | 0 |
# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Common testing utilities between samples
"""
import __builtin__
import contextlib
import json
import os
import StringIO
import sys
import unittest
from google.appengine.datastore import datastore_stub_util
from google.appengine.ext import testbed
BUCKET_NAME_ENV = 'TEST_BUCKET_NAME'
PROJECT_ID_ENV = 'TEST_PROJECT_ID'
RESOURCE_PATH = os.path.join(
    os.path.abspath(os.path.dirname(__file__)), 'resources')
class mock_raw_input(object):
def __init__(self, list_):
self.i = 0
self.list_ = list_
def get_next_value(self, question):
ret = self.list_[self.i]
self.i += 1
return ret
def __enter__(self):
        self.raw_input_cache = __builtin__.raw_input
__builtin__.raw_input = self.get_next_value
def __exit__(self, exc_type, exc_value, traceback):
__builtin__.raw_input = self.raw_input_cache
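# Example (hypothetical): feed canned answers to code under test that calls
# raw_input(), restoring the real builtin on exit.
#
#   with mock_raw_input(['yes', '42']):
#       assert raw_input('continue? ') == 'yes'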
class CloudBaseTest(unittest.TestCase):
def setUp(self):
self.resource_path = RESOURCE_PATH
# A hack to prevent get_application_default from going GAE route.
self._server_software_org = os.environ.get('SERVER_SOFTWARE')
os.environ['SERVER_SOFTWARE'] = ''
# Constants from environment
test_bucket_name = os.environ.get(BUCKET_NAME_ENV, '')
test_project_id = os.environ.get(PROJECT_ID_ENV, '')
if not test_project_id or not test_bucket_name:
raise Exception('You need to define an env var "%s" and "%s" to '
'run the test.'
% (PROJECT_ID_ENV, BUCKET_NAME_ENV))
# Constants from resources/constants.json
with open(
os.path.join(RESOURCE_PATH, 'constants.json'),
'r') as constants_file:
self.constants = json.load(constants_file)
self.constants['projectId'] = test_project_id
self.constants['bucketName'] = test_bucket_name
self.constants['cloudStorageInputURI'] = (
self.constants['cloudStorageInputURI'] % test_bucket_name)
self.constants['cloudStorageOutputURI'] = (
self.constants['cloudStorageOutputURI'] % test_bucket_name)
def tearDown(self):
os.environ['SERVER_SOFTWARE'] = self._server_software_org
class DatastoreTestbedCase(unittest.TestCase):
"""A base test case for common setup/teardown tasks for test."""
def setUp(self):
"""Setup the datastore and memcache stub."""
# First, create an instance of the Testbed class.
self.testbed = testbed.Testbed()
# Then activate the testbed, which prepares the service stubs for
# use.
self.testbed.activate()
# Create a consistency policy that will simulate the High
# Replication consistency model.
self.policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
probability=0)
# Initialize the datastore stub with this policy.
self.testbed.init_datastore_v3_stub(consistency_policy=self.policy)
self.testbed.init_memcache_stub()
def tearDown(self):
self.testbed.deactivate()
@contextlib.contextmanager
def capture_stdout():
"""Capture stdout."""
fake_stdout = StringIO.StringIO()
old_stdout = sys.stdout
try:
sys.stdout = fake_stdout
yield fake_stdout
finally:
sys.stdout = old_stdout
|
LinuxChristian/home-assistant
|
tests/components/sensor/test_sonarr.py
|
Python
|
apache-2.0
| 34,600 | 0 |
"""The tests for the Sonarr platform."""
import unittest
import time
from datetime import datetime
import pytest
from homeassistant.components.sensor import sonarr
from tests.common import get_test_home_assistant
def mocked_exception(*args, **kwargs):
"""Mock exception thrown by requests.get."""
raise OSError
def mocked_requests_get(*args, **kwargs):
"""Mock requests.get invocations."""
class MockResponse:
"""Class to represent a mocked response."""
def __init__(self, json_data, status_code):
"""Initialize the mock response class."""
self.json_data = json_data
self.status_code = status_code
def json(self):
"""Return the json of the response."""
return self.json_data
today = datetime.date(datetime.fromtimestamp(time.time()))
url = str(args[0])
if 'api/calendar' in url:
return MockResponse([
{
"seriesId": 3,
"episodeFileId": 0,
"seasonNumber": 4,
"episodeNumber": 11,
"title": "Easy Com-mercial, Easy Go-mercial",
"airDate": str(today),
"airDateUtc": "2014-01-27T01:30:00Z",
"overview": "To compete with fellow “restaurateur,” Ji...",
"hasFile": "false",
"monitored": "true",
"sceneEpisodeNumber": 0,
"sceneSeasonNumber": 0,
"tvDbEpisodeId": 0,
"series": {
"tvdbId": 194031,
"tvRageId": 24607,
"imdbId": "tt1561755",
"title": "Bob's Burgers",
"cleanTitle": "bobsburgers",
"status": "continuing",
"overview": "Bob's Burgers follows a third-generation ...",
"airTime": "5:30pm",
"monitored": "true",
"qualityProfileId": 1,
"seasonFolder": "true",
"lastInfoSync": "2014-01-26T19:25:55.4555946Z",
"runtime": 30,
"images": [
{
"coverType": "banner",
"url": "http://slurm.trakt.us/images/bann.jpg"
},
{
"coverType": "poster",
"url": "http://slurm.trakt.us/images/poster00.jpg"
},
{
"coverType": "fanart",
"url": "http://slurm.trakt.us/images/fan6.jpg"
}
],
"seriesType": "standard",
"network": "FOX",
"useSceneNumbering": "false",
"titleSlug": "bobs-burgers",
"path": "T:\\Bob's Burgers",
"year": 0,
"firstAired": "2011-01-10T01:30:00Z",
"qualityProfile": {
"value": {
"name": "SD",
"allowed": [
{
"id": 1,
"name": "SDTV",
"weight": 1
},
{
"id": 8,
"name": "WEBDL-480p",
"weight": 2
},
{
"id": 2,
"name": "DVD",
"weight": 3
}
],
"cutoff": {
"id": 1,
"name": "SDTV",
"weight": 1
},
"id": 1
},
"isLoaded": "true"
},
"seasons": [
{
"seasonNumber": 4,
"monitored": "true"
},
{
"seasonNumber": 3,
"monitored": "true"
},
{
"seasonNumber": 2,
"monitored": "true"
},
{
"seasonNumber": 1,
"monitored": "true"
},
{
"seasonNumber": 0,
"monitored": "false"
}
],
"id": 66
},
"downloading": "false",
"id": 14402
}
], 200)
elif 'api/command' in url:
return MockResponse([
{
"name": "RescanSeries",
"startedOn": "0001-01-01T00:00:00Z",
"stateChangeTime": "2014-02-05T05:09:09.2366139Z",
"sendUpdatesToClient": "true",
"state": "pending",
"id": 24
}
], 200)
elif 'api/wanted/missing' in url or 'totalRecords' in url:
return MockResponse(
{
"page": 1,
"pageSize": 15,
"sortKey": "airDateUtc",
"sortDirection": "descending",
"totalRecords": 1,
"records": [
{
"seriesId": 1,
"episodeFileId": 0,
"seasonNumber": 5,
"episodeNumber": 4,
"title": "Archer Vice: House Call",
"airDate": "2014-02-03",
"airDateUtc": "2014-02-04T03:00:00Z",
"overview": "Archer has to stage an that ... ",
"hasFile": "false",
"monitored": "true",
"sceneEpisodeNumber": 0,
"sceneSeasonNumber": 0,
"tvDbEpisodeId": 0,
"absoluteEpisodeNumber": 50,
"series": {
"tv
|
dbId": 110381,
"tvRageId": 23354,
"imdbId": "tt1486217",
"title": "Archer (2009)",
"cleanTitle": "archer2009",
"status": "continuing",
"overview": "At ISIS, an international spy ...",
"airTime": "7:00pm",
"monitored": "true",
"qualityProfileId": 1,
"seasonFolder": "true",
"lastInfoSync": "2014-02-05T04:39:28.550495Z",
"runtime": 30,
"images": [
{
"coverType": "banner",
"url": "http://slurm.trakt.us//57.12.jpg"
},
{
"coverType": "poster",
"url": "http://slurm.trakt.u/57.12-300.jpg"
},
{
"coverType": "fanart",
"url": "http://slurm.trakt.us/image.12.jpg"
}
],
"seriesType": "standard",
"network": "FX",
"useSceneNumbering": "false",
|
maui-packages/calamares
|
src/modules/displaymanager/main.py
|
Python
|
gpl-3.0
| 17,415 | 0.002871 |
#!/usr/bin/env python3
# encoding: utf-8
# === This file is part of Calamares - <http://github.com/calamares> ===
#
# Copyright 2014, Philip Müller <philm@manjaro.org>
# Copyright 2014, Teo Mrnjavac <teo@kde.org>
#
# Calamares is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Calamares is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Calamares. If not, see <http://www.gnu.org/licenses/>.
import os
import libcalamares
def set_autologin(username, displaymanagers, root_mount_point):
""" Enables automatic login for the installed desktop managers """
if "mdm" in displaymanagers:
# Systems with MDM as Desktop Manager
mdm_conf_path = os.path.join(root_mount_point, "etc/mdm/custom.conf")
if os.path.exists(mdm_conf_path):
with open(mdm_conf_path, 'r') as mdm_conf:
text = mdm_conf.readlines()
with open(mdm_conf_path, 'w') as mdm_conf:
for line in text:
if '[daemon]' in line:
line = '[daemon]\nAutomaticLogin=%s\nAutomaticLoginEnable=True\n' % username
mdm_conf.write(line)
else:
with open(mdm_conf_path, 'w') as mdm_conf:
mdm_conf.write(
'# Calamares - Enable automatic login for user\n')
mdm_conf.write('[daemon]\n')
mdm_conf.write('AutomaticLogin=%s\n' % username)
mdm_conf.write('AutomaticLoginEnable=True\n')
if "gdm" in disp
|
laymanagers:
# Systems with GDM as Desktop Manager
gdm_conf_path = os.path.join(root_mount_point, "etc/gdm/custom.conf")
if os.path.exists(gdm_conf_path):
with open(gdm_conf_path, 'r') as gdm_conf:
text = gdm_conf.readlines()
with open(gdm_conf_path, 'w') as gdm_conf:
for line in text:
if '[daemon]' in line:
                    line = '[daemon]\nAutomaticLogin=%s\nAutomaticLoginEnable=True\n' % username
gdm_conf.write(line)
else:
with open(gdm_conf_path, 'w') as gdm_conf:
gdm_conf.write(
'# Calamares - Enable automatic login for user\n')
gdm_conf.write('[daemon]\n')
gdm_conf.write('AutomaticLogin=%s\n' % username)
gdm_conf.write('AutomaticLoginEnable=True\n')
if "kdm" in displaymanagers:
# Systems with KDM as Desktop Manager
kdm_conf_path = os.path.join(
root_mount_point, "usr/share/config/kdm/kdmrc")
text = []
if os.path.exists(kdm_conf_path):
with open(kdm_conf_path, 'r') as kdm_conf:
text = kdm_conf.readlines()
with open(kdm_conf_path, 'w') as kdm_conf:
for line in text:
if '#AutoLoginEnable=true' in line:
line = 'AutoLoginEnable=true\n'
if 'AutoLoginUser=' in line:
line = 'AutoLoginUser=%s\n' % username
kdm_conf.write(line)
else:
return "Cannot write KDM configuration file", "KDM config file %s does not exist" % kdm_conf_path
if "lxdm" in displaymanagers:
# Systems with LXDM as Desktop Manager
lxdm_conf_path = os.path.join(root_mount_point, "etc/lxdm/lxdm.conf")
text = []
if os.path.exists(lxdm_conf_path):
with open(lxdm_conf_path, 'r') as lxdm_conf:
text = lxdm_conf.readlines()
with open(lxdm_conf_path, 'w') as lxdm_conf:
for line in text:
if '# autologin=dgod' in line:
line = 'autologin=%s\n' % username
lxdm_conf.write(line)
else:
return "Cannot write LXDM configuration file", "LXDM config file %s does not exist" % lxdm_conf_path
if "lightdm" in displaymanagers:
# Systems with LightDM as Desktop Manager
        # Ideally, we should use configparser for the ini conf file,
        # but we just do a simple text replacement for now, as it
        # worksforme(tm). (An illustrative configparser sketch follows
        # this function.)
lightdm_conf_path = os.path.join(
root_mount_point, "etc/lightdm/lightdm.conf")
text = []
if os.path.exists(lightdm_conf_path):
with open(lightdm_conf_path, 'r') as lightdm_conf:
text = lightdm_conf.readlines()
with open(lightdm_conf_path, 'w') as lightdm_conf:
for line in text:
if '#autologin-user=' in line:
line = 'autologin-user=%s\n' % username
lightdm_conf.write(line)
else:
return "Cannot write LightDM configuration file", "LightDM config file %s does not exist" % lightdm_conf_path
if "slim" in displaymanagers:
# Systems with Slim as Desktop Manager
slim_conf_path = os.path.join(root_mount_point, "etc/slim.conf")
text = []
if os.path.exists(slim_conf_path):
with open(slim_conf_path, 'r') as slim_conf:
text = slim_conf.readlines()
with open(slim_conf_path, 'w') as slim_conf:
for line in text:
if 'auto_login' in line:
line = 'auto_login yes\n'
if 'default_user' in line:
line = 'default_user %s\n' % username
slim_conf.write(line)
else:
return "Cannot write SLIM configuration file", "SLIM config file %s does not exist" % slim_conf_path
if "sddm" in displaymanagers:
# Systems with Sddm as Desktop Manager
sddm_conf_path = os.path.join(root_mount_point, "etc/sddm.conf")
text = []
if os.path.exists(sddm_conf_path):
with open(sddm_conf_path, 'r') as sddm_conf:
text = sddm_conf.readlines()
with open(sddm_conf_path, 'w') as sddm_conf:
for line in text:
if 'User=' in line:
line = 'User={}\n'.format(username)
sddm_conf.write(line)
else:
return "Cannot write SDDM configuration file", "SDDM config file %s does not exist" % sddm_conf_path
return None
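# --- Editor's sketch, not part of the original module ---
# The LightDM branch above notes that configparser would be the cleaner way
# to edit these INI-style files; a minimal version of that idea follows.
# The "Seat:*" section name is an assumption (older LightDM releases used
# "SeatDefaults" instead).
def _set_lightdm_autologin_with_configparser(conf_path, username):
    import configparser
    parser = configparser.ConfigParser()
    parser.optionxform = str  # keep option names case-sensitive
    parser.read(conf_path)
    if not parser.has_section("Seat:*"):
        parser.add_section("Seat:*")
    parser.set("Seat:*", "autologin-user", username)
    with open(conf_path, 'w') as lightdm_conf:
        parser.write(lightdm_conf)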
def run():
""" Configure display managers """
# We acquire a list of displaymanagers, either from config or (overridden) from globalstorage.
# This module will try to set up (including autologin) all the displaymanagers in the list, in that specific order.
# Most distros will probably only ship one displaymanager.
# If a displaymanager is in the list but not installed, this module quits with error.
if "displaymanagers" in libcalamares.job.configuration:
displaymanagers = libcalamares.job.configuration["displaymanagers"]
if libcalamares.globalstorage.contains("displaymanagers"):
displaymanagers = libcalamares.globalstorage.value("displaymanagers")
if displaymanagers is None:
return "No display managers selected for the displaymanager module.",\
"The displaymanagers list is empty or undefined in both globalstorage and displaymanager.conf."
username = libcalamares.globalstorage.value("autologinUser")
root_mount_point = libcalamares.globalstorage.value("rootMountPoint")
# Setup slim
if "slim" in displaymanagers:
if not os.path.exists("%s/usr/bin/slim" % root_mount_point):
return "slim selected but not installed", ""
# Setup sddm
if "sddm" in displaymanagers:
if not os.path.exists("%s/usr/bin/sddm" % root_mount_point):
return "sddm selected but not installe
|
bridadan/yotta
|
yotta/lib/fsutils.py
|
Python
|
apache-2.0
| 2,102 | 0.004757 |
# Copyright 2014 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import os
import errno
import shutil
import platform
import stat
def mkDirP(path):
try:
os.makedirs(path)
    except OSError as exception:
if exception.errno != errno.EEXIST:
raise
def rmF(path):
try:
os.remove(path)
except OSError as exception:
if exception.errno != errno.ENOENT:
raise
def rmRf(path):
# we may have to make files writable before we can successfully delete
# them, to do this
def fixPermissions(fn, path, excinfo):
if os.access(path, os.W_OK):
raise
else:
os.chmod(path, stat.S_IWUSR)
fn(path)
try:
shutil.rmtree(path, onerror=fixPermissions)
except OSError as exception:
if 'cannot call rmtree on a symbolic link' in str(exception).lower():
os.unlink(path)
elif exception.errno == errno.ENOTDIR:
rmF(path)
elif exception.errno != errno.ENOENT:
raise
def fullySplitPath(path):
components = []
while True:
path, component = os.path.split(path)
if component != '':
components.append(component)
else:
if path != '':
components.append(path)
break
components.reverse()
return components
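# Editor's note (illustrative, not in the original file): fullySplitPath
# decomposes a path into every component, including a leading root, e.g.
#     fullySplitPath('/a/b/c')  -> ['/', 'a', 'b', 'c']
#     fullySplitPath('x/y.txt') -> ['x', 'y.txt']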
# The link-related functions are platform-dependent
links = __import__("fsutils_win" if os.name == 'nt' else "fsutils_posix", globals(), locals(), ['*'])
isLink = links.isLink
tryReadLink = links.tryReadLink
_symlink = links._symlink
realpath = links.realpath
# !!! FIXME: the logic in the "except" block below probably doesn't work in Windows
def symlink(source, link_name):
try:
# os.symlink doesn't update existing links, so need to rm first
rmF(link_name)
_symlink(source, link_name)
except OSError as exception:
if exception.errno != errno.EEXIST and (tryReadLink(link_name) != source):
raise
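# Editor's note (illustrative, not in the original file): because rmF()
# removes any existing link first, symlink() is effectively "create or
# repoint" and repeated calls are idempotent:
#     symlink('/tmp/target-a', '/tmp/link')  # creates the link
#     symlink('/tmp/target-b', '/tmp/link')  # repoints it, no EEXIST error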
|
mdmintz/SeleniumBase
|
seleniumbase/plugins/pytest_plugin.py
|
Python
|
mit
| 34,757 | 0 |
# -*- coding: utf-8 -*-
""" This is the pytest configuration file """
import colorama
import pytest
import sys
from seleniumbase import config as sb_config
from seleniumbase.core import log_helper
from seleniumbase.core import proxy_helper
from seleniumbase.fixtures import constants
def pytest_addoption(parser):
"""
This plugin adds the following command-line options to pytest:
--browser=BROWSER (The web browser to use. Default: "chrome".)
--settings-file=FILE (Override default SeleniumBase settings.)
--env=ENV (Set the test env. Access with "self.env" in tests.)
--data=DATA (Extra test data. Access with "self.data" in tests.)
--var1=DATA (Extra test data. Access with "self.var1" in tests.)
--var2=DATA (Extra test data. Access with "self.var2" in tests.)
--var3=DATA (Extra test data. Access with "self.var3" in tests.)
--user-data-dir=DIR (Set the Chrome user data directory to use.)
--server=SERVER (The Selenium Grid server/IP used for tests.)
--port=PORT (The Selenium Grid port used by the test server.)
--cap-file=FILE (The web browser's desired capabilities to use.)
--cap-string=STRING (The web browser's desired capabilities to use.)
--proxy=SERVER:PORT (Connect to a proxy server:port for tests.)
--proxy=USERNAME:PASSWORD@SERVER:PORT (Use authenticated proxy server.)
--agent=STRING (Modify the web browser's User-Agent string.)
--mobile (Use the mobile device emulator while running tests.)
--metrics=STRING (Set mobile "CSSWidth,CSSHeight,PixelRatio".)
--extension-zip=ZIP (Load a Chrome Extension .zip|.crx, comma-separated.)
--extension-dir=DIR (Load a Chrome Extension directory, comma-separated.)
--headless (Run tests headlessly. Default mode on Linux OS.)
--headed (Run tests with a GUI on Linux OS.)
--locale=LOCALE_CODE (Set the Language Locale Code for the web browser.)
--start-page=URL (The starting URL for the web browser when tests begin.)
--archive-logs (Archive old log files instead of deleting them.)
    --time-limit=SECONDS (Safely fail any test that exceeds the time limit.)
--slow (Slow down the automation. Faster than using Demo Mode.)
--demo (Slow down and visually see test actions as they occur.)
--demo-sleep=SECONDS (Set the wait time after Demo Mode actions.)
--highlights=NUM (Number of highlight animations for Demo Mode actions.)
--message-duration=SECONDS (The time length for Messenger alerts.)
--check-js (Check for JavaScript errors after page loads.)
--ad-block (Block some types of display ads after page loads.)
--block-images (Block images from loading during tests.)
--verify-delay=SECONDS (The delay before MasterQA verification checks.)
--disable-csp (Disable the Content Security Policy of websites.)
--enable-ws (Enable Web Security on Chrome.)
--enable-sync (Enable "Chrome Sync".)
--use-auto-ext (Use Chrome's automation extension.)
--swiftshader (Use Chrome's "--use-gl=swiftshader" feature.)
--incognito (Enable Chrome's Incognito mode.)
--guest (Enable Chrome's Guest mode.)
--devtools (Open Chrome's DevTools when the browser opens.)
--reuse-session / --rs (Reuse the browser session between tests.)
--crumbs (Delete all cookies between tests reusing a session.)
--maximize (Start tests with the web browser window maximized.)
--save-screenshot (Save a screenshot at the end of each test.)
--visual-baseline (Set the visual baseline for Visual/Layout tests.)
--timeout-multiplier=MULTIPLIER (Multiplies the default timeout values.)
"""
colorama.init(autoreset=True)
c1 = colorama.Fore.BLUE + colorama.Back.LIGHTCYAN_EX
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.MAGENTA + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
s_str = "SeleniumBase"
s_str = s_str.replace("SeleniumBase", c1 + "Selenium" + c2 + "Base" + cr)
s_str = s_str + cr + " " + c3 + "command-line options for pytest" + cr
parser = parser.getgroup('SeleniumBase', s_str)
parser.addoption('--browser',
action="store",
dest='browser',
type=str.lower,
choices=constants.ValidBrowsers.valid_browsers,
default=constants.Browser.GOOGLE_CHROME,
help="""Specifies the web browser to use. Default: Chrome.
If you want to use Firefox, explicitly indicate that.
Example: (--browser=firefox)""")
parser.addoption('--with-selenium',
action="store_true",
dest='with_selenium',
default=True,
help="""(DEPRECATED) Start tests with an open web browser.
(This is ALWAYS True now when importing BaseCase)""")
parser.addoption('--env',
action='store',
dest='environment',
type=str.lower,
choices=(
constants.Environment.QA,
constants.Environment.STAGING,
constants.Environment.DEVELOP,
constants.Environment.PRODUCTION,
constants.Environment.MASTER,
constants.Environment.LOCAL,
constants.Environment.TEST
),
default=constants.Environment.TEST,
help="The environment to
|
run the tests in.")
parser.addoption('--data',
dest='data',
default=None,
help='Extra data to pass to tests from the command line.')
parser.addoption('--var1',
dest='var1',
default=None,
help='Extra data to pass to tests from the command line.')
parser.addoption('--var2',
dest='var2',
default=None,
help='Extra data to pass to tests from the command line.')
parser.addoption('--var3',
dest='var3',
default=None,
help='Extra data to pass to tests from the command line.')
parser.addoption('--cap_file', '--cap-file',
dest='cap_file',
default=None,
help="""The file that stores browser desired capabilities
for BrowserStack, Sauce Labs, and other
remote web drivers to use.""")
parser.addoption('--cap_string', '--cap-string',
dest='cap_string',
default=None,
help="""The string that stores browser desired
capabilities for BrowserStack, Sauce Labs,
and other remote web drivers to use.
Enclose cap-string in single quotes.
Enclose parameter keys in double quotes.
Example: --cap-string='{"name":"test1","v":"42"}'""")
parser.addoption('--settings_file', '--settings-file', '--settings',
action='store',
dest='settings_file',
default=None,
help="""The file that stores key/value pairs for
overriding values in the
seleniumbase/config/settings.py file.""")
parser.addoption('--user_data_dir', '--user-data-dir',
dest='user_data_dir',
default=None,
help="""The Chrome User Data Directory to use. (Profile)
If the directory doesn't exist, it'll be created.""")
parser.addoption('--with-testing_base', '--with-testing-base',
action="store_true",
dest='with_testing_base',
default=True,
help="""Use to save logs and screenshots when tests fa
|
joopert/home-assistant
|
homeassistant/components/nmbs/sensor.py
|
Python
|
apache-2.0
| 8,620 | 0.000232 |
"""Get ride details and liveboard details for NMBS (Belgian railway)."""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_NAME,
CONF_SHOW_ON_MAP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "NMBS"
DEFAULT_ICON = "mdi:train"
DEFAULT_ICON_ALERT = "mdi:alert-octagon"
CONF_STATION_FROM = "station_from"
CONF_STATION_TO = "station_to"
CONF_STATION_LIVE = "station_live"
CONF_EXCLUDE_VIAS = "exclude_vias"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STATION_FROM): cv.string,
vol.Required(CONF_STATION_TO): cv.string,
vol.Optional(CONF_STATION_LIVE): cv.string,
vol.Optional(CONF_EXCLUDE_VIAS, default=False): cv.boolean,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean,
}
)
def get_time_until(departure_time=None):
"""Calculate the time between now and a train's departure time."""
if departure_time is None:
return 0
delta = dt_util.utc_from_timestamp(int(departure_time)) - dt_util.now()
return round((delta.total_seconds() / 60))
def get_delay_in_minutes(delay=0):
"""Get the delay in minutes from a delay in seconds."""
return round((int(delay) / 60))
def get_ride_duration(departure_time, arrival_time, delay=0):
"""Calculate the total travel time in minutes."""
duration = dt_util.utc_from_timestamp(
int(arrival_time)
) - dt_util.utc_from_timestamp(int(departure_time))
duration_time = int(round((duration.total_seconds() / 60)))
return duration_time + get_delay_in_minutes(delay)
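# Editor's worked example (not part of the component): the helpers above
# round to whole minutes, so for a 5400 s ride with a 120 s delay:
#     get_delay_in_minutes(120)                   -> 2
#     get_ride_duration(t0, t0 + 5400, delay=120) -> 90 + 2 = 92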
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NMBS sensor with iRail API."""
from pyrail import iRail
api_client = iRail()
name = config[CONF_NAME]
show_on_map = config[CONF_SHOW_ON_MAP]
station_from = config[CONF_STATION_FROM]
station_to = config[CONF_STATION_TO]
station_live = config.get(CONF_STATION_LIVE)
excl_vias = config[CONF_EXCLUDE_VIAS]
sensors = [
NMBSSensor(api_client, name, show_on_map, station_from, station_to, excl_vias)
]
if station_live is not None:
sensors.append(NMBSLiveBoard(api_client, station_live))
    add_entities(sensors, True)
class NMBSLiveBoard(Entity):
"""Get the next train from a station's liveboard."""
def __init__(self, api_client, live_station):
"""Initialize the sensor for getting liveboard data."""
self._station = live_station
self._api_client = api_client
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the sensor default name."""
return "NMBS Live"
@property
def icon(self):
"""Return the default icon or an alert icon if delays."""
if self._attrs and int(self._attrs["delay"]) > 0:
return DEFAULT_ICON_ALERT
return DEFAULT_ICON
@property
def state(self):
"""Return sensor state."""
return self._state
@property
def device_state_attributes(self):
"""Return the sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["delay"])
departure = get_time_until(self._attrs["time"])
attrs = {
"departure": f"In {departure} minutes",
"extra_train": int(self._attrs["isExtra"]) > 0,
"vehicle_id": self._attrs["vehicle"],
"monitored_station": self._station,
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
def update(self):
"""Set the state equal to the next departure."""
liveboard = self._api_client.get_liveboard(self._station)
next_departure = liveboard["departures"]["departure"][0]
self._attrs = next_departure
self._state = "Track {} - {}".format(
next_departure["platform"], next_departure["station"]
)
class NMBSSensor(Entity):
"""Get the the total travel time for a given connection."""
def __init__(
self, api_client, name, show_on_map, station_from, station_to, excl_vias
):
"""Initialize the NMBS connection sensor."""
self._name = name
self._show_on_map = show_on_map
self._api_client = api_client
self._station_from = station_from
self._station_to = station_to
self._excl_vias = excl_vias
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return "min"
@property
def icon(self):
"""Return the sensor default icon or an alert icon if any delay."""
if self._attrs:
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
if delay > 0:
return "mdi:alert-octagon"
return "mdi:train"
@property
def device_state_attributes(self):
"""Return sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
departure = get_time_until(self._attrs["departure"]["time"])
attrs = {
"departure": f"In {departure} minutes",
"destination": self._station_to,
"direction": self._attrs["departure"]["direction"]["name"],
"platform_arriving": self._attrs["arrival"]["platform"],
"platform_departing": self._attrs["departure"]["platform"],
"vehicle_id": self._attrs["departure"]["vehicle"],
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if self._show_on_map and self.station_coordinates:
attrs[ATTR_LATITUDE] = self.station_coordinates[0]
attrs[ATTR_LONGITUDE] = self.station_coordinates[1]
if self.is_via_connection and not self._excl_vias:
via = self._attrs["vias"]["via"][0]
attrs["via"] = via["station"]
attrs["via_arrival_platform"] = via["arrival"]["platform"]
attrs["via_transfer_platform"] = via["departure"]["platform"]
attrs["via_transfer_time"] = get_delay_in_minutes(
via["timeBetween"]
) + get_delay_in_minutes(via["departure"]["delay"])
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def station_coordinates(self):
"""Get the lat, long coordinates for station."""
if self._state is None or not self._attrs:
return []
latitude = float(self._attrs["departure"]["stationinfo"]["locationY"])
longitude = float(self._attrs["departure"]["stationinfo"]["locationX"])
return [latitude, longitude]
@property
def is_via_connection(self):
"""Return whether the connection goes through another station."""
if not self._attrs:
return False
return "vias" in self._attrs and int(self._attrs["vias"]["number"]) > 0
def update(self):
"""Set the state to the duration of a connection."""
connections = self._api_client.get_connections(
self._station_from, self._station_to
)
if int(connections["connection"][0]["departure"]["left"]) > 0:
next_connection = connections["connection"][1]
else:
next_connection = connections["connection"][0]
self._attrs = next_connection
|
zaresdelweb/tecnoservicio
|
tecnoservicio/ordenes/migrations/0009_auto_20150513_1841.py
|
Python
|
bsd-3-clause
| 449 | 0.002227 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('ordenes', '0008_tecnico_orden'),
]
operations = [
migrations.AlterField(
model_name='concepto',
name='nombre',
field=models.CharField(max_length=100, null=True, verbose_name=b'Concepto', blank=True),
),
]
|
huran2014/huran.github.io
|
wot_gateway/usr/lib/python2.7/email/mime/application.py
|
Python
|
gpl-2.0
| 1,256 | 0 |
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Keith Dart
# Contact: email-sig@python.org
"""Class representing application/* type MIME documents."""
__all__ = ["MIMEApplication"]
from email import encoders
from email.mime.nonmultipart import MIMENonMultipart
class MIMEApplication(MIMENonMultipart):
"""Class for generating application/* MIME documents."""
    def __init__(self, _data, _subtype='octet-stream',
_encoder=encoders.encode_base64, **_params):
"""Create an application/* type MIME document.
_data is a string containing the raw application data.
_subtype is the MIME content type subtype, defaulting to
'octet-stream'.
_encoder is a function which will perform the actual encoding for
        transport of the application data, defaulting to base64 encoding.
Any additional keyword arguments are passed to the base class
constructor, which turns them into parameters on the Content-Type
header.
"""
if _subtype is None:
raise TypeError('Invalid application MIME subtype')
MIMENonMultipart.__init__(self, 'application', _subtype, **_params)
self.set_payload(_data)
_encoder(self)
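# Editor's usage sketch, not part of the stdlib module; the payload and
# file name are hypothetical. Extra keyword arguments become parameters
# on the Content-Type header:
if __name__ == '__main__':
    part = MIMEApplication('%PDF-1.4 ...', _subtype='pdf', name='report.pdf')
    print part['Content-Type']  # -> application/pdf; name="report.pdf"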
|
dslackw/slpkg
|
slpkg/repolist.py
|
Python
|
gpl-3.0
| 2,957 | 0 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# repolist.py file is part of slpkg.
# Copyright 2014-2021 Dimitris Zlatanidis <d.zlatanidis@gmail.com>
# All rights reserved.
# Slpkg is a user-friendly package manager for Slackware installations
# https://gitlab.com/dslackw/slpkg
# Slpkg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from slpkg.messages import Msg
from slpkg.repositories import Repo
from slpkg.__metadata__ import MetaData as _meta_
class RepoList:
"""List of repositories
"""
def __init__(self):
self.meta = _meta_
self.green = self.meta.color["GREEN"]
self.red = self.meta.color["RED"]
self.grey = self.meta.color["GREY"]
self.endc = self.meta.color["ENDC"]
self.msg = Msg()
self.all_repos = Repo().default_repository()
self.all_repos["slack"] = Repo().slack()
self.all_repos.update(Repo().custom_repository())
def repos(self):
"""View or enabled or disabled repositories
"""
def_cnt, cus_cnt = 0, 0
self.msg.template(78)
print("{0}{1}{2}{3}{4}{5}{6}".format(
"| Repo id", " " * 2,
"Repo URL", " " * 44,
"Default", " " * 3,
"Status"))
self.msg.template(78)
for repo_id, repo_URL in sorted(self.all_repos.items()):
status, COLOR = "disabled", self.red
default = "yes"
if len(repo_URL) > 49:
repo_URL = repo_URL[:48] + "~"
if repo_id in self.meta.repositories:
def_cnt += 1
status, COLOR = "enabled", self.green
if repo_id not in self.meta.default_repositories:
cus_cnt += 1
default = "no"
print(" {0}{1}{2}{3}{4}{5}{6}{7:>8}{8}".format(
repo_id, " " * (9 - len(repo_id)),
repo_URL, " " * (52 - len(repo_URL)),
default, " " * (8 - len(default)),
COLOR, status, self.endc))
print("\nRepositories summary")
print("=" * 79)
print(f"{self.grey}{def_cnt}/{len(self.all_repos)} enabled default "
f"repositories and {cus_cnt} custom.")
print("Edit the file '/etc/slpkg/repositories.conf' for enable "
"and disable default\nrepositories or run 'slpkg "
f"repo-enable' command.{self.endc}")
|
snyderr/robotframework
|
src/robot/reporting/stringcache.py
|
Python
|
apache-2.0
| 1,574 | 0 |
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robot.utils import OrderedDict, compress_text
class StringIndex(int):
pass
class StringCache(object):
_compress_threshold = 80
_use_compressed_threshold = 1.1
_zero_index = StringIndex(0)
def __init__(self):
self._cache = OrderedDict({'*': self._zero_index})
def add(self, text):
if not text:
return self._zero_index
text = self._encode(text)
if text not in self._cache:
self._cache[text] = StringIndex(len(self._cache))
return self._cache[text]
def _encode(self, text):
raw = self._raw(text)
if raw in self._cache or len(raw) < self._compress_threshold:
return raw
compressed = compress_text(text)
if len(compressed) * self._use_compressed_threshold < len(raw):
return compressed
return raw
def _raw(self, text):
return '*'+text
def dump(self):
return tuple(self._cache)
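# Editor's usage sketch (not part of the original module): strings are
# deduplicated and mapped to integer indices into the dump() tuple; the
# '*' prefix marks raw (uncompressed) entries.
if __name__ == '__main__':
    cache = StringCache()
    first = cache.add('keyword')
    second = cache.add('keyword')  # same text -> same index, stored once
    assert first == second == 1
    assert cache.dump() == ('*', '*keyword')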
|
lwldcr/keyboardman
|
common/const.py
|
Python
|
gpl-3.0
| 214 | 0.009346 |
# -*- coding: utf-8 -*-
__author__ = 'LIWEI240'
"""
Constants definition
"""
class Const(object):
class RetCode(object):
OK = 0
        InvalidParam = -1
NotExist = -2
ParseError = -3
|
KineticCookie/mist
|
examples-python/simple_streaming.py
|
Python
|
apache-2.0
| 1,188 | 0.004209 |
from mist.mist_job import *
class SimpleStreaming(MistJob, WithStreamingContext, WithPublisher):
def execute(self, parameters):
import time
def takeAndPublish(time, rdd):
taken = rdd.take(11)
self.publisher.publish("-------------------------------------------")
self.publisher.publish("Time: %s" % time)
self.publisher.publish("-------------------------------------------")
self.publisher.publish(str(taken))
ssc = self.streaming_context
type(ssc)
rddQueue = []
for i in range(500):
rddQueue += [ssc.sparkContext.parallelize([j for j in range(1, 1001)], 10)]
# Create the QueueInputDStream and use it do some processing
inputStream = ssc.queueStream(rddQueue)
mappedStream = inputStream.map(lambda x: (x % 10, 1))
reducedStream = mappedStream.reduceByKey(lambda a, b: a + b)
#reducedStream.pprint()
        reducedStream.foreachRDD(takeAndPublish)
ssc.start()
time.sleep(15)
ssc.stop(stopSparkContext=False, stopGraceFully=False)
result = "success"
return {"result": result}
|
ufjfeng/leetcode-jf-soln
|
python/189_rotate_array.py
|
Python
|
mit
| 1,252 | 0.003994 |
"""
Rotate an array of n elements to the right by k steps.
For example, with n = 7 and k = 3, the array [1,2,3,4,5,6,7] is rotated to
[5,6,7,1,2,3,4].
Note:
Try to come up as many solutions as you can, there are at least 3 different
ways to solve this problem.
Hint:
Could you do it in-place with O(1) extra space?
Related problem: Reverse Words in a String II
Credits:
Special thanks to @Freezen for adding this problem and creating all test
cases.
Show Company Tags
Show Tags
Show Similar Problems
"""
class Solution(object):
def rotate(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: void Do not return anything, modify nums in-place instead.
"""
        n = len(nums)
        k %= n
        nums[:] = nums[n-k:] + nums[:n-k]
"""
Note:
    def rotate(self, nums, k):
n = len(nums)
k %= n
self.reverse(nums, 0, n - k)
self.reverse(nums, n - k, n)
self.reverse(nums, 0, n)
    def reverse(self, nums, start, end):
for x in range(start, (start + end) / 2):
nums[x] ^= nums[start + end - x - 1]
nums[start + end - x - 1] ^= nums[x]
nums[x] ^= nums[start + end - x - 1]
"""
|
gangadharkadam/johnfrappe
|
frappe/website/render.py
|
Python
|
mit
| 4,424 | 0.028707 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import cstr
import mimetypes, json
from werkzeug.wrappers import Response
from frappe.website.context import get_context
from frappe.website.utils import scrub_relative_urls, get_home_page, can_cache, delete_page_cache
from frappe.website.permissions import clear_permissions
from frappe.website.router import clear_sitemap
class PageNotFoundError(Exception): pass
def render(path, http_status_code=None):
"""render html page"""
path = resolve_path(path.strip("/"))
try:
data = render_page(path)
except frappe.DoesNotExistError, e:
doctype, name = get_doctype_from_path(path)
if doctype and name:
path = "print"
frappe.local.form_dict.doctype = doctype
frappe.local.form_dict.name = name
elif doctype:
path = "list"
frappe.local.form_dict.type = doctype
else:
path = "404"
http_status_code = e.http_status_code
try:
data = render_page(path)
except frappe.PermissionError, e:
data, http_status_code = render_403(e, path)
except Exception:
path = "error"
data = render_page(path)
http_status_code = 500
return build_response(path, data, http_status_code or 200)
def render_403(e, pathname):
path = "message"
frappe.local.message = """<p><strong>{error}</strong></p>
<p>
<a href="/login?redirect-to=/{pathname}" class="btn btn-primary>{login}</a>
</p>""".format(error=cstr(e), login=_("Login"), pathname=pathname)
frappe.local.message_title = _("Not Permitted")
return render_page(path), e.http_status_code
def get_doctype_from_path(path):
doctypes = frappe.db.sql_list("select name from tabDocType")
parts = path.split("/")
doctype = parts[0]
	name = parts[1] if len(parts) > 1 else None
if doctype in doctypes:
return doctype, name
# try scrubbed
doctype = doctype.replace("_", " ").title()
if doctype in doctypes:
return doctype, name
return None, None
def build_response(path, data, http_status_code):
# build response
response = Response()
	response.data = set_content_type(response, data, path)
response.status_code = http_status_code
response.headers[b"X-Page-Name"] = path.encode("utf-8")
response.headers[b"X-From-Cache"] = frappe.local.response.from_cache or False
return response
def render_page(path):
"""get page html"""
cache_key = ("page_context:{}" if is_ajax() else "page:{}").format(path)
out = None
# try memcache
if can_cache():
out = frappe.cache().get_value(cache_key)
if out and is_ajax():
out = out.get("data")
if out:
frappe.local.response.from_cache = True
return out
return build(path)
def build(path):
if not frappe.db:
frappe.connect()
build_method = (build_json if is_ajax() else build_page)
try:
return build_method(path)
except frappe.DoesNotExistError:
hooks = frappe.get_hooks()
if hooks.website_catch_all:
path = hooks.website_catch_all[0]
return build_method(path)
else:
raise
def build_json(path):
return get_context(path).data
def build_page(path):
context = get_context(path)
html = frappe.get_template(context.base_template_path).render(context)
html = scrub_relative_urls(html)
if can_cache(context.no_cache):
frappe.cache().set_value("page:" + path, html)
return html
def is_ajax():
return getattr(frappe.local, "is_ajax", False)
def resolve_path(path):
if not path:
path = "index"
if path.endswith('.html'):
path = path[:-5]
if path == "index":
path = get_home_page()
return path
def set_content_type(response, data, path):
if isinstance(data, dict):
response.headers[b"Content-Type"] = b"application/json; charset: utf-8"
data = json.dumps(data)
return data
response.headers[b"Content-Type"] = b"text/html; charset: utf-8"
if "." in path:
content_type, encoding = mimetypes.guess_type(path)
if not content_type:
raise frappe.UnsupportedMediaType("Cannot determine content type of {}".format(path))
response.headers[b"Content-Type"] = content_type.encode("utf-8")
return data
def clear_cache(path=None):
if path:
delete_page_cache(path)
else:
clear_sitemap()
frappe.clear_cache("Guest")
clear_permissions()
for method in frappe.get_hooks("website_clear_cache"):
frappe.get_attr(method)(path)
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-servicebus/tests/test_azure_mgmt_servicebus_check_name_availability.py
|
Python
|
mit
| 1,330 | 0.003765 |
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import unittest
import azure.mgmt.servicebus.models
from azure.mgmt.servicebus.models import SBNamespace
from azure.common.credentials import ServicePrincipalCredentials
from devtools_testutils import AzureMgmtTestCase, ResourceGroupPreparer
class MgmtServiceBusTest(AzureMgmtTestCase):
    def setUp(self):
super(MgmtServiceBusTest, self).setUp()
self.servicebus_client = self.create_mgmt_client(
azure.mgmt.servicebus.ServiceBusManagementClient
)
def process(self, result):
pass
@ResourceGroupPreparer()
def test_sb_namespace_available(self, resource_group, location):
# Check the namespace availability
availabilityresult = self.servicebus_client.namespaces.check_name_availability_method("Testingthenamespacenameforpython")
self.assertEqual(availabilityresult.name_available, True)
#------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
|
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/psrc/household_x_zone/travel_time_hbw_am_drive_alone_from_home_to_work_alt.py
|
Python
|
gpl-2.0
| 2,397 | 0.01627 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from urbansim.abstract_variables.abstract_travel_time_variable import abstract_travel_time_variable
class travel_time_hbw_am_drive_alone_from_home_to_work_alt(abstract_travel_time_variable):
"""travel_time_hbw_am_drive_alone_from_home_to_work"""
agent_zone_id = "psrc.household.home_zone_id_from_grid_id"
location_zone_id = "urbansim.zone.zone_id"
travel_data_attribute = "urbansim.travel_data.am_single_vehicle_to_work_travel_time"
from numpy import ma, array
from opus_core.tests import opus_unittest
from urbansim.variable_test_toolbox import VariableTestToolbox
from psrc.opus_package_info import package
from urbansim.datasets.zone_dataset import ZoneDataset
from urbansim.datasets.household_dataset import HouseholdDataset
from psrc.datasets.person_x_zone_dataset import PersonXZoneDataset
from psrc.datasets.person_dataset import PersonDataset
class Tests(opus_unittest.OpusTestCase):
variable_name = "psrc.household_x_zone.travel_time_hbw_am_drive_alone_from_home_to_work_alt"
def test_my_inputs(self):
values = VariableTestToolbox().compute_variable(self.variable_name, \
{
"household":{
"household_id":array([1,2,3,4,5]),
"home_zone_id_from_grid_id":array([3, 1, 1, 1, 2]),
},
"zone":{
"zone_id":array([1, 2, 3]),
},
"travel_data":{
"from_zone_id": array([3, 3, 1, 1, 1, 2, 2, 3, 2]),
"to_zone_id": array([1, 3, 1, 3, 2, 1, 3, 2, 2]),
"am_single_vehicle_to_work_travel_time":array([1.1, 2.2, 3.3, 4.4, 0.5, 0.7, 8.7, 7.8, 1.0])}},
dataset = "household_x_zone")
default_value = travel_time_hbw_am_drive_alone_from_home_to_work_alt.default_value
should_be = array([[1.1, 7.8, 2.2],
[3.3, 0.5, 4.4], [3.3, 0.5, 4.4],
[3.3, 0.5, 4.4], [0.7, 1.0, 8.7]])
self.assertEqual(ma.allclose(values, should_be, rtol=1e-3), \
True, msg = "Error in " + self.variable_name)
if __name__=='__main__':
opus_unittest.main()
|
lbybee/reddit_spelling_index
|
reddit_db_scraper.py
|
Python
|
gpl-2.0
| 2,876 | 0.004868 |
from pymongo import MongoClient
import json
import requests
import time
from datetime import datetime
def subredditInfo(sr, limit=100, sorting="top", period="day",
user_agent="ChicagoSchool's scraper", **kwargs):
"""retrieves X (max 100) amount of stories in a subreddit
'sorting' is whether or not the sorting of the reddit should be customized or not,
if it is: Allowed passing params/queries such as t=hour, week, month, year or all"""
#query to send
parameters = {"limit": limit,}
parameters.update(kwargs)
url = "http://www.reddit.com/r/%s/%s.json?limit=%d&t=%s" % (sr, sorting, limit, period)
r = requests.get(url, headers={"user-agent": user_agent})
j = json.loads(r.text)
#return list of stories
stories = []
for story in j["data"]["children"]:
stories.append(story)
return stories
def extractContent(link, sub, limit=100, sorting="top",
user_agent="
|
ChicagoSchool's scraper", **kwargs):
url = "http://www.reddit.com/%s.json?sort=%s&limit=%d" % (link, sorting, limit)
r = requests.get(url, headers={"user-agent": user_agent})
j = json.loads(r.text)
date = datetime.fromtimestamp(j[0]["data"]["children"][0]["data"]["created"])
db_data = {"date": date, "link": link, "subreddit": sub, "content": j}
return db_data
def dbScraper(db_n, col_n, sub_l):
"""scrapes
|
all the threads for a subreddit and stores them in a
mongodb db"""
m_ind = 0
t_f = datetime.now()
sub_ln = len(sub_l)
client = MongoClient()
db = client[db_n]
col = db[col_n]
while True:
t_1 = datetime.now()
for i, s in enumerate(sub_l):
try:
sub_info = subredditInfo(s)
except Exception as e:
print e
time.sleep(300)
sub_info = subredditInfo(s)
time.sleep(2)
sub_info_ln = len(sub_info)
for j, l in enumerate(sub_info):
link = l["data"]["permalink"]
try:
content = extractContent(link, s)
col.insert(content)
except Exception as e:
print e
time.sleep(60)
try:
content = extractContent(link, s)
col.insert(content)
except Exception as e:
print e
time.sleep(300)
print i * 100. / sub_ln, j * 100. / sub_info_ln, m_ind, i, j, datetime.now() - t_1, datetime.now() - t_f
time.sleep(2)
# now we wait until a full day has passed since we started our search
t_diff = datetime.now() - t_1
while t_diff.days < 1:
time.sleep(60)
t_diff = datetime.now() - t_1
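# Editor's invocation sketch (not part of the original script): database,
# collection and subreddit names are hypothetical, a local MongoDB must be
# running, and dbScraper() loops indefinitely (one pass per day).
if __name__ == '__main__':
    dbScraper('reddit', 'threads', ['python', 'askscience'])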
|
SMALLplayer/smallplayer-image-creator
|
storage/.xbmc/addons/script.module.elementtree/lib/elementtree/ElementInclude.py
|
Python
|
gpl-2.0
| 5,051 | 0.00099 |
#
# ElementTree
# $Id: ElementInclude.py 3225 2007-08-27 21:32:08Z fredrik $
#
# limited xinclude support for element trees
#
# history:
# 2003-08-15 fl created
# 2003-11-14 fl fixed default loader
#
# Copyright (c) 2003-2004 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2007 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
##
# Limited XInclude support for the ElementTree package.
##
import ElementTree
def copy(elem):
e = ElementTree.Element(elem.tag, elem.attrib)
e.text = elem.text
e.tail = elem.tail
e[:] = elem
return e
XINCLUDE = "{http://www.w3.org/2001/XInclude}"
XINCLUDE_INCLUDE = XINCLUDE + "include"
XINCLUDE_FALLBACK = XINCLUDE + "fallback"
##
# Fatal include error.
class FatalIncludeError(SyntaxError):
pass
##
# Default loader. This loader reads an included resource from disk.
#
# @param href Resource reference.
# @param parse Parse mode. Either "xml" or "text".
# @param encoding Optional text encoding.
# @return The expanded resource. If the parse mode is "xml", this
# is an ElementTree instance. If the parse mode is "text", this
# is a Unicode string. If the loader fails, it can return None
# or raise an IOError exception.
# @throws IOError If the loader fails to load the resource.
def default_loader(href, parse, encoding=None):
file = open(href)
if parse == "xml":
data = ElementTree.parse(file).getroot()
else:
data = file.read()
if encoding:
data = data.decode(encoding)
file.close()
return data
##
# Expand XInclude directives.
#
# @param elem Root element.
# @param loader Optional resource loader. If omitted, it defaults
# to {@link default_loader}. If given, it should be a callable
# that implements the same interface as <b>default_loader</b>.
# @throws FatalIncludeError If the function fails to include a given
# resource, or if the tree contains malformed XInclude elements.
# @throws IOError If the function fails to load a given resource.
def include(elem, loader=None):
if loader is None:
loader = default_loader
# look for xinclude elements
i = 0
while i < len(elem):
e = elem[i]
if e.tag == XINCLUDE_INCLUDE:
# process xinclude directive
href = e.get("h
|
ref")
parse = e.get("parse", "xml")
if parse == "xml":
node = loader(href, parse)
if node is None:
raise FatalIncludeError(
"cannot load %r as %r" % (href, parse)
)
node = copy(node)
if e.tail:
node.tail = (node.tail or "") + e.tail
elem[i] = node
elif parse == "text":
text = loader(href, parse, e.get("encoding"))
if text is None:
raise FatalIncludeError(
"cannot load %r as %r" % (href, parse)
)
if i:
node = elem[i-1]
node.tail = (node.tail or "") + text
else:
elem.text = (elem.text or "") + text + (e.tail or "")
del elem[i]
continue
else:
raise FatalIncludeError(
"unknown parse type in xi:include tag (%r)" % parse
)
elif e.tag == XINCLUDE_FALLBACK:
raise FatalIncludeError(
"xi:fallback tag must be child of xi:include (%r)" % e.tag
)
else:
include(e, loader)
i = i + 1
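##
# Editor's usage sketch (not part of the original module): expand the
# XInclude directives of a parsed tree in place, using the default
# disk-based loader ("document.xml" is a hypothetical input file).
#
#     tree = ElementTree.parse("document.xml")
#     include(tree.getroot())
#     tree.write("expanded.xml")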
|
reverbrain/elliptics
|
recovery/elliptics_recovery/types/__init__.py
|
Python
|
lgpl-3.0
| 769 | 0 |
# =============================================================================
# 2013+ Copyright (c) Alexey Ivanov <rbtz@ph34r.me>
# All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# =============================================================================
from __future__ import absolute_import
|
galaxy-genome-annotation/python-apollo
|
arrow/commands/cmd_remote.py
|
Python
|
mit
| 643 | 0 |
import click
from arrow.commands.remote.add_organism import cli as add_organism
from arrow.commands.remote.add_track import cli as add_track
from arrow.commands.remote.delete_organism import cli as delete_organism
from arrow.commands.remote.delete_track import cli as delete_track
from arrow.commands.remote.update_organism import cli as update_organism
from arrow.commands.remote.update_track import cli as update_track
@click.group()
def cli():
pass
cli.add_command(add_organism)
cli.add_command(add_track)
cli.add_command(delete_organism)
cli.add_command(delete_track)
cli.add_command(update_organism)
cli.add_command(update_track)
|
robwarm/gpaw-symm
|
gpaw/test/au02_absorption.py
|
Python
|
gpl-3.0
| 2,114 | 0.003784 |
import numpy as np
from ase import Atoms
from gpaw import GPAW, FermiDirac
from gpaw.response.df import DielectricFunction
from gpaw.test import equal, findpeak
GS = 1
ABS = 1
if GS:
cluster = Atoms('Au2', [(0, 0, 0), (0, 0, 2.564)])
cluster.set_cell((6, 6, 6), scale_atoms=False)
cluster.center()
calc = GPAW(mode='pw',
dtype=complex,
xc='RPBE',
nbands=16,
eigensolver='rmm-diis',
occupations=FermiDirac(0.01))
    cluster.set_calculator(calc)
cluster.get_potential_energy()
calc.diagonalize_full_hamiltonian(nbands=24, scalapack=True)
calc.write('Au2.gpw', 'all')
if ABS:
df = DielectricFunction('Au2.gpw',
frequencies=np.linspace(0, 14, 141),
hilbert=not True,
eta=0.1,
ecut=10)
b0, b = df.get_dielectric_function(filename=None,
direction='z')
a0, a = df.get_polarizability(filename=None,
direction='z')
a0_ws, a_ws = df.get_polarizability(filename=None,
wigner_seitz_truncation=True,
direction='z')
w0_ = 5.60491055
I0_ = 244.693028
w_ = 5.696528390
I_ = 207.8
w, I = findpeak(np.linspace(0, 14., 141), b0.imag)
equal(w, w0_, 0.05)
equal(6**3 * I / (4 * np.pi), I0_, 0.5)
w, I = findpeak(np.linspace(0, 14., 141), a0.imag)
equal(w, w0_, 0.05)
equal(I, I0_, 0.5)
w, I = findpeak(np.linspace(0, 14., 141), a0_ws.imag)
equal(w, w0_, 0.05)
equal(I, I0_, 0.5)
w, I = findpeak(np.linspace(0, 14., 141), b.imag)
equal(w, w_, 0.05)
equal(6**3 * I / (4 * np.pi), I_, 0.5)
w, I = findpeak(np.linspace(0, 14., 141), a.imag)
equal(w, w_, 0.05)
equal(I, I_, 0.5)
# The Wigner-Seitz truncation does not give exactly the same for small cell
w, I = findpeak(np.linspace(0, 14., 141), a_ws.imag)
equal(w, w_, 0.2)
equal(I, I_, 8.0)
|
palankai/pyrs-resource
|
pyrs/resource/errors.py
|
Python
|
lgpl-3.0
| 5,253 | 0 |
from pyrs import schema
import six
from . import lib
from . import response
class Error(Exception):
"""
This is the base exception of this framework.
The response based on this exception will be a JSON data
"""
#: HTTP status code (default=500)
status = 500
#: HTTP Response headers, (default None processed as empty)
headers = None
#: Error code should be a string. If it's not specified the class fully
#: qualified name will be used
error = None
#: Description of error. Should give details about the error
    #: In the message it will appear as error_description
description = None
    #: Reference for this error. You can point to documentation which
    #: gives more information about how this error can happen and how
    #: it can be avoided
uri = None
#: None used as empty dict. Gives extra information about this error which
#: could be parsed by the consumer of API.
details = None
#: You can specify your schema class for validating your message
#: By default the application default error schema the `ErrorSchema` will
#: be used
schema = None
def __init__(self, *args, **details):
super(Error, self).__init__(*args)
if six.PY3:
self.traceback = lib.parse_traceback(self.__traceback__)
cause = self.__cause__ or self.__context__
else:
self.traceback = lib.get_traceback()
cause = None
self.cause = details.pop('cause', cause)
self.details = details
def get_headers(self):
"""
This method gives back the header property by default or an empty dict,
but you can override, then provide special headers based on the context
"""
return self.headers or {}
def get_status(self):
"""
This method gives back the status property by default which will be
        treated as the HTTP status code. You can override, then provide your own
status code based on the context.
"""
return self.status
def get_message(self, debug=False):
"""
        Should give back a dictionary which will be treated as the response body.
        The message should conform to the `ErrorSchema`.
"""
res = {
'error': self.error or lib.get_fqname(self)
}
if self.args:
res['message'] = self.args[0]
if self.description:
res['error_description'] = self.description
if self.uri:
res['error_uri'] = self.uri
details = self.get_details(debug)
if details:
res['details'] = details
return res
def get_details(self, debug=False):
"""
Gives back detailed information about the error and the context.
By default its an empty dictionary. The `debug` depends on the debug
parameter should give back traceback information and the positional
arguments of the exception.
        As this is part of the message, it should conform to the `ErrorSchema`.
"""
details = {}
if self.details:
details = self.details.copy()
if debug:
details['traceback'] = self.traceback
details['args'] = self.args[1:]
return details
@classmethod
def wrap(cls, original):
"""
        Wraps the exception and gives back an `Error` instance. The created `Error`
instance `error` property will be updated by the fully qualified name
of the `original` exception.
        You could use it for `Error` instances as well, though it is not
recommended.
"""
ex = cls(*original.args, cause=original)
ex.error = lib.get_fqname(original)
return ex
class ClientError(Error):
"""
    Generic Client Error. Normally the client errors have 4xx status codes.
"""
status = 400
class ValidationError(Error):
status = 500
error = 'validation_error'
class InputValidationError(Error):
status = 400
error = 'invalid_request_format'
class DetailsSchema(schema.Object):
"""
Details part of the error schema. Additional properties possible.
"""
traceback = schema.Array()
args = schema.Array()
class Attrs:
additional = True
class ErrorSchema(schema.Object):
"""
Describe how the error response should look like. Goal of this schema is
a minimalistic but usable error response.
"""
error = schema.String(required=True)
error_description = schema.String()
error_uri = schema.String()
message = schema.String()
details = DetailsSchema()
def dump(self, ex):
msg = ex.get_message(self['debug'])
return super(ErrorSchema, self).dump(msg)
class ErrorResponse(response.Response):
def setup(self):
if not isinstance(self.content, Error):
self.content = Error.wrap(self.content)
self.status = self.content.get_status()
self.headers = self.content.get_headers()
if self.content.schema:
self.processor = self.content.schema(debug=self.app['debug'])
else:
self.processor = ErrorSchema(debug=self.app['debug'])
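# Editor's usage sketch (not part of the original module): wrapping an
# arbitrary exception; the error code becomes the fully qualified name
# computed by lib.get_fqname, and the status defaults to 500.
if __name__ == '__main__':
    try:
        raise ValueError('bad input')
    except ValueError as original:
        wrapped = Error.wrap(original)
    print(wrapped.get_status())              # -> 500
    print(wrapped.get_message(debug=False))  # includes 'error' and 'message'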
|
akozumpl/anaconda
|
pyanaconda/network.py
|
Python
|
gpl-2.0
| 48,413 | 0.002417 |
#
# network.py - network configuration install data
#
# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007 Red Hat, Inc.
# 2008, 2009
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author(s): Matt Wilson <ewt@redhat.com>
# Erik Troan <ewt@redhat.com>
# Mike Fulbright <msf@redhat.com>
# Brent Fox <bfox@redhat.com>
# David Cantrell <dcantrell@redhat.com>
# Radek Vykydal <rvykydal@redhat.com>
import string
import shutil
from pyanaconda import iutil
import socket
import os
import time
import threading
import re
import dbus
import IPy
from uuid import uuid4
import itertools
from pyanaconda.simpleconfig import SimpleConfigFile
from blivet.devices import FcoeDiskDevice, iScsiDiskDevice
import blivet.arch
from pyanaconda import nm
from pyanaconda import constants
from pyanaconda.flags import flags, can_touch_runtime_system
from pyanaconda.i18n import _
from gi.repository import NetworkManager
import logging
log = logging.getLogger("anaconda")
sysconfigDir = "/etc/sysconfig"
netscriptsDir = "%s/network-scripts" % (sysconfigDir)
networkConfFile = "%s/network" % (sysconfigDir)
hostnameFile = "/etc/hostname"
ipv6ConfFile = "/etc/sysctl.d/anaconda.conf"
ifcfgLogFile = "/tmp/ifcfg.log"
DEFAULT_HOSTNAME = "localhost.localdomain"
# part of a valid hostname between two periods (cannot start nor end with '-')
# for more info about '(?!-)' and '(?<!-)' see 're' module documentation
HOSTNAME_PART_RE = re.compile(r"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
ifcfglog = None
network_connected = None
network_connected_condition = threading.Condition()
def setup_ifcfg_log():
# Setup special logging for ifcfg NM interface
from pyanaconda import anaconda_log
global ifcfglog
logger = logging.getLogger("ifcfg")
logger.setLevel(logging.DEBUG)
anaconda_log.logger.addFileHandler(ifcfgLogFile, logger, logging.DEBUG)
if os.access("/dev/tty3", os.W_OK):
anaconda_log.logger.addFileHandler("/dev/tty3", logger,
anaconda_log.DEFAULT_TTY_LEVEL,
anaconda_log.TTY_FORMAT,
autoLevel=True)
anaconda_log.logger.forwardToSyslog(logger)
ifcfglog = logging.getLogger("ifcfg")
def check_ip_address(address, version=None):
try:
_ip, ver = IPy.parseAddress(address)
except ValueError:
return False
if version and version == ver:
return True
def sanityCheckHostname(hostname):
"""
Check if the given string is (syntactically) a valid hostname.
:param hostname: a string to check
:returns: a pair containing boolean value (valid or invalid) and
an error message (if applicable)
:rtype: (bool, str)
"""
if not hostname:
return (False, _("Hostname cannot be None or an empty string."))
if len(hostname) > 255:
return (False, _("Hostname must be 255 or fewer characters in length."))
validStart = string.ascii_letters + string.digits
if hostname[0] not in validStart:
return (False, _("Hostname must start with a valid character in the "
"ranges 'a-z', 'A-Z', or '0-9'"))
if hostname.endswith("."):
# hostname can end with '.', but the regexp used below would not match
hostname = hostname[:-1]
if not all(HOSTNAME_PART_RE.match(part) for part in hostname.split(".")):
return (False, _("Hostnames can only contain the characters 'a-z', "
"'A-Z', '0-9', '-', or '.', parts between periods "
"must contain something and cannot start or end with "
"'-'."))
return (True, "")
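A quick illustration of the validator's contract (a hypothetical sketch, not part of the original module; left commented so it cannot run at import time):
# assert sanityCheckHostname("node-1.example.com") == (True, "")
# assert sanityCheckHostname("")[0] is False              # empty string rejected
# assert sanityCheckHostname("-bad.example.com")[0] is False   # cannot start with '-'
# assert sanityCheckHostname("a" * 300)[0] is False       # longer than 255 characters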
# Return a list of IP addresses for all active devices.
def getIPs():
ipv4_addresses = []
ipv6_addresses = []
for devname in nm.nm_activated_devices():
try:
ipv4_addresses += nm.nm_device_ip_addresses(devname, version=4)
ipv6_addresses += nm.nm_device_ip_addresses(devname, version=6)
except (dbus.DBusException, ValueError) as e:
log.warning("Got an exception trying to get the ip addr "
"of %s: %s", devname, e)
# prefer IPv4 addresses to IPv6 addresses
return ipv4_addresses + ipv6_addresses
# Return the first real non-local IP we find
def getFirstRealIP():
for ip in getIPs():
if ip not in ("127.0.0.1", "::1"):
return ip
return None
def netmask2prefix(netmask):
prefix = 0
while prefix < 33:
if (prefix2netmask(prefix) == netmask):
return prefix
prefix += 1
return prefix
def prefix2netmask(prefix):
""" Convert prefix (CIDR bits) to netmask """
_bytes = []
for _i in range(4):
if prefix >= 8:
_bytes.append(255)
prefix -= 8
else:
            _bytes.append(256 - 2**(8-prefix))
prefix = 0
netmask = ".".join(str(byte) for byte in _bytes)
return netmask
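Worked values for the two conversions above (a sketch; the numbers follow directly from the bit arithmetic):
# prefix2netmask(24) -> "255.255.255.0"     (255, 255, 255, then 256 - 2**8 = 0)
# prefix2netmask(30) -> "255.255.255.252"   (last octet: 256 - 2**(8-6) = 252)
# netmask2prefix("255.255.255.0") -> 24     (linear scan until the masks match)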
# Try to determine what the hostname should be for this system
def getHostname():
hn = None
# First address (we prefer ipv4) of last device (as it used to be) wins
for dev in nm.nm_activated_devices():
addrs = (nm.nm_device_ip_addresses(dev, version=4) +
nm.nm_device_ip_addresses(dev, version=6))
for ipaddr in addrs:
try:
hinfo = socket.gethostbyaddr(ipaddr)
except socket.herror as e:
log.debug("Exception caught trying to get host name of %s: %s", ipaddr, e)
else:
if len(hinfo) == 3:
hn = hinfo[0]
break
if not hn or hn in ('(none)', 'localhost', 'localhost.localdomain'):
hn = socket.gethostname()
if not hn or hn in ('(none)', 'localhost', 'localhost.localdomain'):
hn = DEFAULT_HOSTNAME
return hn
def logIfcfgFile(path, message=""):
content = ""
if os.access(path, os.R_OK):
f = open(path, 'r')
content = f.read()
f.close()
else:
content = "file not found"
ifcfglog.debug("%s%s:\n%s", message, path, content)
def _ifcfg_files(directory):
rv = []
for name in os.listdir(directory):
if name.startswith("ifcfg-"):
if name == "ifcfg-lo":
continue
rv.append(os.path.join(directory,name))
return rv
def logIfcfgFiles(message=""):
ifcfglog.debug("content of files (%s):", message)
for path in _ifcfg_files(netscriptsDir):
ifcfglog.debug("%s:", path)
with open(path, "r") as f:
for line in f:
ifcfglog.debug(" %s", line.strip())
ifcfglog.debug("all settings: %s", nm.nm_get_all_settings())
class IfcfgFile(SimpleConfigFile):
def __init__(self, filename):
SimpleConfigFile.__init__(self, always_quote=True, filename=filename)
self._dirty = False
def read(self, filename=None):
self.reset()
ifcfglog.debug("IfcfFile.read %s", self.filename)
SimpleConfigFile.read(self)
self._dirty = False
def write(self, filename=None, use_tmp=False):
if self._dirty or filename:
            # ifcfg-rh watches the inotify IN_CLOSE_WRITE event, so we don't
            # use a temporary file for the new configuration
ifcfglog.debug("IfcfgFile.write %s:\n%s", self.filename, self.__str__())
SimpleConfigFile.write(self, filename, use_tmp=use_tmp)
            self._dirty = False
|
susemeee/Chunsabot-framework
|
chunsabot/pi.py
|
Python
|
mit
| 1,587 | 0.009452 |
from decimal import *
class PI:
    #Sets decimal to 1000 digits of precision
getcontext().prec = 1000
@staticmethod
def factorial(n):
# if n<1:
# return 1
# else:
# return n * PI.factorial(n-1)
result = 1
for i in xrange(2, n+1):
result *= i
return result
@staticmethod
def plouffBig(n): #http://en.wikipedia.org/wiki/Bailey%E2%80%93Borwein%E2%80%93Plouffe_formula
pi = Decimal(0)
k = 0
while k < n:
pi += (Decimal(1)/(16**k))*((Decimal(4)/(8*k+1))-(Decimal(2)/(8*k+4))-(Decimal(1)/(8*k+5))-(Decimal(1)/(8*k+6)))
k += 1
return pi
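The series implemented in plouffBig is the Bailey–Borwein–Plouffe formula from the linked article, truncated after n terms:

\pi = \sum_{k=0}^{\infty} \frac{1}{16^k}\left(\frac{4}{8k+1} - \frac{2}{8k+4} - \frac{1}{8k+5} - \frac{1}{8k+6}\right)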
@staticmethod
def bellardBig(n): #http://en.wikipedia.org/wiki/Bellard%27s_formula
pi = Decimal(0)
k = 0
while k < n:
pi += (Decimal(-1)**k/(1024**k))*( Decimal(256)/(10*k+1) + Decimal(1)/(10*k+9) - Decimal(64)/(10*k+3) - Decimal(32)/(4*k+1) - Decimal(4)/(10*k+5) - Decimal(4)/(10*k+7) -Decimal(1)/(4*k+3))
k += 1
pi = pi * 1/(2**6)
        return pi
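bellardBig implements Bellard's rearrangement of the same idea (reportedly about 43% faster than BBP); the term order below matches the loop body, with the leading 1/2^6 applied after the loop:

\pi = \frac{1}{2^6}\sum_{k=0}^{\infty}\frac{(-1)^k}{2^{10k}}\left(\frac{2^8}{10k+1} + \frac{1}{10k+9} - \frac{2^6}{10k+3} - \frac{2^5}{4k+1} - \frac{2^2}{10k+5} - \frac{2^2}{10k+7} - \frac{1}{4k+3}\right)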
@staticmethod
def chudnovskyBig(n): #http://en.wikipedia.org/wiki/Chudnovsky_algorithm
pi = Decimal(0)
k = 0
while k < n:
            pi += (Decimal(-1)**k)*(Decimal(PI.factorial(6*k))/((PI.factorial(k)**3)*(PI.factorial(3*k)))* (13591409+545140134*k)/(640320**(3*k)))
k += 1
pi = pi * Decimal(10005).sqrt()/4270934400
pi = pi**(-1)
return pi
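The inversion at the end of chudnovskyBig matches the standard Chudnovsky form: the constant 4270934400 is 426880 * 10005, so multiplying by sqrt(10005)/4270934400 divides the partial sum by 426880*sqrt(10005):

\pi = \frac{426880\sqrt{10005}}{\displaystyle\sum_{k=0}^{\infty} \frac{(-1)^k\,(6k)!\,(13591409 + 545140134k)}{(3k)!\,(k!)^3\,640320^{3k}}}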
@staticmethod
def calculate():
return PI.bellardBig(1000)
|
mila-iqia/babyai
|
scripts/make_agent_demos.py
|
Python
|
bsd-3-clause
| 8,078 | 0.0026 |
#!/usr/bin/env python3
"""
Generate a set of agent demonstrations.
The agent can either be a trained model or the heuristic expert (bot).
Demonstration generation can take a long time, but it can be parallelized
if you have a cluster at your disposal. Provide a script that launches
make_agent_demos.py at your cluster as --job-script and the number of jobs as --jobs.
"""
import argparse
import gym
import logging
import sys
import subprocess
import os
import time
import numpy as np
import blosc
import torch
import babyai.utils as utils
# Parse arguments
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--env", required=True,
help="name of the environment to be run (REQUIRED)")
parser.add_argument("--model", default='BOT',
help="name of the trained model (REQUIRED)")
parser.add_argument("--demos", default=None,
help="path to save demonstrations (based on --model and --origin by default)")
parser.add_argument("--episodes", type=int, default=1000,
help="number of episodes to generate demonstrations for")
parser.add_argument("--valid-episodes", type=int, default=512,
help="number of validation episodes to generate demonstrations for")
parser.add_argument("--seed", type=int, default=0,
help="start random seed")
parser.add_argument("--argmax", action="store_true", default=False,
help="action with highest probability is selected")
parser.add_argument("--log-interval", type=int, default=100,
help="interval between progress reports")
parser.add_argument("--save-interval", type=int, default=10000,
help="interval between demonstrations saving")
parser.add_argument("--filter-steps", type=int, default=0,
help="filter out demos with number of steps more than filter-steps")
parser.add_argument("--on-exception", type=str, default='warn', choices=('warn', 'crash'),
help="How to handle exceptions during demo generation")
parser.add_argument("--job-script", type=str, default=None,
help="The script that launches make_agent_demos.py at a cluster.")
parser.add_argument("--jobs", type=int, default=0,
help="Split generation in that many jobs")
args = parser.parse_args()
logger = logging.getLogger(__name__)
# Set seed for all randomness sources
def print_demo_lengths(demos):
num_frames_per_episode = [len(demo[2]) for demo in demos]
logger.info('Demo length: {:.3f}+-{:.3f}'.format(
np.mean(num_frames_per_episode), np.std(num_frames_per_episode)))
def generate_demos(n_episodes, valid, seed, shift=0):
utils.seed(seed)
# Generate environment
env = gym.make(args.env)
agent = utils.load_agent(env, args.model, args.demos, 'agent', args.argmax, args.env)
demos_path = utils.get_demos_path(args.demos, args.env, 'agent', valid)
demos = []
checkpoint_time = time.time()
just_crashed = False
while True:
if len(demos) == n_episodes:
break
done = False
if just_crashed:
logger.info("reset the environment to find a mission that the bot can solve")
env.reset()
else:
env.seed(seed + len(demos))
obs = env.reset()
agent.on_reset()
actions = []
mission = obs["mission"]
images = []
directions = []
try:
while not done:
action = agent.act(obs)['action']
if isinstance(action, torch.Tensor):
action = action.item()
new_obs, reward, done, _ = env.step(action)
agent.analyze_feedback(reward, done)
actions.append(action)
images.append(obs['image'])
directions.append(obs['direction'])
obs = new_obs
if reward > 0 and (args.filter_steps == 0 or len(images) <= args.filter_steps):
demos.append((mission, blosc.pack_array(np.array(images)), directions, actions))
just_crashed = False
if reward == 0:
if args.on_exception == 'crash':
raise Exception("mission failed, the seed is {}".format(seed + len(demos)))
just_crashed = True
logger.info("mission failed")
except (Exception, AssertionError):
if args.on_exception == 'crash':
raise
just_crashed = True
logger.exception("error while generating demo #{}".format(len(demos)))
continue
if len(demos) and len(demos) % args.log_interval == 0:
now = time.time()
demos_per_second = args.log_interval / (now - checkpoint_time)
to_go = (n_episodes - len(demos)) / demos_per_second
logger.info("demo #{}, {:.3f} demos per second, {:.3f} seconds to go".format(
len(demos) - 1, demos_per_second, to_go))
checkpoint_time = now
# Save demonstrations
if args.save_interval > 0 and len(demos) < n_episodes and len(demos) % args.save_interval == 0:
logger.info("Saving demos...")
utils.save_demos(demos, demos_path)
logger.info("{} demos saved".format(len(demos)))
# print statistics for the last 100 demonstrations
print_demo_lengths(demos[-100:])
# Save demonstrations
logger.info("Saving demos...")
utils.save_demos(demos, demos_path)
logger.info("{} demos saved".format(len(demos)))
print_demo_lengths(demos[-100:])
def generate_demos_cluster():
demos_per_job = args.episodes // args.jobs
demos_path = utils.get_demos_path(args.demos, args.env, 'agent')
job_demo_names = [os.path.realpath(demos_path + '.shard{}'.format(i))
for i in range(args.jobs)]
for demo_name in job_demo_names:
job_demos_path = utils.get_demos_path(demo_name)
if os.path.exists(job_demos_path):
os.remove(job_demos_path)
command = [args.job_script]
command += sys.argv[1:]
for i in range(args.jobs):
cmd_i = list(map(str,
command
+ ['--seed', args.seed + i * demos_per_job]
+ ['--demos', job_demo_names[i]]
+ ['--episodes', demos_per_job]
+ ['--jobs', 0]
            + ['--valid-episodes', 0]))
logger.info('LAUNCH COMMAND')
logger.info(cmd_i)
output = subprocess.check_output(cmd_i)
logger.info('LAUNCH OUTPUT')
logger.info(output.decode('utf-8'))
job_demos = [None] * args.jobs
while True:
jobs_done = 0
for i in range(args.jobs):
if job_demos[i] is None or len(job_demos[i]) < demos_per_job:
try:
logger.info("Trying to load shard {}".format(i))
job_demos[i] = utils.load_demos(utils.get_demos_path(job_demo_names[i]))
logger.info("{} demos ready in shard {}".format(
len(job_demos[i]), i))
except Exception:
logger.exception("Failed to load the shard")
if job_demos[i] and len(job_demos[i]) == demos_per_job:
jobs_done += 1
logger.info("{} out of {} shards done".format(jobs_done, args.jobs))
if jobs_done == args.jobs:
break
logger.info("sleep for 60 seconds")
time.sleep(60)
# Training demos
all_demos = []
for demos in job_demos:
all_demos.extend(demos)
utils.save_demos(all_demos, demos_path)
logging.basicConfig(level='INFO', format="%(asctime)s: %(levelname)s: %(message)s")
logger.info(args)
# Training demos
if args.jobs == 0:
generate_demos(args.episodes, False, args.seed)
else:
generate_demos_cluster()
# Validation demos
if args.valid_episodes:
generate_demos(args.valid_episodes, True, int(1e9))
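To make the cluster sharding in generate_demos_cluster concrete, here is a minimal standalone sketch (hypothetical helper and values, not part of the script) of how seed ranges are partitioned; each job seeds episode e as start_seed + e, so shards never collide:

def shard_seeds(start_seed, episodes, jobs):
    """Mirror of the seed layout used by generate_demos_cluster."""
    per_job = episodes // jobs  # note: any remainder is dropped, as in the script
    return [(start_seed + i * per_job, per_job) for i in range(jobs)]

# shard_seeds(0, 1000, 4) -> [(0, 250), (250, 250), (500, 250), (750, 250)]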
|
daajoe/trellis
|
trellis/extractor/__init__.py
|
Python
|
gpl-3.0
| 99 | 0.010101 |
from edges import EdgeExtractor
from extractor import Extractor
from parambfs import ParamExtractor
|