text stringlengths 6-947k | repo_name stringlengths 5-100 | path stringlengths 4-231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6-947k | score float64 0-0.34
---|---|---|---|---|---|---
# -*- coding: utf-8 -*-
import sys
import threading
from celery.datastructures import ExceptionInfo
from celery.exceptions import MaxRetriesExceededError, RetryTaskError
from celery.execute.trace import TaskTrace
from celery.registry import tasks, _unpickle_task
from celery.result import EagerResult
from celery.utils import mattrgetter, gen_unique_id, fun_takes_kwargs
extract_exec_options = mattrgetter("queue", "routing_key",
"exchange", "immediate",
"mandatory", "priority",
"serializer", "delivery_mode",
"compression")
class Context(threading.local):
# Default context
logfile = None
loglevel = None
id = None
args = None
kwargs = None
retries = 0
is_eager = False
delivery_info = None
taskset = None
chord = None
def update(self, d, **kwargs):
self.__dict__.update(d, **kwargs)
def clear(self):
self.__dict__.clear()
def get(self, key, default=None):
try:
return getattr(self, key)
except AttributeError:
return default
class TaskType(type):
"""Meta class for tasks.
Automatically registers the task in the task registry, except
if the `abstract` attribute is set.
If no `name` attribute is provided, then no name is automatically
set to the name of the module it was defined in, and the class name.
"""
def __new__(cls, name, bases, attrs):
new = super(TaskType, cls).__new__
task_module = attrs.get("__module__") or "__main__"
# Abstract class: abstract attribute should not be inherited.
if attrs.pop("abstract", None) or not attrs.get("autoregister", True):
return new(cls, name, bases, attrs)
# Automatically generate missing/empty name.
autoname = False
if not attrs.get("name"):
try:
module_name = sys.modules[task_module].__name__
except KeyError: # pragma: no cover
# Fix for manage.py shell_plus (Issue #366).
module_name = task_module
attrs["name"] = '.'.join([module_name, name])
autoname = True
        # Because of the way import happens (recursively),
        # this may or may not be the first time this task tries to register
        # with the framework. There should only be one class for each task
        # name, so we always return the registered version.
task_name = attrs["name"]
if task_name not in tasks:
task_cls = new(cls, name, bases, attrs)
if autoname and task_module == "__main__" and task_cls.app.main:
task_name = task_cls.name = '.'.join([task_cls.app.main, name])
tasks.register(task_cls)
task = tasks[task_name].__class__
return task
def __repr__(cls):
return "<class Task of %s>" % (cls.app, )
class BaseTask(object):
"""Task base class.
    When called, tasks apply the :meth:`run` method. This method must
    be defined by all tasks (that is, unless the :meth:`__call__` method
    is overridden).
"""
__metaclass__ = TaskType
MaxRetriesExceededError = MaxRetriesExceededError
#: The application instance associated with this task class.
app = None
#: Name of the task.
name = None
#: If :const:`True` the task is an abstract base class.
abstract = True
#: If disabled the worker will not forward magic keyword arguments.
#: Deprecated and scheduled for removal in v3.0.
accept_magic_kwargs = False
#: Request context (set when task is applied).
request = Context()
#: Destination queue. The queue needs to exist
#: in :setting:`CELERY_QUEUES`. The `routing_key`, `exchange` and
#: `exchange_type` attributes will be ignored if this is set.
queue = None
    #: Overrides the app's default `routing_key` for this task.
    routing_key = None
    #: Overrides the app's default `exchange` for this task.
    exchange = None
    #: Overrides the app's default exchange type for this task.
    exchange_type = None
    #: Overrides the app's default delivery mode for this task. Default is
#: `"persistent"`, but you can change this to `"transient"`, which means
#: messages will be lost if the broker is restarted. Consult your broker
#: manual for any additional delivery modes.
delivery_mode = None
#: Mandatory message routing.
mandatory = False
#: Request immediate delivery.
immediate = False
    #: Default message priority. A number between 0 and 9, where 0 is the
    #: highest. Note that RabbitMQ does not support priorities.
priority = None
#: Maximum number of retries before giving up. If set to :const:`None`,
#: it will **never** stop retrying.
max_retries = 3
#: Default time in seconds before a retry of the task should be
#: executed. 3 minutes by default.
default_retry_delay = 3 * 60
    #: Rate limit for this task type. Examples: :const:`None` (no rate
    #: limit), `"100/s"` (hundred tasks a second), `"100/m"` (hundred tasks
    #: a minute), `"100/h"` (hundred tasks an hour).
rate_limit = None
#: If enabled the worker will not store task state and return values
#: for this task. Defaults to the :setting:`CELERY_IGNORE_RESULT`
#: setting.
ignore_result = False
#: When enabled errors will be stored even if the task is otherwise
#: configured to ignore results.
store_errors_even_if_ignored = False
#: If enabled an email will be sent to :setting:`ADMINS` whenever a task
#: of this type fails.
send_error_emails = False
disable_error_emails = False # FIXME
#: List of exception types to send error emails for.
error_whitelist = ()
    #: The name of a serializer that is registered with
    #: :mod:`kombu.serialization.registry`. Default is `"pickle"`.
serializer = "pickle"
#: Hard time limit.
#: Defaults to the :setting:`CELERY_TASK_TIME_LIMIT` setting.
time_limit = None
#: Soft time limit.
#: Defaults to the :setting:`CELERY_TASK_SOFT_TIME_LIMIT` setting.
soft_time_limit = None
#: The result store backend used for this task.
backend = None
#: If disabled this task won't be registered automatically.
autoregister = True
#: If enabled the task will report its status as "started" when the task
#: is executed by a worker. Disabled by default as the normal behaviour
#: is to not report that level of granularity. Tasks are either pending,
#: finished, or waiting to be retried.
#:
#: Having a "started" status can be useful for when there are long
#: running tasks and there is a need to report which task is currently
#: running.
#:
#: The application default can be overridden using the
#: :setting:`CELERY_TRACK_STARTED` setting.
track_started = False
    #: When enabled messages for this task will be acknowledged **after**
    #: the task has been executed, and not *just before*, which is the
    #: default behavior.
#:
#: Please note that this means the task may be executed twice if the
#: worker crashes mid execution (which may be acceptable for some
#: applications).
#:
#: The application default can be overridden with the
#: :setting:`CELERY_ACKS_LATE` setting.
acks_late = False
#: Default task expiry time.
expires = None
#: The type of task *(no longer used)*.
type = "regular"
def __call__(self, *args, **kwargs):
return self.run(*args, **kwargs)
def __reduce__(self):
return (_unpickle_task, (self.name, ), None)
def run(self, *args, **kwargs):
"""The body of the task executed by workers."""
raise NotImplementedError("Tasks must define the run method.")
@classmethod
def get_logger(self, loglevel=None, logfile=None, propagate=False,
**kwargs):
"""Get task-aware logger object."""
return self.app.log.setup_task_logger(
loglevel=self.request.loglevel if loglevel is None else loglevel,
logfile=self.request.logfile if logfile is None else logfile,
propagate=propagate, task_name=self.name, task_id=self.request.id)
@classmethod
def establish_connection(self, connect_timeout=None):
"""Establish a connection to the message broker."""
return self.app.broker_connection(connect_timeout=connect_timeout)
@classmethod
def get_publisher(self, connection=None, exchange=None,
connect_timeout=None, exchange_type=None, **options):
"""Get a celery task message publisher.
        :rtype: :class:`~celery.app.amqp.TaskPublisher`
        .. warning::
            If you don't specify a connection, one will automatically
            be established for you; in that case you need to close this
            connection after use::
>>> publisher = self.get_publisher()
>>> # ... do something with publisher
>>> publisher.connection.close()
            or use it as a context manager::
>>> with self.get_publisher() as publisher:
... # ... do something with publisher
"""
exchange = self.exchange if exchange is None else exchange
if exchange_type is None:
exchange_type = self.exchange_type
connection = connection or self.establish_connection(connect_timeout)
return self.app.amqp.TaskPublisher(connection=connection,
exchange=exchange,
exchange_type=exchange_type,
routing_key=self.routing_key,
**options)
@classmethod
def get_consumer(self, connection=None, connect_timeout=None):
"""Get message consumer.
        :rtype: :class:`kombu.messaging.Consumer`
        .. warning::
            If you don't specify a connection, one will automatically
            be established for you; in that case you need to close this
            connection after use::
>>> consumer = self.get_consumer()
>>> # do something with consumer
>>> consumer.close()
>>> consumer.connection.close()
"""
connection = connection or self.establish_connection(connect_timeout)
return self.app.amqp.TaskConsumer(connection=connection,
exchange=self.exchange,
routing_key=self.routing_key)
@classmethod
def delay(self, *args, **kwargs):
"""Star argument version of :meth:`apply_async`.
Does not support the extra options enabled by :meth:`apply_async`.
:param \*args: positional arguments passed on to the task.
:param \*\*kwargs: keyword arguments passed on to the task.
        :returns: :class:`celery.result.AsyncResult`
"""
return self.apply_async(args, kwargs)
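    # Example (illustrative): T.delay(1, 2, foo="bar") is shorthand for
    # T.apply_async(args=(1, 2), kwargs={"foo": "bar"}).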
@classmethod
def apply_async(self, args=None, kwargs=None, countdown=None,
eta=None, task_id=None, publisher=None, connection=None,
connect_timeout=None, router=None, expires=None, queues=None,
**options):
"""Apply tasks asynchronously by sending a message.
:keyword args: The positional arguments to pass on to the
task (a :class:`list` or :class:`tuple`).
:keyword kwargs: The keyword arguments to pass on to the
task (a :class:`dict`)
:keyword countdown: Number of seconds into the future that the
task should execute. Defaults to immediate
execution (do not confuse with the
`immediate` flag, as they are unrelated).
:keyword eta: A :class:`~datetime.datetime` object describing
the absolute time and date of when the task should
be executed. May not be specified if `countdown`
is also supplied. (Do not confuse this with the
`immediate` flag, as they are unrelated).
:keyword expires: Either a :class:`int`, describing the number of
seconds, or a :class:`~datetime.datetime` object
that describes the absolute time and date of when
the task should expire. The task will not be
executed after the expiration time.
:keyword connection: Re-use existing broker connection instead
of establishing a new one. The `connect_timeout`
argument is not respected if this is set.
:keyword connect_timeout: The timeout in seconds, before we give up
on establishing a connection to the AMQP
server.
:keyword retry: If enabled sending of the task message will be retried
in the event of connection loss or failure. Default
is taken from the :setting:`CELERY_TASK_PUBLISH_RETRY`
setting. Note you need to handle the
publisher/connection manually for this to work.
:keyword retry_policy: Override the retry policy used. See the
:setting:`CELERY_TASK_PUBLISH_RETRY` setting.
:keyword routing_key: The routing key used to route the task to a
worker server. Defaults to the
:attr:`routing_key` attribute.
:keyword exchange: The named exchange to send the task to.
Defaults to the :attr:`exchange` attribute.
:keyword exchange_type: The exchange type to initialize the exchange
if not already declared. Defaults to the
:attr:`exchange_type` attribute.
:keyword immediate: Request immediate delivery. Will raise an
exception if the task cannot be routed to a worker
immediately. (Do not confuse this parameter with
the `countdown` and `eta` settings, as they are
unrelated). Defaults to the :attr:`immediate`
attribute.
:keyword mandatory: Mandatory routing. Raises an exception if
there's no running workers able to take on this
task. Defaults to the :attr:`mandatory`
attribute.
:keyword priority: The task priority, a number between 0 and 9.
Defaults to the :attr:`priority` attribute.
:keyword serializer: A string identifying the default
serialization method to use. Can be `pickle`,
`json`, `yaml`, `msgpack` or any custom
serialization method that has been registered
with :mod:`kombu.serialization.registry`.
Defaults to the :attr:`serializer` attribute.
:keyword compression: A string identifying the compression method
to use. Can be one of ``zlib``, ``bzip2``,
or any custom compression methods registered with
:func:`kombu.compression.register`. Defaults to
the :setting:`CELERY_MESSAGE_COMPRESSION`
setting.
.. note::
If the :setting:`CELERY_ALWAYS_EAGER` setting is set, it will
be replaced by a local :func:`apply` call instead.
"""
router = self.app.amqp.Router(queues)
conf = self.app.conf
if conf.CELERY_ALWAYS_EAGER:
return self.apply(args, kwargs, task_id=task_id)
options.setdefault("compression",
conf.CELERY_MESSAGE_COMPRESSION)
options = dict(extract_exec_options(self), **options)
options = router.route(options, self.name, args, kwargs)
expires = expires or self.expires
publish = publisher or self.app.amqp.publisher_pool.acquire(block=True)
evd = None
if conf.CELERY_SEND_TASK_SENT_EVENT:
evd = self.app.events.Dispatcher(channel=publish.channel,
buffer_while_offline=False)
try:
task_id = publish.delay_task(self.name, args, kwargs,
task_id=task_id,
countdown=countdown,
eta=eta, expires=expires,
event_dispatcher=evd,
**options)
finally:
if not publisher:
publish.release()
return self.AsyncResult(task_id)
@classmethod
def retry(self, args=None, kwargs=None, exc=None, throw=True,
eta=None, countdown=None, max_retries=None, **options):
"""Retry the task.
:param args: Positional arguments to retry with.
:param kwargs: Keyword arguments to retry with.
:keyword exc: Optional exception to raise instead of
:exc:`~celery.exceptions.MaxRetriesExceededError`
when the max restart limit has been exceeded.
:keyword countdown: Time in seconds to delay the retry for.
:keyword eta: Explicit time and date to run the retry at
(must be a :class:`~datetime.datetime` instance).
:keyword max_retries: If set, overrides the default retry limit.
        :keyword \*\*options: Any extra options to pass on to
                              :meth:`apply_async`.
:keyword throw: If this is :const:`False`, do not raise the
:exc:`~celery.exceptions.RetryTaskError` exception,
that tells the worker to mark the task as being
retried. Note that this means the task will be
marked as failed if the task raises an exception,
or successful if it returns.
:raises celery.exceptions.RetryTaskError: To tell the worker that
the task has been re-sent for retry. This always happens,
unless the `throw` keyword argument has been explicitly set
to :const:`False`, and is considered normal operation.
**Example**
.. code-block:: python
        >>> @task
        ... def tweet(auth, message):
... twitter = Twitter(oauth=auth)
... try:
... twitter.post_status_update(message)
... except twitter.FailWhale, exc:
... # Retry in 5 minutes.
... return tweet.retry(countdown=60 * 5, exc=exc)
Although the task will never return above as `retry` raises an
exception to notify the worker, we use `return` in front of the retry
to convey that the rest of the block will not be executed.
"""
request = self.request
max_retries = self.max_retries if max_retries is None else max_retries
args = request.args if args is None else args
kwargs = request.kwargs if kwargs is None else kwargs
delivery_info = request.delivery_info
if delivery_info:
options.setdefault("exchange", delivery_info.get("exchange"))
options.setdefault("routing_key", delivery_info.get("routing_key"))
if not eta and countdown is None:
countdown = self.default_retry_delay
options.update({"retries": request.retries + 1,
"task_id": request.id,
"countdown": countdown,
"eta": eta})
if max_retries is not None and options["retries"] > max_retries:
raise exc or self.MaxRetriesExceededError(
"Can't retry %s[%s] args:%s kwargs:%s" % (
self.name, options["task_id"], args, kwargs))
# If task was executed eagerly using apply(),
# then the retry must also be executed eagerly.
if request.is_eager:
return self.apply(args=args, kwargs=kwargs, **options).get()
self.apply_async(args=args, kwargs=kwargs, **options)
if throw:
raise RetryTaskError(
eta and "Retry at %s" % (eta, )
or "Retry in %s secs." % (countdown, ), exc)
@classmethod
def apply(self, args=None, kwargs=None, **options):
"""Execute this task locally, by blocking until the task returns.
:param args: positional arguments passed on to the task.
:param kwargs: keyword arguments passed on to the task.
:keyword throw: Re-raise task exceptions. Defaults to
the :setting:`CELERY_EAGER_PROPAGATES_EXCEPTIONS`
setting.
        :rtype: :class:`celery.result.EagerResult`
"""
args = args or []
kwargs = kwargs or {}
task_id = options.get("task_id") or gen_unique_id()
retries = options.get("retries", 0)
throw = self.app.either("CELERY_EAGER_PROPAGATES_EXCEPTIONS",
options.pop("throw", None))
# Make sure we get the task instance, not class.
task = tasks[self.name]
request = {"id": task_id,
"retries": retries,
"is_eager": True,
"logfile": options.get("logfile"),
"loglevel": options.get("loglevel", 0),
"delivery_info": {"is_eager": True}}
if self.accept_magic_kwargs:
default_kwargs = {"task_name": task.name,
"task_id": task_id,
"task_retries": retries,
"task_is_eager": True,
"logfile": options.get("logfile"),
"loglevel": options.get("loglevel", 0),
"delivery_info": {"is_eager": True}}
supported_keys = fun_takes_kwargs(task.run, default_kwargs)
extend_with = dict((key, val)
for key, val in default_kwargs.items()
if key in supported_keys)
kwargs.update(extend_with)
trace = TaskTrace(task.name, task_id, args, kwargs,
task=task, request=request, propagate=throw)
retval = trace.execute()
if isinstance(retval, ExceptionInfo):
retval = retval.exception
return EagerResult(task_id, retval, trace.status,
traceback=trace.strtb)
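    # Example (illustrative): T.apply(args=(2, 2)) runs the task inline in
    # the current process and returns an EagerResult whose .get() yields
    # the task's return value.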
@classmethod
def AsyncResult(self, task_id):
"""Get AsyncResult instance for this kind of task.
:param task_id: Task id to get result for.
"""
return self.app.AsyncResult(task_id, backend=self.backend,
task_name=self.name)
def update_state(self, task_id=None, state=None, meta=None):
"""Update task state.
:param task_id: Id of the task to update.
:param state: New state (:class:`str`).
:param meta: State metadata (:class:`dict`).
"""
if task_id is None:
task_id = self.request.id
self.backend.store_result(task_id, meta, state)
def on_retry(self, exc, task_id, args, kwargs, einfo):
"""Retry handler.
This is run by the worker when the task is to be retried.
:param exc: The exception sent to :meth:`retry`.
:param task_id: Unique id of the retried task.
:param args: Original arguments for the retried task.
:param kwargs: Original keyword arguments for the retried task.
:keyword einfo: :class:`~celery.datastructures.ExceptionInfo`
instance, containing the traceback.
The return value of this handler is ignored.
"""
pass
def after_return(self, status, retval, task_id, args, kwargs, einfo):
"""Handler called after the task returns.
:param status: Current task state.
:param retval: Task return value/exception.
:param task_id: Unique id of the task.
:param args: Original arguments for the task that failed.
:param kwargs: Original keyword arguments for the task
that failed.
:keyword einfo: :class:`~celery.datastructures.ExceptionInfo`
instance, containing the traceback (if any).
The return value of this handler is ignored.
"""
if self.request.chord:
self.backend.on_chord_part_return(self)
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""Error handler.
This is run by the worker when the task fails.
:param exc: The exception raised by the task.
:param task_id: Unique id of the failed task.
:param args: Original arguments for the task that failed.
:param kwargs: Original keyword arguments for the task
that failed.
:keyword einfo: :class:`~celery.datastructures.ExceptionInfo`
instance, containing the traceback.
The return value of this handler is ignored.
"""
pass
def on_success(self, retval, task_id, args, kwargs):
"""Success handler.
Run by the worker if the task executes successfully.
:param retval: The return value of the task.
:param task_id: Unique id of the executed task.
:param args: Original arguments for the executed task.
:param kwargs: Original keyword arguments for the executed task.
The return value of this handler is ignored.
"""
pass
def execute(self, request, pool, loglevel, logfile, **kwargs):
"""The method the worker calls to execute the task.
:param request: A :class:`~celery.worker.job.TaskRequest`.
:param pool: A task pool.
:param loglevel: Current loglevel.
:param logfile: Name of the currently used logfile.
:keyword consumer: The :class:`~celery.worker.consumer.Consumer`.
"""
request.execute_using_pool(pool, loglevel, logfile)
def __repr__(self):
"""`repr(task)`"""
return "<@task: %s>" % (self.name, )
@classmethod
def subtask(cls, *args, **kwargs):
"""Returns :class:`~celery.task.sets.subtask` object for
this task, wrapping arguments and execution options
for a single task invocation."""
from celery.task.sets import subtask
return subtask(cls, *args, **kwargs)
@property
def __name__(self):
return self.__class__.__name__
| WoLpH/celery | celery/app/task/__init__.py | Python | bsd-3-clause | 27,392 | 0.000548 |
# The content of this file was generated using the Python profile of libCellML 0.2.0.
from enum import Enum
from math import *
__version__ = "0.3.0"
LIBCELLML_VERSION = "0.2.0"
STATE_COUNT = 3
VARIABLE_COUNT = 19
class VariableType(Enum):
VARIABLE_OF_INTEGRATION = 1
STATE = 2
CONSTANT = 3
COMPUTED_CONSTANT = 4
ALGEBRAIC = 5
EXTERNAL = 6
VOI_INFO = {"name": "time", "units": "millisecond", "component": "environment", "type": VariableType.VARIABLE_OF_INTEGRATION}
STATE_INFO = [
{"name": "m", "units": "dimensionless", "component": "sodium_channel_m_gate", "type": VariableType.STATE},
{"name": "h", "units": "dimensionless", "component": "sodium_channel_h_gate", "type": VariableType.STATE},
{"name": "n", "units": "dimensionless", "component": "potassium_channel_n_gate", "type": VariableType.STATE}
]
VARIABLE_INFO = [
{"name": "V", "units": "millivolt", "component": "membrane", "type": VariableType.EXTERNAL},
{"name": "g_L", "units": "milliS_per_cm2", "component": "leakage_current", "type": VariableType.CONSTANT},
{"name": "Cm", "units": "microF_per_cm2", "component": "membrane", "type": VariableType.CONSTANT},
{"name": "E_R", "units": "millivolt", "component": "membrane", "type": VariableType.CONSTANT},
{"name": "g_K", "units": "milliS_per_cm2", "component": "potassium_channel", "type": VariableType.CONSTANT},
{"name": "g_Na", "units": "milliS_per_cm2", "component": "sodium_channel", "type": VariableType.CONSTANT},
{"name": "i_Stim", "units": "microA_per_cm2", "component": "membrane", "type": VariableType.ALGEBRAIC},
{"name": "E_L", "units": "millivolt", "component": "leakage_current", "type": VariableType.COMPUTED_CONSTANT},
{"name": "i_L", "units": "microA_per_cm2", "component": "leakage_current", "type": VariableType.ALGEBRAIC},
{"name": "E_Na", "units": "millivolt", "component": "sodium_channel", "type": VariableType.COMPUTED_CONSTANT},
{"name": "i_Na", "units": "microA_per_cm2", "component": "sodium_channel", "type": VariableType.EXTERNAL},
{"name": "alpha_m", "units": "per_millisecond", "component": "sodium_channel_m_gate", "type": VariableType.ALGEBRAIC},
{"name": "beta_m", "units": "per_millisecond", "component": "sodium_channel_m_gate", "type": VariableType.ALGEBRAIC},
{"name": "alpha_h", "units": "per_millisecond", "component": "sodium_channel_h_gate", "type": VariableType.ALGEBRAIC},
{"name": "beta_h", "units": "per_millisecond", "component": "sodium_channel_h_gate", "type": VariableType.ALGEBRAIC},
{"name": "E_K", "units": "millivolt", "component": "potassium_channel", "type": VariableType.COMPUTED_CONSTANT},
{"name": "i_K", "units": "microA_per_cm2", "component": "potassium_channel", "type": VariableType.ALGEBRAIC},
{"name": "alpha_n", "units": "per_millisecond", "component": "potassium_channel_n_gate", "type": VariableType.EXTERNAL},
{"name": "beta_n", "units": "per_millisecond", "component": "potassium_channel_n_gate", "type": VariableType.ALGEBRAIC}
]
def leq_func(x, y):
return 1.0 if x <= y else 0.0
def geq_func(x, y):
return 1.0 if x >= y else 0.0
def and_func(x, y):
return 1.0 if bool(x) & bool(y) else 0.0
def create_states_array():
return [nan]*STATE_COUNT
def create_variables_array():
return [nan]*VARIABLE_COUNT
def initialise_variables(voi, states, variables, external_variable):
variables[1] = 0.3
variables[2] = 1.0
variables[3] = 0.0
variables[4] = 36.0
variables[5] = 120.0
states[0] = 0.05
states[1] = 0.6
states[2] = 0.325
variables[0] = external_variable(voi, states, variables, 0)
variables[17] = external_variable(voi, states, variables, 17)
variables[10] = external_variable(voi, states, variables, 10)
def compute_computed_constants(variables):
variables[7] = variables[3]-10.613
variables[9] = variables[3]-115.0
variables[15] = variables[3]+12.0
def compute_rates(voi, states, rates, variables, external_variable):
variables[0] = external_variable(voi, states, variables, 0)
variables[11] = 0.1*(variables[0]+25.0)/(exp((variables[0]+25.0)/10.0)-1.0)
variables[12] = 4.0*exp(variables[0]/18.0)
rates[0] = variables[11]*(1.0-states[0])-variables[12]*states[0]
variables[13] = 0.07*exp(variables[0]/20.0)
variables[14] = 1.0/(exp((variables[0]+30.0)/10.0)+1.0)
rates[1] = variables[13]*(1.0-states[1])-variables[14]*states[1]
variables[17] = external_variable(voi, states, variables, 17)
variables[18] = 0.125*exp(variables[0]/80.0)
rates[2] = variables[17]*(1.0-states[2])-variables[18]*states[2]
def compute_variables(voi, states, rates, variables, external_variable):
variables[0] = external_variable(voi, states, variables, 0)
variables[6] = -20.0 if and_func(geq_func(voi, 10.0), leq_func(voi, 10.5)) else 0.0
variables[8] = variables[1]*(variables[0]-variables[7])
variables[17] = external_variable(voi, states, variables, 17)
variables[10] = external_variable(voi, states, variables, 10)
variables[16] = variables[4]*pow(states[2], 4.0)*(variables[0]-variables[15])
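# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the generated API). The generated
# functions expect an external_variable(voi, states, variables, index)
# callback for the three EXTERNAL variables (indices 0, 10 and 17); the
# constant return values below are placeholders, not physiology:
#
#   def external_variable(voi, states, variables, index):
#       return {0: 0.0, 10: 0.0, 17: 0.5}[index]
#
#   def euler_solve(step=0.01, n_steps=1000):
#       voi = 0.0
#       states = create_states_array()
#       rates = create_states_array()
#       variables = create_variables_array()
#       initialise_variables(voi, states, variables, external_variable)
#       compute_computed_constants(variables)
#       for _ in range(n_steps):
#           compute_rates(voi, states, rates, variables, external_variable)
#           states = [s + step*r for s, r in zip(states, rates)]
#           voi += step
#       compute_variables(voi, states, rates, variables, external_variable)
#       return states, variables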
| cellml/libcellml | tests/resources/generator/hodgkin_huxley_squid_axon_model_1952/model.external.py | Python | apache-2.0 | 5,138 | 0.00506 |
#!/usr/bin/env python
#
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import numpy
from constellation_map_generator import *
'''
Note on the naming scheme. Each constellation is named using a prefix
for the type of constellation, the order of the constellation, and a
distinguishing feature, which comes in three modes:
- No extra feature: the basic Gray-coded constellation map; others
will be derived from this type.
- A single number: an indexed number to uniquely identify different
constellation maps.
- 0xN_x0_x1..._xM: A permutation of the base constellation, explained
below.
For rectangular constellations (BPSK, QPSK, QAM), we can define a
hyperspace and look for all symmetries. This is also known as the
automorphism group of the hypercube, aka the hyperoctahedral
group. What this means is that we can easily define all possible
rotations in terms of the first base mapping by creating the mapping:
f(x) = k XOR pi(x)
The x is the bit string for the symbol we are altering. Then k is a
bit string of n bits where n is the number of bits per symbol in the
constellation (e.g., 2 for QPSK or 6 for QAM64). The pi is a
permutation function specified as pi_0, pi_1..., pi_n-1. This permutes
the bits from the base constellation symbol to a new code, which is
then xor'd by k.
The value of k is from 0 to 2^n-1 and pi is a list of all bit
positions.
The total number of Gray coded modulations is (2^n)*(n!).
We create aliases for all possible naming schemes for the
constellations. So if a hyperoctahedral group is defined, we also set
this function equal to a function name using a unique ID number, and
we always select one rotation as our basic rotation that the other
rotations are based off of.
'''
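# Worked example (illustrative, not from the original file): with the QPSK
# base map psk_4 below (00 bottom-left, 01 bottom-right, 10 top-left,
# 11 top-right), f(x) = k XOR pi(x) with k = 0x1 and pi = [1, 0] first swaps
# the two bits (01 -> 10) and then XORs with 01, so the bottom-right point
# becomes 11 -- exactly the psk_4_0x1_1_0 map defined below. A sketch of the
# remapping, assuming pi[i] gives the new position of input bit i:
#
#   def remap(symbol, k, pi):
#       out = 0
#       for i, p in enumerate(pi):
#           out |= ((symbol >> i) & 0x1) << p
#       return out ^ k
#
# For n bits there are 2**n choices of k and n! permutations, giving the
# (2^n)*(n!) Gray-coded maps noted above.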
############################################################
# BPSK Constellation Mappings
############################################################
def psk_2_0x0():
'''
0 | 1
'''
const_points = [-1, 1]
symbols = [0, 1]
return (const_points, symbols)
psk_2 = psk_2_0x0 # Basic BPSK rotation
psk_2_0 = psk_2 # First ID for BPSK rotations
def psk_2_0x1():
'''
1 | 0
'''
const_points = [-1, 1]
symbols = [1, 0]
return (const_points, symbols)
psk_2_1 = psk_2_0x1
############################################################
# BPSK Soft bit LUT generators
############################################################
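# Each sd_* function below maps one received complex sample to a list of
# per-bit soft decisions (positive values favor a 1 bit), scaled by the
# symbol energy Es.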
def sd_psk_2_0x0(x, Es=1):
'''
0 | 1
'''
x_re = x.real
dist = Es*numpy.sqrt(2)
return [dist*x_re,]
sd_psk_2 = sd_psk_2_0x0 # Basic BPSK rotation
sd_psk_2_0 = sd_psk_2 # First ID for BPSK rotations
def sd_psk_2_0x1(x, Es=1):
'''
1 | 0
'''
    x_re = x.real
    dist = Es*numpy.sqrt(2)
    return [-dist*x_re,]
sd_psk_2_1 = sd_psk_2_0x1
############################################################
# QPSK Constellation Mappings
############################################################
def psk_4_0x0_0_1():
'''
10 | 11
-------
00 | 01
'''
const_points = [-1-1j, 1-1j,
-1+1j, 1+1j]
symbols = [0, 1, 2, 3]
return (const_points, symbols)
psk_4 = psk_4_0x0_0_1
psk_4_0 = psk_4
def psk_4_0x1_0_1():
'''
11 | 10
-------
01 | 00
'''
k = 0x1
pi = [0, 1]
return constellation_map_generator(psk_4, k, pi)
psk_4_1 = psk_4_0x1_0_1
def psk_4_0x2_0_1():
'''
00 | 01
-------
10 | 11
'''
k = 0x2
pi = [0, 1]
return constellation_map_generator(psk_4, k, pi)
psk_4_2 = psk_4_0x2_0_1
def psk_4_0x3_0_1():
'''
01 | 00
-------
11 | 10
'''
k = 0x3
pi = [0, 1]
return constellation_map_generator(psk_4, k, pi)
psk_4_3 = psk_4_0x3_0_1
def psk_4_0x0_1_0():
'''
01 | 11
-------
00 | 10
'''
k = 0x0
pi = [1, 0]
return constellation_map_generator(psk_4, k, pi)
psk_4_4 = psk_4_0x0_1_0
def psk_4_0x1_1_0():
'''
00 | 10
-------
01 | 11
'''
k = 0x1
pi = [1, 0]
return constellation_map_generator(psk_4, k, pi)
psk_4_5 = psk_4_0x1_1_0
def psk_4_0x2_1_0():
'''
11 | 01
-------
10 | 00
'''
k = 0x2
pi = [1, 0]
return constellation_map_generator(psk_4, k, pi)
psk_4_6 = psk_4_0x2_1_0
def psk_4_0x3_1_0():
'''
10 | 00
-------
11 | 01
'''
k = 0x3
pi = [1, 0]
return constellation_map_generator(psk_4, k, pi)
psk_4_7 = psk_4_0x3_1_0
############################################################
# QPSK Constellation Softbit LUT generators
############################################################
def sd_psk_4_0x0_0_1(x, Es=1):
'''
10 | 11
-------
00 | 01
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [dist*x_im, dist*x_re]
sd_psk_4 = sd_psk_4_0x0_0_1
sd_psk_4_0 = sd_psk_4
def sd_psk_4_0x1_0_1(x, Es=1):
'''
11 | 10
-------
01 | 00
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [dist*x_im, -dist*x_re]
sd_psk_4_1 = sd_psk_4_0x1_0_1
def sd_psk_4_0x2_0_1(x, Es=1):
'''
00 | 01
-------
10 | 11
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [-dist*x_im, dist*x_re]
sd_psk_4_2 = sd_psk_4_0x2_0_1
def sd_psk_4_0x3_0_1(x, Es=1):
'''
01 | 00
-------
11 | 10
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [-dist*x_im, -dist*x_re]
sd_psk_4_3 = sd_psk_4_0x3_0_1
def sd_psk_4_0x0_1_0(x, Es=1):
'''
01 | 11
-------
00 | 10
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [dist*x_re, dist*x_im]
sd_psk_4_4 = sd_psk_4_0x0_1_0
def sd_psk_4_0x1_1_0(x, Es=1):
'''
00 | 10
-------
01 | 11
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [dist*x_re, -dist*x_im]
sd_psk_4_5 = sd_psk_4_0x1_1_0
def sd_psk_4_0x2_1_0(x, Es=1):
'''
11 | 01
-------
10 | 00
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [-dist*x_re, dist*x_im]
sd_psk_4_6 = sd_psk_4_0x2_1_0
def sd_psk_4_0x3_1_0(x, Es=1):
'''
10 | 00
-------
11 | 01
'''
x_re = x.real
x_im = x.imag
dist = Es*numpy.sqrt(2)
return [-dist*x_re, -dist*x_im]
sd_psk_4_7 = sd_psk_4_0x3_1_0
| riveridea/gnuradio | gr-digital/python/digital/psk_constellations.py | Python | gpl-3.0 | 6,937 | 0.006631 |
from __future__ import print_function
import os
import sys
sys.path.insert(1, os.path.join("..","..",".."))
import h2o
from h2o.estimators.infogram import H2OInfogram
from h2o.estimators.glm import H2OGeneralizedLinearEstimator
from tests import pyunit_utils
def test_infogram_personal_loan():
"""
    Test to make sure predictors can be specified using an infogram model.
"""
fr = h2o.import_file(path=pyunit_utils.locate("smalldata/admissibleml_test/Bank_Personal_Loan_Modelling.csv"))
target = "Personal Loan"
fr[target] = fr[target].asfactor()
x = ["Experience","Income","Family","CCAvg","Education","Mortgage",
"Securities Account","CD Account","Online","CreditCard"]
infogram_model = H2OInfogram(seed = 12345, protected_columns=["Age","ZIP Code"])
infogram_model.train(x=x, y=target, training_frame=fr)
glm_model1 = H2OGeneralizedLinearEstimator()
glm_model1.train(x=infogram_model._extract_x_from_model(), y=target, training_frame=fr)
coef1 = glm_model1.coef()
glm_model2 = H2OGeneralizedLinearEstimator()
glm_model2.train(x=infogram_model, y=target, training_frame=fr)
coef2 = glm_model2.coef()
pyunit_utils.assertCoefDictEqual(coef1, coef2, tol=1e-6)
if __name__ == "__main__":
pyunit_utils.standalone_test(test_infogram_personal_loan)
else:
test_infogram_personal_loan()
| h2oai/h2o-3 | h2o-py/tests/testdir_algos/infogram/pyunit_PUBDEV_8075_safe_infogram_personal_loan_x_att.py | Python | apache-2.0 | 1,365 | 0.01685 |
"""
Positional arguments:
1. INPUT - file path to PIR file
2. OUTPUT - file path of output FASTA file
"""
import sys
PIR = sys.argv[1]
FASTA = sys.argv[2]
from modeller import *
e = environ()
a = alignment(e, file = PIR, alignment_format = 'PIR')
a.write(file = FASTA, alignment_format = 'FASTA')
| meren/anvio | anvio/data/misc/MODELLER/scripts/pir_to_fasta.py | Python | gpl-3.0 | 298 | 0.030201 |
import sys
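# On Python >= 3.7 the validator classes are loaded lazily via a
# module-level __getattr__ (PEP 562); older interpreters fall back to
# eager imports.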
if sys.version_info < (3, 7):
from ._opacity import OpacityValidator
from ._color import ColorValidator
else:
from _plotly_utils.importers import relative_import
__all__, __getattr__, __dir__ = relative_import(
__name__, [], ["._opacity.OpacityValidator", "._color.ColorValidator"]
)
| plotly/python-api | packages/python/plotly/plotly/validators/histogram/unselected/marker/__init__.py | Python | mit | 325 | 0 |
#
# actions.py: routines that actually run the svn client.
#
# Subversion is a tool for revision control.
# See http://subversion.tigris.org for more information.
#
# ====================================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
######################################################################
import os, shutil, re, sys, errno
import difflib, pprint, logging
import xml.parsers.expat
from xml.dom.minidom import parseString
if sys.version_info[0] >= 3:
# Python >=3.0
from io import StringIO
else:
# Python <3.0
from cStringIO import StringIO
import svntest
from svntest import main, verify, tree, wc, sandbox
from svntest import Failure
logger = logging.getLogger()
# (abbreviation)
Item = svntest.wc.StateItem
def _log_tree_state(msg, actual, subtree=""):
if subtree:
subtree += os.sep
o = StringIO()
o.write(msg + '\n')
tree.dump_tree_script(actual, subtree, stream=o)
logger.warn(o.getvalue())
o.close()
def no_sleep_for_timestamps():
os.environ['SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS'] = 'yes'
def do_sleep_for_timestamps():
os.environ['SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS'] = 'no'
def no_relocate_validation():
os.environ['SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_RELOCATE_VALIDATION'] = 'yes'
def do_relocate_validation():
os.environ['SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_RELOCATE_VALIDATION'] = 'no'
def setup_pristine_greek_repository():
"""Create the pristine repository and 'svn import' the greek tree"""
# these directories don't exist out of the box, so we may have to create them
if not os.path.exists(main.general_wc_dir):
os.makedirs(main.general_wc_dir)
if not os.path.exists(main.general_repo_dir):
os.makedirs(main.general_repo_dir) # this also creates all the intermediate dirs
# If there's no pristine repos, create one.
if not os.path.exists(main.pristine_greek_repos_dir):
main.create_repos(main.pristine_greek_repos_dir)
# if this is dav, gives us access rights to import the greek tree.
if main.is_ra_type_dav():
authz_file = os.path.join(main.work_dir, "authz")
main.file_write(authz_file, "[/]\n* = rw\n")
# dump the greek tree to disk.
main.greek_state.write_to_disk(main.greek_dump_dir)
# import the greek tree, using l:foo/p:bar
### todo: svn should not be prompting for auth info when using
### repositories with no auth/auth requirements
_, output, _ = main.run_svn(None, 'import', '-m',
'Log message for revision 1.',
main.greek_dump_dir,
main.pristine_greek_repos_url)
# verify the printed output of 'svn import'.
lastline = output.pop().strip()
match = re.search("(Committed|Imported) revision [0-9]+.", lastline)
if not match:
logger.error("import did not succeed, while creating greek repos.")
logger.error("The final line from 'svn import' was:")
logger.error(lastline)
sys.exit(1)
output_tree = wc.State.from_commit(output)
expected_output_tree = main.greek_state.copy(main.greek_dump_dir)
expected_output_tree.tweak(verb='Adding',
contents=None)
try:
expected_output_tree.compare_and_display('output', output_tree)
except tree.SVNTreeUnequal:
verify.display_trees("ERROR: output of import command is unexpected.",
"OUTPUT TREE",
expected_output_tree.old_tree(),
output_tree.old_tree())
sys.exit(1)
# Finally, disallow any changes to the "pristine" repos.
error_msg = "Don't modify the pristine repository"
create_failing_hook(main.pristine_greek_repos_dir, 'start-commit', error_msg)
create_failing_hook(main.pristine_greek_repos_dir, 'pre-lock', error_msg)
create_failing_hook(main.pristine_greek_repos_dir, 'pre-revprop-change', error_msg)
######################################################################
def guarantee_empty_repository(path):
"""Guarantee that a local svn repository exists at PATH, containing
nothing."""
if path == main.pristine_greek_repos_dir:
logger.error("attempt to overwrite the pristine repos! Aborting.")
sys.exit(1)
# create an empty repository at PATH.
main.safe_rmtree(path)
main.create_repos(path)
# Used by every test, so that they can run independently of one
# another. Every time this routine is called, it recursively copies
# the `pristine repos' to a new location.
# Note: make sure setup_pristine_greek_repository was called once before
# using this function.
def guarantee_greek_repository(path, minor_version):
"""Guarantee that a local svn repository exists at PATH, containing
nothing but the greek-tree at revision 1."""
if path == main.pristine_greek_repos_dir:
logger.error("attempt to overwrite the pristine repos! Aborting.")
sys.exit(1)
# copy the pristine repository to PATH.
main.safe_rmtree(path)
if main.copy_repos(main.pristine_greek_repos_dir, path, 1, 1, minor_version):
logger.error("copying repository failed.")
sys.exit(1)
# make the repos world-writeable, for mod_dav_svn's sake.
main.chmod_tree(path, 0666, 0666)
def run_and_verify_atomic_ra_revprop_change(message,
expected_stdout,
expected_stderr,
expected_exit,
url, revision, propname,
old_propval, propval,
want_error):
"""Run atomic-ra-revprop-change helper and check its output and exit code.
Transforms OLD_PROPVAL and PROPVAL into a skel.
For HTTP, the default HTTP library is used."""
KEY_OLD_PROPVAL = "old_value_p"
KEY_NEW_PROPVAL = "value"
def skel_make_atom(word):
return "%d %s" % (len(word), word)
def make_proplist_skel_part(nick, val):
if val is None:
return ""
else:
return "%s %s" % (skel_make_atom(nick), skel_make_atom(val))
skel = "( %s %s )" % (make_proplist_skel_part(KEY_OLD_PROPVAL, old_propval),
make_proplist_skel_part(KEY_NEW_PROPVAL, propval))
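  # For example (illustrative): old_propval='foo' and propval='bar' yield
  #   ( 11 old_value_p 3 foo 5 value 3 bar )
  # where each atom is prefixed with its length, per skel_make_atom().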
exit_code, out, err = main.run_atomic_ra_revprop_change(url, revision,
propname, skel,
want_error)
verify.verify_outputs("Unexpected output", out, err,
expected_stdout, expected_stderr)
verify.verify_exit_code(message, exit_code, expected_exit)
return exit_code, out, err
def run_and_verify_svnlook(message, expected_stdout,
expected_stderr, *varargs):
"""Like run_and_verify_svnlook2, but the expected exit code is
assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
expected_exit = 0
if expected_stderr is not None and expected_stderr != []:
expected_exit = 1
return run_and_verify_svnlook2(message, expected_stdout, expected_stderr,
expected_exit, *varargs)
def run_and_verify_svnlook2(message, expected_stdout, expected_stderr,
expected_exit, *varargs):
"""Run svnlook command and check its output and exit code."""
exit_code, out, err = main.run_svnlook(*varargs)
verify.verify_outputs("Unexpected output", out, err,
expected_stdout, expected_stderr)
verify.verify_exit_code(message, exit_code, expected_exit)
return exit_code, out, err
def run_and_verify_svnadmin(message, expected_stdout,
expected_stderr, *varargs):
"""Like run_and_verify_svnadmin2, but the expected exit code is
assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
expected_exit = 0
if expected_stderr is not None and expected_stderr != []:
expected_exit = 1
return run_and_verify_svnadmin2(message, expected_stdout, expected_stderr,
expected_exit, *varargs)
def run_and_verify_svnadmin2(message, expected_stdout, expected_stderr,
expected_exit, *varargs):
"""Run svnadmin command and check its output and exit code."""
exit_code, out, err = main.run_svnadmin(*varargs)
verify.verify_outputs("Unexpected output", out, err,
expected_stdout, expected_stderr)
verify.verify_exit_code(message, exit_code, expected_exit)
return exit_code, out, err
def run_and_verify_svnversion(message, wc_dir, trail_url,
expected_stdout, expected_stderr, *varargs):
"""like run_and_verify_svnversion2, but the expected exit code is
assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
expected_exit = 0
if expected_stderr is not None and expected_stderr != []:
expected_exit = 1
return run_and_verify_svnversion2(message, wc_dir, trail_url,
expected_stdout, expected_stderr,
expected_exit, *varargs)
def run_and_verify_svnversion2(message, wc_dir, trail_url,
expected_stdout, expected_stderr,
expected_exit, *varargs):
"""Run svnversion command and check its output and exit code."""
if trail_url is None:
exit_code, out, err = main.run_svnversion(wc_dir, *varargs)
else:
exit_code, out, err = main.run_svnversion(wc_dir, trail_url, *varargs)
verify.verify_outputs("Unexpected output", out, err,
expected_stdout, expected_stderr)
verify.verify_exit_code(message, exit_code, expected_exit)
return exit_code, out, err
def run_and_verify_svn(message, expected_stdout, expected_stderr, *varargs):
"""like run_and_verify_svn2, but the expected exit code is assumed to
be 0 if no output is expected on stderr, and 1 otherwise."""
expected_exit = 0
if expected_stderr is not None:
if isinstance(expected_stderr, verify.ExpectedOutput):
if not expected_stderr.matches([]):
expected_exit = 1
elif expected_stderr != []:
expected_exit = 1
return run_and_verify_svn2(message, expected_stdout, expected_stderr,
expected_exit, *varargs)
def run_and_verify_svn2(message, expected_stdout, expected_stderr,
expected_exit, *varargs):
"""Invoke main.run_svn() with *VARARGS. Return exit code as int; stdout,
stderr as lists of lines (including line terminators). For both
EXPECTED_STDOUT and EXPECTED_STDERR, create an appropriate instance of
verify.ExpectedOutput (if necessary):
- If it is an array of strings, create a vanilla ExpectedOutput.
- If it is a single string, create a RegexOutput that must match every
line (for stdout) or any line (for stderr) of the expected output.
- If it is already an instance of ExpectedOutput
(e.g. UnorderedOutput), leave it alone.
...and invoke compare_and_display_lines() on MESSAGE, a label based
on the name of the stream being compared (e.g. STDOUT), the
ExpectedOutput instance, and the actual output.
If EXPECTED_STDOUT is None, do not check stdout.
EXPECTED_STDERR may not be None.
If output checks pass, the expected and actual codes are compared.
If a comparison fails, a Failure will be raised."""
if expected_stderr is None:
raise verify.SVNIncorrectDatatype("expected_stderr must not be None")
want_err = None
if isinstance(expected_stderr, verify.ExpectedOutput):
if not expected_stderr.matches([]):
want_err = True
elif expected_stderr != []:
want_err = True
exit_code, out, err = main.run_svn(want_err, *varargs)
verify.verify_outputs(message, out, err, expected_stdout, expected_stderr)
verify.verify_exit_code(message, exit_code, expected_exit)
return exit_code, out, err
def run_and_verify_load(repo_dir, dump_file_content,
bypass_prop_validation = False):
"Runs 'svnadmin load' and reports any errors."
if not isinstance(dump_file_content, list):
raise TypeError("dump_file_content argument should have list type")
expected_stderr = []
if bypass_prop_validation:
exit_code, output, errput = main.run_command_stdin(
main.svnadmin_binary, expected_stderr, 0, True, dump_file_content,
'load', '--force-uuid', '--quiet', '--bypass-prop-validation', repo_dir)
else:
exit_code, output, errput = main.run_command_stdin(
main.svnadmin_binary, expected_stderr, 0, True, dump_file_content,
'load', '--force-uuid', '--quiet', repo_dir)
verify.verify_outputs("Unexpected stderr output", None, errput,
None, expected_stderr)
def run_and_verify_dump(repo_dir, deltas=False):
"Runs 'svnadmin dump' and reports any errors, returning the dump content."
if deltas:
exit_code, output, errput = main.run_svnadmin('dump', '--deltas',
repo_dir)
else:
exit_code, output, errput = main.run_svnadmin('dump', repo_dir)
verify.verify_outputs("Missing expected output(s)", output, errput,
verify.AnyOutput, verify.AnyOutput)
return output
def run_and_verify_svnrdump(dumpfile_content, expected_stdout,
expected_stderr, expected_exit, *varargs):
"""Runs 'svnrdump dump|load' depending on dumpfile_content and
reports any errors."""
exit_code, output, err = main.run_svnrdump(dumpfile_content, *varargs)
# Since main.run_svnrdump() uses binary mode, normalize the stderr
# line endings on Windows ourselves.
if sys.platform == 'win32':
err = map(lambda x : x.replace('\r\n', '\n'), err)
for index, line in enumerate(err[:]):
if re.search("warning: W200007", line):
del err[index]
verify.verify_outputs("Unexpected output", output, err,
expected_stdout, expected_stderr)
verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
return output
def run_and_verify_svnmucc(message, expected_stdout, expected_stderr,
*varargs):
"""Run svnmucc command and check its output"""
expected_exit = 0
if expected_stderr is not None and expected_stderr != []:
expected_exit = 1
return run_and_verify_svnmucc2(message, expected_stdout, expected_stderr,
expected_exit, *varargs)
def run_and_verify_svnmucc2(message, expected_stdout, expected_stderr,
expected_exit, *varargs):
"""Run svnmucc command and check its output and exit code."""
exit_code, out, err = main.run_svnmucc(*varargs)
verify.verify_outputs("Unexpected output", out, err,
expected_stdout, expected_stderr)
verify.verify_exit_code(message, exit_code, expected_exit)
return exit_code, out, err
def load_repo(sbox, dumpfile_path = None, dump_str = None,
bypass_prop_validation = False):
"Loads the dumpfile into sbox"
if not dump_str:
dump_str = open(dumpfile_path, "rb").read()
# Create a virgin repos and working copy
main.safe_rmtree(sbox.repo_dir, 1)
main.safe_rmtree(sbox.wc_dir, 1)
main.create_repos(sbox.repo_dir)
# Load the mergetracking dumpfile into the repos, and check it out the repo
run_and_verify_load(sbox.repo_dir, dump_str.splitlines(True),
bypass_prop_validation)
run_and_verify_svn(None, None, [], "co", sbox.repo_url, sbox.wc_dir)
return dump_str
def expected_noop_update_output(rev):
"""Return an ExpectedOutput object describing what we'd expect to
see from an update to revision REV that was effectively a no-op (no
server changes transmitted)."""
return verify.createExpectedOutput("Updating '.*':|At revision %d."
% (rev),
"no-op update")
def run_and_verify_svnauthz(message, expected_stdout, expected_stderr,
expected_exit, compat_mode, *varargs):
"""Run svnauthz command and check its output and exit code.
If COMPAT_MODE is True then run the command in pre-1.8
compatibility mode"""
if compat_mode:
exit_code, out, err = main.run_svnauthz_validate(*varargs)
else:
exit_code, out, err = main.run_svnauthz(*varargs)
verify.verify_outputs("Unexpected output", out, err,
expected_stdout, expected_stderr)
verify.verify_exit_code(message, exit_code, expected_exit)
return exit_code, out, err
######################################################################
# Subversion Actions
#
# These are all routines that invoke 'svn' in particular ways, and
# then verify the results by comparing expected trees with actual
# trees.
#
def run_and_verify_checkout2(do_remove,
URL, wc_dir_name, output_tree, disk_tree,
singleton_handler_a = None,
a_baton = None,
singleton_handler_b = None,
b_baton = None,
*args):
"""Checkout the URL into a new directory WC_DIR_NAME. *ARGS are any
extra optional args to the checkout subcommand.
The subcommand output will be verified against OUTPUT_TREE,
and the working copy itself will be verified against DISK_TREE.
For the latter comparison, SINGLETON_HANDLER_A and
SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
function's doc string for more details. Return if successful, raise
on failure.
WC_DIR_NAME is deleted if DO_REMOVE is True.
"""
if isinstance(output_tree, wc.State):
output_tree = output_tree.old_tree()
# Remove dir if it's already there, unless this is a forced checkout.
# In that case assume we want to test a forced checkout's toleration
# of obstructing paths.
if do_remove:
main.safe_rmtree(wc_dir_name)
# Checkout and make a tree of the output, using l:foo/p:bar
### todo: svn should not be prompting for auth info when using
### repositories with no auth/auth requirements
exit_code, output, errput = main.run_svn(None, 'co',
URL, wc_dir_name, *args)
actual = tree.build_tree_from_checkout(output)
# Verify actual output against expected output.
try:
tree.compare_trees("output", actual, output_tree)
except tree.SVNTreeUnequal:
_log_tree_state("ACTUAL OUTPUT TREE:", actual, wc_dir_name)
raise
if disk_tree:
verify_disk(wc_dir_name, disk_tree, False,
singleton_handler_a, a_baton,
singleton_handler_b, b_baton)
def run_and_verify_checkout(URL, wc_dir_name, output_tree, disk_tree,
singleton_handler_a = None,
a_baton = None,
singleton_handler_b = None,
b_baton = None,
*args):
"""Same as run_and_verify_checkout2(), but without the DO_REMOVE arg.
WC_DIR_NAME is deleted if present unless the '--force' option is passed
in *ARGS."""
# Remove dir if it's already there, unless this is a forced checkout.
# In that case assume we want to test a forced checkout's toleration
# of obstructing paths.
return run_and_verify_checkout2(('--force' not in args),
URL, wc_dir_name, output_tree, disk_tree,
singleton_handler_a,
a_baton,
singleton_handler_b,
b_baton,
*args)
def run_and_verify_export(URL, export_dir_name, output_tree, disk_tree,
*args):
"""Export the URL into a new directory WC_DIR_NAME.
The subcommand output will be verified against OUTPUT_TREE,
and the exported copy itself will be verified against DISK_TREE.
Return if successful, raise on failure.
"""
assert isinstance(output_tree, wc.State)
assert isinstance(disk_tree, wc.State)
disk_tree = disk_tree.old_tree()
output_tree = output_tree.old_tree()
# Export and make a tree of the output, using l:foo/p:bar
### todo: svn should not be prompting for auth info when using
### repositories with no auth/auth requirements
exit_code, output, errput = main.run_svn(None, 'export',
URL, export_dir_name, *args)
actual = tree.build_tree_from_checkout(output)
# Verify actual output against expected output.
try:
tree.compare_trees("output", actual, output_tree)
except tree.SVNTreeUnequal:
_log_tree_state("ACTUAL OUTPUT TREE:", actual, export_dir_name)
raise
# Create a tree by scanning the working copy. Don't ignore
# the .svn directories so that we generate an error if they
# happen to show up.
actual = tree.build_tree_from_wc(export_dir_name, ignore_svn=False)
# Verify expected disk against actual disk.
try:
tree.compare_trees("disk", actual, disk_tree)
except tree.SVNTreeUnequal:
_log_tree_state("ACTUAL DISK TREE:", actual, export_dir_name)
raise
# run_and_verify_log_xml
class LogEntry:
def __init__(self, revision, changed_paths=None, revprops=None):
self.revision = revision
if changed_paths == None:
self.changed_paths = {}
else:
self.changed_paths = changed_paths
if revprops == None:
self.revprops = {}
else:
self.revprops = revprops
def assert_changed_paths(self, changed_paths):
"""Assert that changed_paths is the same as this entry's changed_paths
Raises svntest.Failure if not.
"""
if self.changed_paths != changed_paths:
raise Failure('\n' + '\n'.join(difflib.ndiff(
pprint.pformat(changed_paths).splitlines(),
pprint.pformat(self.changed_paths).splitlines())))
def assert_revprops(self, revprops):
"""Assert that the dict revprops is the same as this entry's revprops.
Raises svntest.Failure if not.
"""
if self.revprops != revprops:
raise Failure('\n' + '\n'.join(difflib.ndiff(
pprint.pformat(revprops).splitlines(),
pprint.pformat(self.revprops).splitlines())))
class LogParser:
def parse(self, data):
"""Return a list of LogEntrys parsed from the sequence of strings data.
This is the only method of interest to callers.
"""
try:
for i in data:
self.parser.Parse(i)
self.parser.Parse('', True)
except xml.parsers.expat.ExpatError, e:
raise verify.SVNUnexpectedStdout('%s\n%s\n' % (e, ''.join(data),))
return self.entries
def __init__(self):
# for expat
self.parser = xml.parsers.expat.ParserCreate()
self.parser.StartElementHandler = self.handle_start_element
self.parser.EndElementHandler = self.handle_end_element
self.parser.CharacterDataHandler = self.handle_character_data
# Ignore some things.
self.ignore_elements('log', 'paths', 'revprops')
self.ignore_tags('logentry_end', 'author_start', 'date_start', 'msg_start')
# internal state
self.cdata = []
self.property = None
self.kind = None
self.action = None
# the result
self.entries = []
def ignore(self, *args, **kwargs):
del self.cdata[:]
def ignore_tags(self, *args):
for tag in args:
setattr(self, tag, self.ignore)
def ignore_elements(self, *args):
for element in args:
self.ignore_tags(element + '_start', element + '_end')
# expat handlers
def handle_start_element(self, name, attrs):
getattr(self, name + '_start')(attrs)
def handle_end_element(self, name):
getattr(self, name + '_end')()
def handle_character_data(self, data):
self.cdata.append(data)
# element handler utilities
def use_cdata(self):
result = ''.join(self.cdata).strip()
del self.cdata[:]
return result
def svn_prop(self, name):
self.entries[-1].revprops['svn:' + name] = self.use_cdata()
# element handlers
def logentry_start(self, attrs):
self.entries.append(LogEntry(int(attrs['revision'])))
def author_end(self):
self.svn_prop('author')
def msg_end(self):
self.svn_prop('log')
def date_end(self):
# svn:date could be anything, so just note its presence.
self.cdata[:] = ['']
self.svn_prop('date')
def property_start(self, attrs):
self.property = attrs['name']
def property_end(self):
self.entries[-1].revprops[self.property] = self.use_cdata()
def path_start(self, attrs):
self.kind = attrs['kind']
self.action = attrs['action']
def path_end(self):
self.entries[-1].changed_paths[self.use_cdata()] = [{'kind': self.kind,
'action': self.action}]
def run_and_verify_log_xml(message=None, expected_paths=None,
expected_revprops=None, expected_stdout=None,
expected_stderr=None, args=[]):
"""Call run_and_verify_svn with log --xml and args (optional) as command
arguments, and pass along message, expected_stdout, and expected_stderr.
If message is None, pass the svn log command as message.
expected_paths checking is not yet implemented.
expected_revprops is an optional list of dicts, compared to each
revision's revprops. The list must be in the same order the log entries
come in. Any svn:date revprops in the dicts must be '' in order to
match, as the actual dates could be anything.
expected_paths and expected_revprops are ignored if expected_stdout or
expected_stderr is specified.
"""
  if message is None:
    message = ' '.join(args)
# We'll parse the output unless the caller specifies expected_stderr or
# expected_stdout for run_and_verify_svn.
parse = True
  if expected_stderr is None:
    expected_stderr = []
  else:
    parse = False
  if expected_stdout is not None:
    parse = False
log_args = list(args)
  if expected_paths is not None:
log_args.append('-v')
(exit_code, stdout, stderr) = run_and_verify_svn(
message, expected_stdout, expected_stderr,
'log', '--xml', *log_args)
if not parse:
return
entries = LogParser().parse(stdout)
for index in range(len(entries)):
entry = entries[index]
    if expected_revprops is not None:
      entry.assert_revprops(expected_revprops[index])
    if expected_paths is not None:
      entry.assert_changed_paths(expected_paths[index])
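# A minimal usage sketch (not part of the original suite; the 'sbox' fixture
# and revprop values are hypothetical).  Note that any svn:date revprop must
# be given as '' to match, per the docstring above.
#
#   run_and_verify_log_xml(
#     expected_revprops=[{'svn:author': 'jrandom',
#                         'svn:date': '',
#                         'svn:log': 'log msg'}],
#     args=['-r1', sbox.repo_url])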
def verify_update(actual_output,
actual_mergeinfo_output,
actual_elision_output,
wc_dir_name,
output_tree,
mergeinfo_output_tree,
elision_output_tree,
disk_tree,
status_tree,
singleton_handler_a=None,
a_baton=None,
singleton_handler_b=None,
b_baton=None,
check_props=False):
"""Verify update of WC_DIR_NAME.
The subcommand output (found in ACTUAL_OUTPUT, ACTUAL_MERGEINFO_OUTPUT,
and ACTUAL_ELISION_OUTPUT) will be verified against OUTPUT_TREE,
MERGEINFO_OUTPUT_TREE, and ELISION_OUTPUT_TREE respectively (if any of
these is provided, they may be None in which case a comparison is not
done). The working copy itself will be verified against DISK_TREE (if
provided), and the working copy's 'svn status' output will be verified
against STATUS_TREE (if provided). (This is a good way to check that
revision numbers were bumped.)
Return if successful, raise on failure.
For the comparison with DISK_TREE, pass SINGLETON_HANDLER_A and
SINGLETON_HANDLER_B to tree.compare_trees -- see that function's doc
string for more details. If CHECK_PROPS is set, then disk
comparison will examine props."""
if isinstance(actual_output, wc.State):
actual_output = actual_output.old_tree()
if isinstance(actual_mergeinfo_output, wc.State):
actual_mergeinfo_output = actual_mergeinfo_output.old_tree()
if isinstance(actual_elision_output, wc.State):
actual_elision_output = actual_elision_output.old_tree()
if isinstance(output_tree, wc.State):
output_tree = output_tree.old_tree()
if isinstance(mergeinfo_output_tree, wc.State):
mergeinfo_output_tree = mergeinfo_output_tree.old_tree()
if isinstance(elision_output_tree, wc.State):
elision_output_tree = elision_output_tree.old_tree()
# Verify actual output against expected output.
if output_tree:
try:
tree.compare_trees("output", actual_output, output_tree)
except tree.SVNTreeUnequal:
_log_tree_state("ACTUAL OUTPUT TREE:", actual_output, wc_dir_name)
raise
# Verify actual mergeinfo recording output against expected output.
if mergeinfo_output_tree:
try:
tree.compare_trees("mergeinfo_output", actual_mergeinfo_output,
mergeinfo_output_tree)
except tree.SVNTreeUnequal:
_log_tree_state("ACTUAL MERGEINFO OUTPUT TREE:", actual_mergeinfo_output,
wc_dir_name)
raise
# Verify actual mergeinfo elision output against expected output.
if elision_output_tree:
try:
tree.compare_trees("elision_output", actual_elision_output,
elision_output_tree)
except tree.SVNTreeUnequal:
_log_tree_state("ACTUAL ELISION OUTPUT TREE:", actual_elision_output,
wc_dir_name)
raise
# Create a tree by scanning the working copy, and verify it
if disk_tree:
verify_disk(wc_dir_name, disk_tree, check_props,
singleton_handler_a, a_baton,
singleton_handler_b, b_baton)
# Verify via 'status' command too, if possible.
if status_tree:
run_and_verify_status(wc_dir_name, status_tree)
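# Callers normally reach verify_update() through run_and_verify_update,
# run_and_verify_switch or run_and_verify_merge below; a direct call looks
# roughly like this (all trees hypothetical; any of them may be None):
#
#   verify_update(actual_output, None, None, wc_dir,
#                 expected_output,   # checked against actual_output
#                 None, None,        # mergeinfo/elision output trees
#                 expected_disk,     # checked via verify_disk()
#                 expected_status)   # checked via run_and_verify_status()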
def verify_disk(wc_dir_name, disk_tree, check_props=False,
singleton_handler_a = None, a_baton = None,
singleton_handler_b = None, b_baton = None):
"""Verify WC_DIR_NAME against DISK_TREE. If CHECK_PROPS is set,
the comparison will examin props. Returns if successful, raises on
failure."""
if isinstance(disk_tree, wc.State):
disk_tree = disk_tree.old_tree()
actual_disk = tree.build_tree_from_wc(wc_dir_name, check_props)
try:
tree.compare_trees("disk", actual_disk, disk_tree,
singleton_handler_a, a_baton,
singleton_handler_b, b_baton)
except tree.SVNTreeUnequal:
_log_tree_state("EXPECTED DISK TREE:", disk_tree)
_log_tree_state("ACTUAL DISK TREE:", actual_disk)
raise
def run_and_verify_update(wc_dir_name,
output_tree, disk_tree, status_tree,
error_re_string = None,
singleton_handler_a = None,
a_baton = None,
singleton_handler_b = None,
b_baton = None,
check_props = False,
*args):
"""Update WC_DIR_NAME. *ARGS are any extra optional args to the
update subcommand. NOTE: If *ARGS is specified at all, explicit
target paths must be passed in *ARGS as well (or a default `.' will
be chosen by the 'svn' binary). This allows the caller to update
many items in a single working copy dir, but still verify the entire
working copy dir.
If ERROR_RE_STRING, the update must exit with error, and the error
message must match regular expression ERROR_RE_STRING.
If OUTPUT_TREE is not None, the subcommand output will be verified
against OUTPUT_TREE. If DISK_TREE is not None, the working copy
itself will be verified against DISK_TREE. If STATUS_TREE is not
None, the 'svn status' output will be verified against STATUS_TREE.
(This is a good way to check that revision numbers were bumped.)
For the DISK_TREE verification, SINGLETON_HANDLER_A and
SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
function's doc string for more details.
If CHECK_PROPS is set, then disk comparison will examine props.
Return if successful, raise on failure."""
# Update and make a tree of the output.
  if args:
    exit_code, output, errput = main.run_svn(error_re_string, 'up', *args)
  else:
    exit_code, output, errput = main.run_svn(error_re_string,
                                             'up', wc_dir_name)
if error_re_string:
rm = re.compile(error_re_string)
match = None
for line in errput:
match = rm.search(line)
if match:
break
if not match:
raise main.SVNUnmatchedError
actual = wc.State.from_checkout(output)
verify_update(actual, None, None, wc_dir_name,
output_tree, None, None, disk_tree, status_tree,
singleton_handler_a, a_baton,
singleton_handler_b, b_baton,
check_props)
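# Example sketch, assuming a standard greek-tree sandbox (names such as
# 'wc_dir' and the expected trees are hypothetical):
#
#   expected_output = wc.State(wc_dir, {
#     'iota' : Item(status='U '),
#   })
#   expected_disk = main.greek_state.copy()
#   expected_status = get_virginal_state(wc_dir, 2)
#   run_and_verify_update(wc_dir, expected_output, expected_disk,
#                         expected_status)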
def run_and_parse_info(*args):
"""Run 'svn info ARGS' and parse its output into a list of dicts,
one dict per reported node."""
# the returned array
all_infos = []
# per-target variables
iter_info = {}
prev_key = None
lock_comment_lines = 0
lock_comments = []
exit_code, output, errput = main.run_svn(None, 'info', *args)
for line in output:
line = line[:-1] # trim '\n'
if lock_comment_lines > 0:
# mop up any lock comment lines
lock_comments.append(line)
lock_comment_lines = lock_comment_lines - 1
if lock_comment_lines == 0:
iter_info[prev_key] = lock_comments
elif len(line) == 0:
# separator line between items
all_infos.append(iter_info)
iter_info = {}
prev_key = None
lock_comment_lines = 0
lock_comments = []
elif line[0].isspace():
# continuation line (for tree conflicts)
iter_info[prev_key] += line[1:]
else:
# normal line
key, value = line.split(':', 1)
      if re.search(r' \(\d+ lines?\)$', key):
        # numbered continuation lines
        match = re.match(r'^(.*) \((\d+) lines?\)$', key)
key = match.group(1)
lock_comment_lines = int(match.group(2))
elif len(value) > 1:
# normal normal line
iter_info[key] = value[1:]
else:
### originally added for "Tree conflict:\n" lines;
### tree-conflicts output format has changed since then
# continuation lines are implicit (prefixed by whitespace)
iter_info[key] = ''
prev_key = key
return all_infos
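# Sketch of the returned structure (values hypothetical): one dict per node,
# keyed on the before-the-colon part of each 'svn info' line, e.g.
#
#   infos = run_and_parse_info(wc_dir)
#   uuid = infos[0]['Repository UUID']
#   base_rev = infos[0]['Revision']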
def run_and_verify_info(expected_infos, *args):
"""Run 'svn info' with the arguments in *ARGS and verify the results
against expected_infos. The latter should be a list of dicts, one dict
per reported node, in the order in which the 'Path' fields of the output
will appear after sorting them as Python strings. (The dicts in
EXPECTED_INFOS, however, need not have a 'Path' key.)
In the dicts, each key is the before-the-colon part of the 'svn info' output,
and each value is either None (meaning that the key should *not* appear in
the 'svn info' output) or a regex matching the output value. Output lines
not matching a key in the dict are ignored.
Return if successful, raise on failure."""
actual_infos = run_and_parse_info(*args)
actual_infos.sort(key=lambda info: info['Path'])
try:
# zip() won't complain, so check this manually
if len(actual_infos) != len(expected_infos):
raise verify.SVNUnexpectedStdout(
"Expected %d infos, found %d infos"
% (len(expected_infos), len(actual_infos)))
for actual, expected in zip(actual_infos, expected_infos):
# compare dicts
for key, value in expected.items():
assert ':' not in key # caller passed impossible expectations?
if value is None and key in actual:
raise main.SVNLineUnequal("Found unexpected key '%s' with value '%s'"
% (key, actual[key]))
if value is not None and key not in actual:
raise main.SVNLineUnequal("Expected key '%s' (with value '%s') "
"not found" % (key, value))
if value is not None and not re.match(value, actual[key]):
raise verify.SVNUnexpectedStdout("Values of key '%s' don't match:\n"
" Expected: '%s' (regex)\n"
" Found: '%s' (string)\n"
% (key, value, actual[key]))
except:
sys.stderr.write("Bad 'svn info' output:\n"
" Received: %s\n"
" Expected: %s\n"
% (actual_infos, expected_infos))
raise
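# Example sketch (paths and regexes hypothetical): require a 'URL' key on
# both nodes, forbid 'Lock Token' on the first, and remember that values
# are regexes matched with re.match() while None means "must be absent".
#
#   run_and_verify_info([{'URL': '.*/A$',    'Lock Token': None},
#                        {'URL': '.*/iota$', 'Revision': '[0-9]+'}],
#                       sbox.ospath('A'), sbox.ospath('iota'))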
def run_and_verify_merge(dir, rev1, rev2, url1, url2,
output_tree,
mergeinfo_output_tree,
elision_output_tree,
disk_tree, status_tree, skip_tree,
error_re_string = None,
singleton_handler_a = None,
a_baton = None,
singleton_handler_b = None,
b_baton = None,
check_props = False,
dry_run = True,
*args):
"""Run 'svn merge URL1@REV1 URL2@REV2 DIR' if URL2 is not None
(for a three-way merge between URLs and WC).
If URL2 is None, run 'svn merge -rREV1:REV2 URL1 DIR'. If both REV1
and REV2 are None, leave off the '-r' argument.
If ERROR_RE_STRING, the merge must exit with error, and the error
message must match regular expression ERROR_RE_STRING.
Else if ERROR_RE_STRING is None, then:
The subcommand output will be verified against OUTPUT_TREE. Output
related to mergeinfo notifications will be verified against
MERGEINFO_OUTPUT_TREE if that is not None. Output related to mergeinfo
elision will be verified against ELISION_OUTPUT_TREE if that is not None.
The working copy itself will be verified against DISK_TREE. If optional
STATUS_TREE is given, then 'svn status' output will be compared. The
'skipped' merge output will be compared to SKIP_TREE.
For the DISK_TREE verification, SINGLETON_HANDLER_A and
SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
function's doc string for more details.
If CHECK_PROPS is set, then disk comparison will examine props.
If DRY_RUN is set then a --dry-run merge will be carried out first and
the output compared with that of the full merge.
Return if successful, raise on failure.
*ARGS are any extra optional args to the merge subcommand.
NOTE: If *ARGS is specified at all, an explicit target path must be passed
in *ARGS as well. This allows the caller to merge into single items inside
the working copy, but still verify the entire working copy dir. """
merge_command = [ "merge" ]
if url2:
merge_command.extend((url1 + "@" + str(rev1), url2 + "@" + str(rev2)))
else:
if not (rev1 is None and rev2 is None):
merge_command.append("-r" + str(rev1) + ":" + str(rev2))
merge_command.append(url1)
if len(args) == 0:
merge_command.append(dir)
merge_command = tuple(merge_command)
if dry_run:
pre_disk = tree.build_tree_from_wc(dir)
dry_run_command = merge_command + ('--dry-run',)
dry_run_command = dry_run_command + args
exit_code, out_dry, err_dry = main.run_svn(error_re_string,
*dry_run_command)
post_disk = tree.build_tree_from_wc(dir)
try:
tree.compare_trees("disk", post_disk, pre_disk)
except tree.SVNTreeError:
logger.warn("=============================================================")
logger.warn("Dry-run merge altered working copy")
logger.warn("=============================================================")
raise
# Update and make a tree of the output.
merge_command = merge_command + args
exit_code, out, err = main.run_svn(error_re_string, *merge_command)
if error_re_string:
if not error_re_string.startswith(".*"):
error_re_string = ".*(" + error_re_string + ")"
expected_err = verify.RegexOutput(error_re_string, match_all=False)
verify.verify_outputs(None, None, err, None, expected_err)
return
elif err:
raise verify.SVNUnexpectedStderr(err)
# Split the output into that related to application of the actual diff
# and that related to the recording of mergeinfo describing the merge.
merge_diff_out = []
mergeinfo_notification_out = []
mergeinfo_elision_out = []
mergeinfo_notifications = False
elision_notifications = False
for line in out:
if line.startswith('--- Recording'):
mergeinfo_notifications = True
elision_notifications = False
elif line.startswith('--- Eliding'):
mergeinfo_notifications = False
elision_notifications = True
elif line.startswith('--- Merging') or \
line.startswith('--- Reverse-merging') or \
line.startswith('Summary of conflicts') or \
line.startswith('Skipped missing target'):
mergeinfo_notifications = False
elision_notifications = False
if mergeinfo_notifications:
mergeinfo_notification_out.append(line)
elif elision_notifications:
mergeinfo_elision_out.append(line)
else:
merge_diff_out.append(line)
if dry_run and merge_diff_out != out_dry:
# Due to the way ra_serf works, it's possible that the dry-run and
# real merge operations did the same thing, but the output came in
# a different order. Let's see if maybe that's the case by comparing
# the outputs as unordered sets rather than as lists.
#
# This now happens for other RA layers with modern APR because the
# hash order now varies.
#
# The different orders of the real and dry-run merges may cause
# the "Merging rX through rY into" lines to be duplicated a
# different number of times in the two outputs. The list-set
# conversion removes duplicates so these differences are ignored.
# It also removes "U some/path" duplicate lines. Perhaps we
# should avoid that?
    out_copy = set(merge_diff_out)
    out_dry_copy = set(out_dry)
if out_copy != out_dry_copy:
logger.warn("=============================================================")
logger.warn("Merge outputs differ")
logger.warn("The dry-run merge output:")
for x in out_dry:
logger.warn(x)
logger.warn("The full merge output:")
for x in merge_diff_out:
logger.warn(x)
logger.warn("=============================================================")
raise main.SVNUnmatchedError
def missing_skip(a, b):
logger.warn("=============================================================")
logger.warn("Merge failed to skip: %s", a.path)
logger.warn("=============================================================")
raise Failure
def extra_skip(a, b):
logger.warn("=============================================================")
logger.warn("Merge unexpectedly skipped: %s", a.path)
logger.warn("=============================================================")
raise Failure
myskiptree = tree.build_tree_from_skipped(out)
if isinstance(skip_tree, wc.State):
skip_tree = skip_tree.old_tree()
try:
tree.compare_trees("skip", myskiptree, skip_tree,
extra_skip, None, missing_skip, None)
except tree.SVNTreeUnequal:
_log_tree_state("ACTUAL SKIP TREE:", myskiptree, dir)
raise
actual_diff = svntest.wc.State.from_checkout(merge_diff_out, False)
actual_mergeinfo = svntest.wc.State.from_checkout(mergeinfo_notification_out,
False)
actual_elision = svntest.wc.State.from_checkout(mergeinfo_elision_out,
False)
verify_update(actual_diff, actual_mergeinfo, actual_elision, dir,
output_tree, mergeinfo_output_tree, elision_output_tree,
disk_tree, status_tree,
singleton_handler_a, a_baton,
singleton_handler_b, b_baton,
check_props)
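# Example sketch of a revision-range merge (the URL2=None form; the sandbox
# and all expected trees are hypothetical):
#
#   run_and_verify_merge(A_COPY_path, 2, 3, sbox.repo_url + '/A', None,
#                        expected_output, expected_mergeinfo_output,
#                        expected_elision_output, expected_disk,
#                        expected_status, expected_skip,
#                        check_props=True)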
def run_and_verify_patch(dir, patch_path,
output_tree, disk_tree, status_tree, skip_tree,
error_re_string=None,
check_props=False,
dry_run=True,
*args):
"""Run 'svn patch patch_path DIR'.
If ERROR_RE_STRING, 'svn patch' must exit with error, and the error
message must match regular expression ERROR_RE_STRING.
The subcommand output will be verified against OUTPUT_TREE, and the
working copy itself will be verified against DISK_TREE. If optional
STATUS_TREE is given, then 'svn status' output will be compared.
The 'skipped' merge output will be compared to SKIP_TREE.
If CHECK_PROPS is set, then disk comparison will examine props.
If DRY_RUN is set then a --dry-run patch will be carried out first and
the output compared with that of the full patch application.
Returns if successful, raises on failure."""
patch_command = [ "patch" ]
patch_command.append(patch_path)
patch_command.append(dir)
patch_command = tuple(patch_command)
if dry_run:
pre_disk = tree.build_tree_from_wc(dir)
dry_run_command = patch_command + ('--dry-run',)
dry_run_command = dry_run_command + args
exit_code, out_dry, err_dry = main.run_svn(error_re_string,
*dry_run_command)
post_disk = tree.build_tree_from_wc(dir)
try:
tree.compare_trees("disk", post_disk, pre_disk)
except tree.SVNTreeError:
logger.warn("=============================================================")
logger.warn("'svn patch --dry-run' altered working copy")
logger.warn("=============================================================")
raise
# Update and make a tree of the output.
patch_command = patch_command + args
exit_code, out, err = main.run_svn(True, *patch_command)
if error_re_string:
rm = re.compile(error_re_string)
match = None
for line in err:
match = rm.search(line)
if match:
break
if not match:
raise main.SVNUnmatchedError
elif err:
logger.warn("UNEXPECTED STDERR:")
for x in err:
logger.warn(x)
raise verify.SVNUnexpectedStderr
if dry_run and out != out_dry:
# APR hash order means the output order can vary, assume everything is OK
# if only the order changes.
out_dry_expected = svntest.verify.UnorderedOutput(out)
verify.compare_and_display_lines('dry-run patch output not as expected',
'', out_dry_expected, out_dry)
def missing_skip(a, b):
logger.warn("=============================================================")
logger.warn("'svn patch' failed to skip: %s", a.path)
logger.warn("=============================================================")
raise Failure
def extra_skip(a, b):
logger.warn("=============================================================")
logger.warn("'svn patch' unexpectedly skipped: %s", a.path)
logger.warn("=============================================================")
raise Failure
myskiptree = tree.build_tree_from_skipped(out)
if isinstance(skip_tree, wc.State):
skip_tree = skip_tree.old_tree()
tree.compare_trees("skip", myskiptree, skip_tree,
extra_skip, None, missing_skip, None)
mytree = tree.build_tree_from_checkout(out, 0)
# when the expected output is a list, we want a line-by-line
# comparison to happen instead of a tree comparison
  if isinstance(output_tree, (list, verify.UnorderedOutput)):
verify.verify_outputs(None, out, err, output_tree, error_re_string)
output_tree = None
verify_update(mytree, None, None, dir,
output_tree, None, None, disk_tree, status_tree,
check_props=check_props)
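# Example sketch (the patch contents and expected trees are hypothetical):
#
#   patch_file = os.path.join(wc_dir, 'my.patch')
#   main.file_write(patch_file, unidiff_text)
#   run_and_verify_patch(wc_dir, patch_file,
#                        expected_output, expected_disk,
#                        expected_status, expected_skip,
#                        check_props=True)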
def run_and_verify_mergeinfo(error_re_string = None,
expected_output = [],
*args):
"""Run 'svn mergeinfo ARGS', and compare the result against
EXPECTED_OUTPUT, a list of string representations of revisions
expected in the output. Raise an exception if an unexpected
output is encountered."""
mergeinfo_command = ["mergeinfo"]
mergeinfo_command.extend(args)
exit_code, out, err = main.run_svn(error_re_string, *mergeinfo_command)
if error_re_string:
if not error_re_string.startswith(".*"):
error_re_string = ".*(" + error_re_string + ")"
expected_err = verify.RegexOutput(error_re_string, match_all=False)
verify.verify_outputs(None, None, err, None, expected_err)
return
out = [_f for _f in [x.rstrip()[1:] for x in out] if _f]
extra_out = []
if out != expected_output:
exp_hash = dict.fromkeys(expected_output)
for rev in out:
if rev in exp_hash:
        del exp_hash[rev]
else:
extra_out.append(rev)
extra_exp = list(exp_hash.keys())
raise Exception("Unexpected 'svn mergeinfo' output:\n"
" expected but not found: %s\n"
" found but not expected: %s"
% (', '.join([str(x) for x in extra_exp]),
', '.join([str(x) for x in extra_out])))
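# Example sketch (paths hypothetical): expect r4 and r5 to be reported.
# The expected strings omit the leading 'r', since the first character of
# each output line is stripped above.
#
#   run_and_verify_mergeinfo(None, ['4', '5'],
#                            sbox.repo_url + '/A', sbox.ospath('A_COPY'))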
def run_and_verify_switch(wc_dir_name,
wc_target,
switch_url,
output_tree, disk_tree, status_tree,
error_re_string = None,
singleton_handler_a = None,
a_baton = None,
singleton_handler_b = None,
b_baton = None,
check_props = False,
*args):
"""Switch WC_TARGET (in working copy dir WC_DIR_NAME) to SWITCH_URL.
If ERROR_RE_STRING, the switch must exit with error, and the error
message must match regular expression ERROR_RE_STRING.
Else if ERROR_RE_STRING is None, then:
The subcommand output will be verified against OUTPUT_TREE, and the
working copy itself will be verified against DISK_TREE. If optional
STATUS_TREE is given, then 'svn status' output will be
compared. (This is a good way to check that revision numbers were
bumped.)
For the DISK_TREE verification, SINGLETON_HANDLER_A and
SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
function's doc string for more details.
If CHECK_PROPS is set, then disk comparison will examine props.
Return if successful, raise on failure."""
# Update and make a tree of the output.
exit_code, output, errput = main.run_svn(error_re_string, 'switch',
switch_url, wc_target, *args)
if error_re_string:
if not error_re_string.startswith(".*"):
error_re_string = ".*(" + error_re_string + ")"
expected_err = verify.RegexOutput(error_re_string, match_all=False)
verify.verify_outputs(None, None, errput, None, expected_err)
elif errput:
    raise verify.SVNUnexpectedStderr(errput)
actual = wc.State.from_checkout(output)
verify_update(actual, None, None, wc_dir_name,
output_tree, None, None, disk_tree, status_tree,
singleton_handler_a, a_baton,
singleton_handler_b, b_baton,
check_props)
def process_output_for_commit(output, error_re_string):
"""Helper for run_and_verify_commit(), also used in the factory."""
# Remove the final output line, and verify that the commit succeeded.
lastline = ""
rest = []
def external_removal(line):
return line.startswith('Removing external') \
or line.startswith('Removed external')
if len(output):
lastline = output.pop().strip()
while len(output) and external_removal(lastline):
rest.append(lastline)
lastline = output.pop().strip()
cm = re.compile("(Committed|Imported) revision [0-9]+.")
match = cm.search(lastline)
if not match and not error_re_string:
logger.warn("ERROR: commit did not succeed.")
logger.warn("The final line from 'svn ci' was:")
logger.warn(lastline)
raise main.SVNCommitFailure
# The new 'final' line in the output is either a regular line that
# mentions {Adding, Deleting, Sending, ...}, or it could be a line
# that says "Transmitting file data ...". If the latter case, we
# want to remove the line from the output; it should be ignored when
# building a tree.
if len(output):
lastline = output.pop()
tm = re.compile("Transmitting file data.+")
match = tm.search(lastline)
if not match:
# whoops, it was important output, put it back.
output.append(lastline)
if len(rest):
output.extend(rest)
return output
def run_and_verify_commit(wc_dir_name, output_tree, status_tree,
error_re_string = None,
*args):
"""Commit and verify results within working copy WC_DIR_NAME,
sending ARGS to the commit subcommand.
The subcommand output will be verified against OUTPUT_TREE. If
optional STATUS_TREE is given, then 'svn status' output will
be compared. (This is a good way to check that revision numbers
were bumped.)
If ERROR_RE_STRING is None, the commit must not exit with error. If
ERROR_RE_STRING is a string, the commit must exit with error, and
the error message must match regular expression ERROR_RE_STRING.
Return if successful, raise on failure."""
if isinstance(output_tree, wc.State):
output_tree = output_tree.old_tree()
# Commit.
if '-m' not in args and '-F' not in args:
args = list(args) + ['-m', 'log msg']
exit_code, output, errput = main.run_svn(error_re_string, 'ci',
*args)
if error_re_string:
if not error_re_string.startswith(".*"):
error_re_string = ".*(" + error_re_string + ")"
expected_err = verify.RegexOutput(error_re_string, match_all=False)
verify.verify_outputs(None, None, errput, None, expected_err)
# Else not expecting error:
# Convert the output into a tree.
output = process_output_for_commit(output, error_re_string)
actual = tree.build_tree_from_commit(output)
# Verify actual output against expected output.
if output_tree:
try:
tree.compare_trees("output", actual, output_tree)
except tree.SVNTreeError:
verify.display_trees("Output of commit is unexpected",
"OUTPUT TREE", output_tree, actual)
_log_tree_state("ACTUAL OUTPUT TREE:", actual, wc_dir_name)
raise
# Verify via 'status' command too, if possible.
if status_tree:
run_and_verify_status(wc_dir_name, status_tree)
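# Example sketch (states hypothetical); see inject_conflict_into_wc() below
# for a real caller:
#
#   expected_output = wc.State(wc_dir, {
#     'iota' : Item(verb='Sending'),
#   })
#   expected_status.tweak('iota', wc_rev=2)
#   run_and_verify_commit(wc_dir, expected_output, expected_status,
#                         None, sbox.ospath('iota'))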
# This function always passes '-q' to the status command, which
# suppresses the printing of any unversioned or nonexistent items.
def run_and_verify_status(wc_dir_name, status_tree,
singleton_handler_a = None,
a_baton = None,
singleton_handler_b = None,
b_baton = None):
"""Run 'status' on WC_DIR_NAME and compare it with the
expected STATUS_TREE. SINGLETON_HANDLER_A and SINGLETON_HANDLER_B will
be passed to tree.compare_trees - see that function's doc string for
more details.
Returns on success, raises on failure."""
exit_code, output, errput = main.run_svn(None, 'status', '-v', '-u', '-q',
wc_dir_name)
actual_status = svntest.wc.State.from_status(output)
# Verify actual output against expected output.
if isinstance(status_tree, wc.State):
try:
status_tree.compare_and_display('status', actual_status)
except tree.SVNTreeError:
_log_tree_state("ACTUAL STATUS TREE:", actual_status.old_tree(),
wc_dir_name)
raise
else:
actual_status = actual_status.old_tree()
try:
tree.compare_trees("status", actual_status, status_tree,
singleton_handler_a, a_baton,
singleton_handler_b, b_baton)
except tree.SVNTreeError:
verify.display_trees(None, 'STATUS OUTPUT TREE', status_tree, actual_status)
_log_tree_state("ACTUAL STATUS TREE:", actual_status, wc_dir_name)
raise
# if we have an output State, and we can/are-allowed to create an
# entries-based State, then compare the two.
if isinstance(status_tree, wc.State):
actual_entries = wc.State.from_entries(wc_dir_name)
if actual_entries:
tweaked = status_tree.copy()
tweaked.tweak_for_entries_compare()
try:
tweaked.compare_and_display('entries', actual_entries)
except tree.SVNTreeUnequal:
### do something more
raise
# A variant of previous func, but doesn't pass '-q'. This allows us
# to verify unversioned or nonexistent items in the list.
def run_and_verify_unquiet_status(wc_dir_name, status_tree):
"""Run 'status' on WC_DIR_NAME and compare it with the
expected STATUS_TREE.
Returns on success, raises on failure."""
exit_code, output, errput = main.run_svn(None, 'status', '-v',
'-u', wc_dir_name)
actual_status = svntest.wc.State.from_status(output)
# Verify actual output against expected output.
if isinstance(status_tree, wc.State):
try:
status_tree.compare_and_display('unquiet status', actual_status)
except tree.SVNTreeError:
_log_tree_state("ACTUAL STATUS TREE:",
actual_status.normalize().old_tree(), wc_dir_name)
raise
else:
actual_status = actual_status.old_tree()
try:
tree.compare_trees("UNQUIET STATUS", actual_status, status_tree)
except tree.SVNTreeError:
_log_tree_state("ACTUAL UNQUIET STATUS TREE:", actual_status,
wc_dir_name)
raise
def run_and_verify_status_xml(expected_entries = [],
*args):
""" Run 'status --xml' with arguments *ARGS. If successful the output
is parsed into an XML document and will be verified by comparing against
EXPECTED_ENTRIES.
"""
exit_code, output, errput = run_and_verify_svn(None, None, [],
'status', '--xml', *args)
if len(errput) > 0:
raise Failure
doc = parseString(''.join(output))
entries = doc.getElementsByTagName('entry')
def getText(nodelist):
rc = []
for node in nodelist:
if node.nodeType == node.TEXT_NODE:
rc.append(node.data)
return ''.join(rc)
actual_entries = {}
for entry in entries:
wcstatus = entry.getElementsByTagName('wc-status')[0]
commit = entry.getElementsByTagName('commit')
author = entry.getElementsByTagName('author')
rstatus = entry.getElementsByTagName('repos-status')
actual_entry = {'wcprops' : wcstatus.getAttribute('props'),
'wcitem' : wcstatus.getAttribute('item'),
}
if wcstatus.hasAttribute('revision'):
actual_entry['wcrev'] = wcstatus.getAttribute('revision')
    if commit:
      actual_entry['crev'] = commit[0].getAttribute('revision')
    if author:
      actual_entry['author'] = getText(author[0].childNodes)
    if rstatus:
      actual_entry['rprops'] = rstatus[0].getAttribute('props')
      actual_entry['ritem'] = rstatus[0].getAttribute('item')
actual_entries[entry.getAttribute('path')] = actual_entry
if expected_entries != actual_entries:
raise Failure('\n' + '\n'.join(difflib.ndiff(
pprint.pformat(expected_entries).splitlines(),
pprint.pformat(actual_entries).splitlines())))
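# Sketch of the EXPECTED_ENTRIES shape consumed above (path and values
# hypothetical); the keys mirror the attributes gathered from the XML:
#
#   expected_entries = {
#     'wc/iota' : {'wcprops' : 'none',
#                  'wcitem'  : 'modified',
#                  'wcrev'   : '1'},
#   }
#   run_and_verify_status_xml(expected_entries, 'wc/iota')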
def run_and_verify_inherited_prop_xml(path_or_url,
expected_inherited_props,
expected_explicit_props,
propname=None,
peg_rev=None,
*args):
"""If PROPNAME is None, then call run_and_verify_svn with proplist -v --xml
--show-inherited-props on PATH_OR_URL, otherwise call run_and_verify_svn
with propget PROPNAME --xml --show-inherited-props.
  PATH_OR_URL is pegged at PEG_REV if the latter is not None. If PEG_REV
  is None, then PATH_OR_URL is pegged at HEAD if it is a URL.
EXPECTED_INHERITED_PROPS is a (possibly empty) dict mapping working copy
paths or URLs to dicts of inherited properties. EXPECTED_EXPLICIT_PROPS is
a (possibly empty) dict of the explicit properties expected on PATH_OR_URL.
Returns on success, raises on failure if EXPECTED_INHERITED_PROPS or
EXPECTED_EXPLICIT_PROPS don't match the results of proplist/propget.
"""
if peg_rev is None:
if sandbox.is_url(path_or_url):
path_or_url = path_or_url + '@HEAD'
else:
path_or_url = path_or_url + '@' + str(peg_rev)
  if propname:
exit_code, output, errput = svntest.actions.run_and_verify_svn(
None, None, [], 'propget', propname, '--xml',
'--show-inherited-props', path_or_url, *args)
else:
exit_code, output, errput = svntest.actions.run_and_verify_svn(
None, None, [], 'proplist', '-v', '--xml', '--show-inherited-props',
path_or_url, *args)
if len(errput) > 0:
raise Failure
# Props inherited from within the WC are keyed on absolute paths.
expected_iprops = {}
for x in expected_inherited_props:
if sandbox.is_url(x):
expected_iprops[x] = expected_inherited_props[x]
else:
expected_iprops[os.path.abspath(x)] = expected_inherited_props[x]
actual_iprops = {}
actual_explicit_props = {}
doc = parseString(''.join(output))
targets = doc.getElementsByTagName('target')
for t in targets:
# Create actual inherited props.
iprops = t.getElementsByTagName('inherited_property')
if len(iprops) > 0:
      actual_iprops[t.getAttribute('path')] = {}
for i in iprops:
actual_iprops[t.getAttribute('path')][i.getAttribute('name')] = \
i.firstChild.nodeValue
# Create actual explicit props.
xprops = t.getElementsByTagName('property')
for x in xprops:
actual_explicit_props[x.getAttribute('name')] = x.firstChild.nodeValue
if expected_explicit_props != actual_explicit_props:
raise svntest.Failure(
'Actual and expected explicit props do not match\n' +
'\n'.join(difflib.ndiff(
pprint.pformat(expected_explicit_props).splitlines(),
pprint.pformat(actual_explicit_props).splitlines())))
if expected_iprops != actual_iprops:
raise svntest.Failure(
'Actual and expected inherited props do not match\n' +
'\n'.join(difflib.ndiff(
pprint.pformat(expected_iprops).splitlines(),
pprint.pformat(actual_iprops).splitlines())))
def run_and_verify_diff_summarize_xml(error_re_string = [],
expected_prefix = None,
expected_paths = [],
expected_items = [],
expected_props = [],
expected_kinds = [],
*args):
"""Run 'diff --summarize --xml' with the arguments *ARGS, which should
contain all arguments beyond for your 'diff --summarize --xml' omitting
said arguments. EXPECTED_PREFIX will store a "common" path prefix
expected to be at the beginning of each summarized path. If
EXPECTED_PREFIX is None, then EXPECTED_PATHS will need to be exactly
as 'svn diff --summarize --xml' will output. If ERROR_RE_STRING, the
command must exit with error, and the error message must match regular
expression ERROR_RE_STRING.
Else if ERROR_RE_STRING is None, the subcommand output will be parsed
into an XML document and will then be verified by comparing the parsed
output to the contents in the EXPECTED_PATHS, EXPECTED_ITEMS,
EXPECTED_PROPS and EXPECTED_KINDS. Returns on success, raises
on failure."""
exit_code, output, errput = run_and_verify_svn(None, None, error_re_string,
'diff', '--summarize',
'--xml', *args)
# Return if errors are present since they were expected
if len(errput) > 0:
return
doc = parseString(''.join(output))
paths = doc.getElementsByTagName("path")
items = expected_items
kinds = expected_kinds
for path in paths:
modified_path = path.childNodes[0].data
if (expected_prefix is not None
and modified_path.find(expected_prefix) == 0):
modified_path = modified_path.replace(expected_prefix, '')[1:].strip()
# Workaround single-object diff
if len(modified_path) == 0:
modified_path = path.childNodes[0].data.split(os.sep)[-1]
# From here on, we use '/' as path separator.
if os.sep != "/":
modified_path = modified_path.replace(os.sep, "/")
if modified_path not in expected_paths:
logger.warn("ERROR: %s not expected in the changed paths.", modified_path)
raise Failure
index = expected_paths.index(modified_path)
expected_item = items[index]
expected_kind = kinds[index]
expected_prop = expected_props[index]
actual_item = path.getAttribute('item')
actual_kind = path.getAttribute('kind')
actual_prop = path.getAttribute('props')
if expected_item != actual_item:
logger.warn("ERROR: expected: %s actual: %s", expected_item, actual_item)
raise Failure
if expected_kind != actual_kind:
logger.warn("ERROR: expected: %s actual: %s", expected_kind, actual_kind)
raise Failure
if expected_prop != actual_prop:
logger.warn("ERROR: expected: %s actual: %s", expected_prop, actual_prop)
raise Failure
def run_and_verify_diff_summarize(output_tree, *args):
"""Run 'diff --summarize' with the arguments *ARGS.
The subcommand output will be verified against OUTPUT_TREE. Returns
on success, raises on failure.
"""
if isinstance(output_tree, wc.State):
output_tree = output_tree.old_tree()
exit_code, output, errput = main.run_svn(None, 'diff', '--summarize',
*args)
actual = tree.build_tree_from_diff_summarize(output)
# Verify actual output against expected output.
try:
tree.compare_trees("output", actual, output_tree)
except tree.SVNTreeError:
verify.display_trees(None, 'DIFF OUTPUT TREE', output_tree, actual)
_log_tree_state("ACTUAL DIFF OUTPUT TREE:", actual)
raise
def run_and_validate_lock(path, username):
"""`svn lock' the given path and validate the contents of the lock.
Use the given username. This is important because locks are
user specific."""
comment = "Locking path:%s." % path
# lock the path
run_and_verify_svn(None, ".*locked by user", [], 'lock',
'--username', username,
'-m', comment, path)
# Run info and check that we get the lock fields.
exit_code, output, err = run_and_verify_svn(None, None, [],
'info','-R',
path)
### TODO: Leverage RegexOutput([...], match_all=True) here.
# prepare the regexs to compare against
token_re = re.compile(".*?Lock Token: opaquelocktoken:.*?", re.DOTALL)
author_re = re.compile(".*?Lock Owner: %s\n.*?" % username, re.DOTALL)
created_re = re.compile(".*?Lock Created:.*?", re.DOTALL)
comment_re = re.compile(".*?%s\n.*?" % re.escape(comment), re.DOTALL)
# join all output lines into one
output = "".join(output)
# Fail even if one regex does not match
if ( not (token_re.match(output) and
author_re.match(output) and
created_re.match(output) and
comment_re.match(output))):
raise Failure
def _run_and_verify_resolve(cmd, expected_paths, *args):
"""Run "svn CMD" (where CMD is 'resolve' or 'resolved') with arguments
ARGS, and verify that it resolves the paths in EXPECTED_PATHS and no others.
If no ARGS are specified, use the elements of EXPECTED_PATHS as the
arguments."""
# TODO: verify that the status of PATHS changes accordingly.
if len(args) == 0:
args = expected_paths
expected_output = verify.AlternateOutput([
verify.UnorderedOutput([
"Resolved conflicted state of '" + path + "'\n" for path in
expected_paths]),
verify.UnorderedOutput([
"Breaking move with source path '" + path + "'\n" for path in
expected_paths] + [
"Resolved conflicted state of '" + path + "'\n" for path in
expected_paths]),
],
match_all=False)
run_and_verify_svn(None, expected_output, [],
cmd, *args)
def run_and_verify_resolve(expected_paths, *args):
"""Run "svn resolve" with arguments ARGS, and verify that it resolves the
paths in EXPECTED_PATHS and no others. If no ARGS are specified, use the
elements of EXPECTED_PATHS as the arguments."""
_run_and_verify_resolve('resolve', expected_paths, *args)
def run_and_verify_resolved(expected_paths, *args):
"""Run "svn resolved" with arguments ARGS, and verify that it resolves the
paths in EXPECTED_PATHS and no others. If no ARGS are specified, use the
elements of EXPECTED_PATHS as the arguments."""
_run_and_verify_resolve('resolved', expected_paths, *args)
def run_and_verify_revert(expected_paths, *args):
"""Run "svn revert" with arguments ARGS, and verify that it reverts
the paths in EXPECTED_PATHS and no others. If no ARGS are
specified, use the elements of EXPECTED_PATHS as the arguments."""
if len(args) == 0:
args = expected_paths
expected_output = verify.UnorderedOutput([
"Reverted '" + path + "'\n" for path in
expected_paths])
run_and_verify_svn(None, expected_output, [],
"revert", *args)
######################################################################
# Other general utilities
# This allows a test to *quickly* bootstrap itself.
def make_repo_and_wc(sbox, create_wc = True, read_only = False,
minor_version = None):
"""Create a fresh 'Greek Tree' repository and check out a WC from it.
If READ_ONLY is False, a dedicated repository will be created, at the path
SBOX.repo_dir. If READ_ONLY is True, the pristine repository will be used.
In either case, SBOX.repo_url is assumed to point to the repository that
will be used.
If create_wc is True, a dedicated working copy will be checked out from
the repository, at the path SBOX.wc_dir.
Returns on success, raises on failure."""
# Create (or copy afresh) a new repos with a greek tree in it.
if not read_only:
guarantee_greek_repository(sbox.repo_dir, minor_version)
if create_wc:
# Generate the expected output tree.
expected_output = main.greek_state.copy()
expected_output.wc_dir = sbox.wc_dir
expected_output.tweak(status='A ', contents=None)
# Generate an expected wc tree.
expected_wc = main.greek_state
# Do a checkout, and verify the resulting output and disk contents.
run_and_verify_checkout(sbox.repo_url,
sbox.wc_dir,
expected_output,
expected_wc)
else:
# just make sure the parent folder of our working copy is created
try:
os.mkdir(main.general_wc_dir)
except OSError, err:
if err.errno != errno.EEXIST:
raise
# Duplicate a working copy or other dir.
def duplicate_dir(wc_name, wc_copy_name):
"""Copy the working copy WC_NAME to WC_COPY_NAME. Overwrite any
existing tree at that location."""
main.safe_rmtree(wc_copy_name)
shutil.copytree(wc_name, wc_copy_name)
def get_virginal_state(wc_dir, rev):
"Return a virginal greek tree state for a WC and repos at revision REV."
rev = str(rev) ### maybe switch rev to an integer?
# copy the greek tree, shift it to the new wc_dir, insert a root elem,
# then tweak all values
state = main.greek_state.copy()
state.wc_dir = wc_dir
state.desc[''] = wc.StateItem()
state.tweak(contents=None, status=' ', wc_rev=rev)
return state
# Cheap administrative directory locking
def lock_admin_dir(wc_dir, recursive=False):
"Lock a SVN administrative directory"
db, root_path, relpath = wc.open_wc_db(wc_dir)
svntest.main.run_wc_lock_tester(recursive, wc_dir)
def set_incomplete(wc_dir, revision):
"Make wc_dir incomplete at revision"
svntest.main.run_wc_incomplete_tester(wc_dir, revision)
def get_wc_uuid(wc_dir):
"Return the UUID of the working copy at WC_DIR."
return run_and_parse_info(wc_dir)[0]['Repository UUID']
def get_wc_base_rev(wc_dir):
"Return the BASE revision of the working copy at WC_DIR."
return run_and_parse_info(wc_dir)[0]['Revision']
def hook_failure_message(hook_name):
"""Return the error message that the client prints for failure of the
specified hook HOOK_NAME. The wording changed with Subversion 1.5."""
# Output depends on the server version, not the repository version.
# This gets the wrong result for modern servers with old format
# repositories.
if svntest.main.options.server_minor_version < 5 and not svntest.main.is_ra_type_file():
return "'%s' hook failed with error output:\n" % hook_name
else:
if hook_name in ["start-commit", "pre-commit"]:
action = "Commit"
elif hook_name == "pre-revprop-change":
action = "Revprop change"
elif hook_name == "pre-lock":
action = "Lock"
elif hook_name == "pre-unlock":
action = "Unlock"
else:
action = None
if action is None:
message = "%s hook failed (exit code 1)" % (hook_name,)
else:
message = "%s blocked by %s hook (exit code 1)" % (action, hook_name)
return message + " with output:\n"
def create_failing_hook(repo_dir, hook_name, text):
"""Create a HOOK_NAME hook in the repository at REPO_DIR that prints
TEXT to stderr and exits with an error."""
hook_path = os.path.join(repo_dir, 'hooks', hook_name)
# Embed the text carefully: it might include characters like "%" and "'".
main.create_python_hook_script(hook_path, 'import sys\n'
'sys.stderr.write(' + repr(text) + ')\n'
'sys.exit(1)\n')
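# Example sketch (hook name and text hypothetical): after this, a commit
# should fail with the message from hook_failure_message('pre-commit').
#
#   create_failing_hook(sbox.repo_dir, 'pre-commit', 'no commits allowed')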
def enable_revprop_changes(repo_dir):
"""Enable revprop changes in the repository at REPO_DIR by creating a
pre-revprop-change hook script and (if appropriate) making it executable."""
hook_path = main.get_pre_revprop_change_hook_path(repo_dir)
main.create_python_hook_script(hook_path, 'import sys; sys.exit(0)',
cmd_alternative='@exit 0')
def disable_revprop_changes(repo_dir):
"""Disable revprop changes in the repository at REPO_DIR by creating a
pre-revprop-change hook script that prints "pre-revprop-change" followed
by its arguments, and returns an error."""
hook_path = main.get_pre_revprop_change_hook_path(repo_dir)
main.create_python_hook_script(hook_path,
'import sys\n'
'sys.stderr.write("pre-revprop-change %s" %'
' " ".join(sys.argv[1:]))\n'
'sys.exit(1)\n',
cmd_alternative=
'@echo pre-revprop-change %* 1>&2\n'
'@exit 1\n')
def create_failing_post_commit_hook(repo_dir):
"""Create a post-commit hook script in the repository at REPO_DIR that always
reports an error."""
hook_path = main.get_post_commit_hook_path(repo_dir)
main.create_python_hook_script(hook_path, 'import sys\n'
'sys.stderr.write("Post-commit hook failed")\n'
'sys.exit(1)\n',
cmd_alternative=
'@echo Post-commit hook failed 1>&2\n'
'@exit 1\n')
# set_prop can be used for properties with NULL characters, which are not
# handled correctly when passed to subprocess.Popen(), and for values like
# "*", which are not handled correctly on Windows.
def set_prop(name, value, path, expected_re_string=None, force=None):
"""Set a property with specified value"""
if not force:
propset = ('propset',)
else:
propset = ('propset', '--force')
if value and (value[0] == '-' or '\x00' in value or sys.platform == 'win32'):
from tempfile import mkstemp
(fd, value_file_path) = mkstemp()
os.close(fd)
value_file = open(value_file_path, 'wb')
value_file.write(value)
value_file.flush()
value_file.close()
propset += ('-F', value_file_path, name, path)
exit_code, out, err = main.run_svn(expected_re_string, *propset)
os.remove(value_file_path)
else:
propset += (name, value, path)
exit_code, out, err = main.run_svn(expected_re_string, *propset)
if expected_re_string:
if not expected_re_string.startswith(".*"):
expected_re_string = ".*(" + expected_re_string + ")"
expected_err = verify.RegexOutput(expected_re_string, match_all=False)
verify.verify_outputs(None, None, err, None, expected_err)
def check_prop(name, path, exp_out, revprop=None):
"""Verify that property NAME on PATH has a value of EXP_OUT.
If REVPROP is not None, then it is a revision number and
a revision property is sought."""
if revprop is not None:
revprop_options = ['--revprop', '-r', revprop]
else:
revprop_options = []
# Not using run_svn because binary_mode must be set
exit_code, out, err = main.run_command(main.svn_binary, None, True, 'pg',
'--strict', name, path,
'--config-dir',
main.default_config_dir,
'--username', main.wc_author,
'--password', main.wc_passwd,
*revprop_options)
if out != exp_out:
logger.warn("svn pg --strict %s output does not match expected.", name)
logger.warn("Expected standard output: %s\n", exp_out)
logger.warn("Actual standard output: %s\n", out)
raise Failure
def fill_file_with_lines(wc_path, line_nbr, line_descrip=None,
append=True):
"""Change the file at WC_PATH (adding some lines), and return its
new contents. LINE_NBR indicates the line number at which the new
contents should assume that it's being appended. LINE_DESCRIP is
something like 'This is line' (the default) or 'Conflicting line'."""
if line_descrip is None:
line_descrip = "This is line"
# Generate the new contents for the file.
contents = ""
for n in range(line_nbr, line_nbr + 3):
contents = contents + line_descrip + " " + repr(n) + " in '" + \
os.path.basename(wc_path) + "'.\n"
# Write the new contents to the file.
if append:
main.file_append(wc_path, contents)
else:
main.file_write(wc_path, contents)
return contents
def inject_conflict_into_wc(sbox, state_path, file_path,
expected_disk, expected_status, merged_rev):
"""Create a conflict at FILE_PATH by replacing its contents,
committing the change, backdating it to its previous revision,
changing its contents again, then updating it to merge in the
previous change."""
wc_dir = sbox.wc_dir
# Make a change to the file.
contents = fill_file_with_lines(file_path, 1, "This is line", append=False)
# Commit the changed file, first taking note of the current revision.
prev_rev = expected_status.desc[state_path].wc_rev
expected_output = wc.State(wc_dir, {
state_path : wc.StateItem(verb='Sending'),
})
if expected_status:
expected_status.tweak(state_path, wc_rev=merged_rev)
run_and_verify_commit(wc_dir, expected_output, expected_status,
None, file_path)
# Backdate the file.
exit_code, output, errput = main.run_svn(None, "up", "-r", str(prev_rev),
file_path)
if expected_status:
expected_status.tweak(state_path, wc_rev=prev_rev)
  # Make a conflicting change to the file.
conflicting_contents = fill_file_with_lines(file_path, 1, "Conflicting line",
append=False)
# Merge the previous change into the file to produce a conflict.
if expected_disk:
expected_disk.tweak(state_path, contents="")
expected_output = wc.State(wc_dir, {
state_path : wc.StateItem(status='C '),
})
inject_conflict_into_expected_state(state_path,
expected_disk, expected_status,
conflicting_contents, contents,
merged_rev)
exit_code, output, errput = main.run_svn(None, "up", "-r", str(merged_rev),
file_path)
if expected_status:
expected_status.tweak(state_path, wc_rev=merged_rev)
def inject_conflict_into_expected_state(state_path,
expected_disk, expected_status,
wc_text, merged_text, merged_rev):
"""Update the EXPECTED_DISK and EXPECTED_STATUS trees for the
conflict at STATE_PATH (ignored if None). WC_TEXT, MERGED_TEXT, and
MERGED_REV are used to determine the contents of the conflict (the
text parameters should be newline-terminated)."""
if expected_disk:
conflict_marker = make_conflict_marker_text(wc_text, merged_text,
merged_rev)
existing_text = expected_disk.desc[state_path].contents or ""
expected_disk.tweak(state_path, contents=existing_text + conflict_marker)
if expected_status:
expected_status.tweak(state_path, status='C ')
def make_conflict_marker_text(wc_text, merged_text, merged_rev):
"""Return the conflict marker text described by WC_TEXT (the current
text in the working copy, MERGED_TEXT (the conflicting text merged
in), and MERGED_REV (the revision from whence the conflicting text
came)."""
return "<<<<<<< .working\n" + wc_text + "=======\n" + \
merged_text + ">>>>>>> .merge-right.r" + str(merged_rev) + "\n"
def build_greek_tree_conflicts(sbox):
"""Create a working copy that has tree-conflict markings.
After this function has been called, sbox.wc_dir is a working
copy that has specific tree-conflict markings.
In particular, this does two conflicting sets of edits and performs an
update so that tree conflicts appear.
Note that this function calls sbox.build() because it needs a clean sbox.
So, there is no need to call sbox.build() before this.
The conflicts are the result of an 'update' on the following changes:
Incoming Local
A/D/G/pi text-mod del
A/D/G/rho del text-mod
A/D/G/tau del del
This function is useful for testing that tree-conflicts are handled
properly once they have appeared, e.g. that commits are blocked, that the
info output is correct, etc.
See also the tree-conflicts tests using deep_trees in various other
.py files, and tree_conflict_tests.py.
"""
sbox.build()
wc_dir = sbox.wc_dir
j = os.path.join
G = j(wc_dir, 'A', 'D', 'G')
pi = j(G, 'pi')
rho = j(G, 'rho')
tau = j(G, 'tau')
# Make incoming changes and "store them away" with a commit.
main.file_append(pi, "Incoming edit.\n")
main.run_svn(None, 'del', rho)
main.run_svn(None, 'del', tau)
expected_output = wc.State(wc_dir, {
'A/D/G/pi' : Item(verb='Sending'),
'A/D/G/rho' : Item(verb='Deleting'),
'A/D/G/tau' : Item(verb='Deleting'),
})
expected_status = get_virginal_state(wc_dir, 1)
expected_status.tweak('A/D/G/pi', wc_rev='2')
expected_status.remove('A/D/G/rho', 'A/D/G/tau')
run_and_verify_commit(wc_dir, expected_output, expected_status, None,
'-m', 'Incoming changes.', wc_dir )
# Update back to the pristine state ("time-warp").
expected_output = wc.State(wc_dir, {
'A/D/G/pi' : Item(status='U '),
'A/D/G/rho' : Item(status='A '),
'A/D/G/tau' : Item(status='A '),
})
expected_disk = main.greek_state
expected_status = get_virginal_state(wc_dir, 1)
run_and_verify_update(wc_dir, expected_output, expected_disk,
expected_status, None, None, None, None, None, False,
'-r', '1', wc_dir)
# Make local changes
main.run_svn(None, 'del', pi)
main.file_append(rho, "Local edit.\n")
main.run_svn(None, 'del', tau)
# Update, receiving the incoming changes on top of the local changes,
# causing tree conflicts. Don't check for any particular result: that is
# the job of other tests.
run_and_verify_svn(None, verify.AnyOutput, [], 'update', wc_dir)
| centic9/subversion-ppa | subversion/tests/cmdline/svntest/actions.py | Python | apache-2.0 | 86,069 | 0.011468 |
#cash register
#Samuel Armstrong
| samuelarm/A-Level_2016-18 | general/cash register.py | Python | gpl-3.0 | 45 | 0.088889 |
# coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. # noqa: E501
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class NotaryJournalList(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'end_position': 'str',
'next_uri': 'str',
'notary_journals': 'list[NotaryJournal]',
'previous_uri': 'str',
'result_set_size': 'str',
'start_position': 'str',
'total_set_size': 'str'
}
attribute_map = {
'end_position': 'endPosition',
'next_uri': 'nextUri',
'notary_journals': 'notaryJournals',
'previous_uri': 'previousUri',
'result_set_size': 'resultSetSize',
'start_position': 'startPosition',
'total_set_size': 'totalSetSize'
}
def __init__(self, end_position=None, next_uri=None, notary_journals=None, previous_uri=None, result_set_size=None, start_position=None, total_set_size=None): # noqa: E501
"""NotaryJournalList - a model defined in Swagger""" # noqa: E501
self._end_position = None
self._next_uri = None
self._notary_journals = None
self._previous_uri = None
self._result_set_size = None
self._start_position = None
self._total_set_size = None
self.discriminator = None
if end_position is not None:
self.end_position = end_position
if next_uri is not None:
self.next_uri = next_uri
if notary_journals is not None:
self.notary_journals = notary_journals
if previous_uri is not None:
self.previous_uri = previous_uri
if result_set_size is not None:
self.result_set_size = result_set_size
if start_position is not None:
self.start_position = start_position
if total_set_size is not None:
self.total_set_size = total_set_size
@property
def end_position(self):
"""Gets the end_position of this NotaryJournalList. # noqa: E501
The last position in the result set. # noqa: E501
:return: The end_position of this NotaryJournalList. # noqa: E501
:rtype: str
"""
return self._end_position
@end_position.setter
def end_position(self, end_position):
"""Sets the end_position of this NotaryJournalList.
The last position in the result set. # noqa: E501
:param end_position: The end_position of this NotaryJournalList. # noqa: E501
:type: str
"""
self._end_position = end_position
@property
def next_uri(self):
"""Gets the next_uri of this NotaryJournalList. # noqa: E501
The URI to the next chunk of records based on the search request. If the endPosition is the entire results of the search, this is null. # noqa: E501
:return: The next_uri of this NotaryJournalList. # noqa: E501
:rtype: str
"""
return self._next_uri
@next_uri.setter
def next_uri(self, next_uri):
"""Sets the next_uri of this NotaryJournalList.
The URI to the next chunk of records based on the search request. If the endPosition is the entire results of the search, this is null. # noqa: E501
:param next_uri: The next_uri of this NotaryJournalList. # noqa: E501
:type: str
"""
self._next_uri = next_uri
@property
def notary_journals(self):
"""Gets the notary_journals of this NotaryJournalList. # noqa: E501
# noqa: E501
:return: The notary_journals of this NotaryJournalList. # noqa: E501
:rtype: list[NotaryJournal]
"""
return self._notary_journals
@notary_journals.setter
def notary_journals(self, notary_journals):
"""Sets the notary_journals of this NotaryJournalList.
# noqa: E501
:param notary_journals: The notary_journals of this NotaryJournalList. # noqa: E501
:type: list[NotaryJournal]
"""
self._notary_journals = notary_journals
@property
def previous_uri(self):
"""Gets the previous_uri of this NotaryJournalList. # noqa: E501
        The URI to the previous chunk of records based on the search request. # noqa: E501
:return: The previous_uri of this NotaryJournalList. # noqa: E501
:rtype: str
"""
return self._previous_uri
@previous_uri.setter
def previous_uri(self, previous_uri):
"""Sets the previous_uri of this NotaryJournalList.
        The URI to the previous chunk of records based on the search request. # noqa: E501
:param previous_uri: The previous_uri of this NotaryJournalList. # noqa: E501
:type: str
"""
self._previous_uri = previous_uri
@property
def result_set_size(self):
"""Gets the result_set_size of this NotaryJournalList. # noqa: E501
The number of results returned in this response. # noqa: E501
:return: The result_set_size of this NotaryJournalList. # noqa: E501
:rtype: str
"""
return self._result_set_size
@result_set_size.setter
def result_set_size(self, result_set_size):
"""Sets the result_set_size of this NotaryJournalList.
The number of results returned in this response. # noqa: E501
:param result_set_size: The result_set_size of this NotaryJournalList. # noqa: E501
:type: str
"""
self._result_set_size = result_set_size
@property
def start_position(self):
"""Gets the start_position of this NotaryJournalList. # noqa: E501
Starting position of the current result set. # noqa: E501
:return: The start_position of this NotaryJournalList. # noqa: E501
:rtype: str
"""
return self._start_position
@start_position.setter
def start_position(self, start_position):
"""Sets the start_position of this NotaryJournalList.
Starting position of the current result set. # noqa: E501
:param start_position: The start_position of this NotaryJournalList. # noqa: E501
:type: str
"""
self._start_position = start_position
@property
def total_set_size(self):
"""Gets the total_set_size of this NotaryJournalList. # noqa: E501
        The total number of items available in the result set. This will always be greater than or equal to the value of the resultSetSize property in the response. # noqa: E501
:return: The total_set_size of this NotaryJournalList. # noqa: E501
:rtype: str
"""
return self._total_set_size
@total_set_size.setter
def total_set_size(self, total_set_size):
"""Sets the total_set_size of this NotaryJournalList.
        The total number of items available in the result set. This will always be greater than or equal to the value of the resultSetSize property in the response. # noqa: E501
:param total_set_size: The total_set_size of this NotaryJournalList. # noqa: E501
:type: str
"""
self._total_set_size = total_set_size
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(NotaryJournalList, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, NotaryJournalList):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
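if __name__ == "__main__":
    # Illustrative sketch only: swagger-codegen constructors conventionally
    # accept the documented fields as keyword arguments; this demo assumes
    # that convention and is not part of the generated module.
    journal_list = NotaryJournalList(start_position='0', result_set_size='0')
    print(journal_list.to_dict())
    print(journal_list)  # __repr__ delegates to to_str()/pprint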
| docusign/docusign-python-client | docusign_esign/models/notary_journal_list.py | Python | mit | 9,255 | 0.000108 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v10.enums",
marshal="google.ads.googleads.v10",
manifest={"SeasonalityEventStatusEnum",},
)
class SeasonalityEventStatusEnum(proto.Message):
r"""Message describing seasonality event statuses. The two types
of seasonality events are BiddingSeasonalityAdjustments and
BiddingDataExclusions.
"""
class SeasonalityEventStatus(proto.Enum):
r"""The possible statuses of a Seasonality Event."""
UNSPECIFIED = 0
UNKNOWN = 1
ENABLED = 2
REMOVED = 4
__all__ = tuple(sorted(__protobuf__.manifest))
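if __name__ == "__main__":
    # Illustrative sketch only: proto-plus enums subclass enum.IntEnum, so
    # members expose both a symbolic name and the integer wire value.
    status = SeasonalityEventStatusEnum.SeasonalityEventStatus.ENABLED
    print(status.name, int(status))  # -> ENABLED 2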
| googleads/google-ads-python | google/ads/googleads/v10/enums/types/seasonality_event_status.py | Python | apache-2.0 | 1,255 | 0.000797 |
'''
Integration Test Teardown case
@author: Youyk
'''
import zstacklib.utils.linux as linux
import zstacklib.utils.http as http
import zstackwoodpecker.setup_actions as setup_actions
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.clean_util as clean_util
import zstackwoodpecker.test_lib as test_lib
import zstacktestagent.plugins.host as host_plugin
import zstacktestagent.testagent as testagent
def test():
clean_util.cleanup_all_vms_violently()
clean_util.cleanup_none_vm_volumes_violently()
clean_util.umount_all_primary_storages_violently()
clean_util.cleanup_backup_storage()
#linux.remove_vlan_eth("eth0", 10)
#linux.remove_vlan_eth("eth0", 11)
cmd = host_plugin.DeleteVlanDeviceCmd()
cmd.vlan_ethname = 'eth0.10'
hosts = test_lib.lib_get_all_hosts_from_plan()
if type(hosts) != type([]):
hosts = [hosts]
for host in hosts:
http.json_dump_post(testagent.build_http_path(host.managementIp_, host_plugin.DELETE_VLAN_DEVICE_PATH), cmd)
cmd.vlan_ethname = 'eth0.11'
for host in hosts:
http.json_dump_post(testagent.build_http_path(host.managementIp_, host_plugin.DELETE_VLAN_DEVICE_PATH), cmd)
test_lib.setup_plan.stop_node()
test_lib.lib_cleanup_host_ip_dict()
test_util.test_pass('VPC Teardown Success')
| zstackio/zstack-woodpecker | integrationtest/vm/vpc_ha/suite_teardown.py | Python | apache-2.0 | 1,332 | 0.005255 |
# -*-mode: python; py-indent-offset: 4; tab-width: 8; coding: iso-8859-1 -*-
# DLLM (non-linear Differentiated Lifting Line Model, open source software)
#
# Copyright (C) 2013-2015 Airbus Group SAS
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# https://github.com/matthieu-meaux/DLLM.git
#
# @author : Matthieu MEAUX
#
from MDOTools.ValidGrad.FDValidGrad import FDValidGrad
from DLLM.DLLMGeom.wing_broken import Wing_Broken
from DLLM.DLLMKernel.DLLMSolver import DLLMSolver
from MDOTools.OC.operating_condition import OperatingCondition
import numpy
OC = OperatingCondition('cond1')
OC.set_Mach(0.8)
OC.set_AoA(3.5)
OC.set_altitude(10000.)
OC.set_T0_deg(15.)
OC.set_P0(101325.)
OC.set_humidity(0.)
OC.compute_atmosphere()
wing_param = Wing_Broken('broken_wing', n_sect=20)
wing_param.import_BC_from_file('input_parameters.par')
wing_param.build_linear_airfoil(OC, AoA0=0.0, set_as_ref=True)
wing_param.build_airfoils_from_ref()
wing_param.update()
print wing_param
DLLM = DLLMSolver('Simple', wing_param, OC)
DLLM.run_direct()
iAoA0 = DLLM.get_iAoA()
print 'iAoA0 shape', iAoA0.shape
print 'iAoA0=', iAoA0
def f(x):
func=DLLM.comp_R(x)
return func
def df(x):
func_grad=DLLM.comp_dpR_dpiAoA(x)
return func_grad
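# Validate the analytic Jacobian dpR/dpiAoA against a second-order finite
# difference of the residual R; `treshold` (spelling follows the MDOTools
# FDValidGrad API) bounds the accepted relative error between the two.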
val_grad = FDValidGrad(2, f, df, fd_step=1.e-8)
ok, df_fd, df = val_grad.compare(iAoA0, treshold=1.e-6, return_all=True)
print '\n****************************************************'
if ok:
print 'dpR_dpiAoA is valid.'
else:
print '!!!! dpR_dpiAoA is NOT valid !!!!'
print '****************************************************' | matthieu-meaux/DLLM | examples/broken_wing_validation/valid_dpR_dpiAoA.py | Python | gpl-2.0 | 2,233 | 0.012987 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tempest.api.compute import base
from tempest.common import utils
from tempest import config
from tempest.lib import decorators
CONF = config.CONF
LOG = logging.getLogger(__name__)
class ExtensionsTest(base.BaseV2ComputeTest):
@decorators.idempotent_id('3bb27738-b759-4e0d-a5fa-37d7a6df07d1')
def test_list_extensions(self):
# List of all extensions
if not CONF.compute_feature_enabled.api_extensions:
raise self.skipException('There are not any extensions configured')
extensions = self.extensions_client.list_extensions()['extensions']
ext = CONF.compute_feature_enabled.api_extensions[0]
# Log extensions list
extension_list = [x['alias'] for x in extensions]
LOG.debug("Nova extensions: %s", ','.join(extension_list))
if ext == 'all':
self.assertIn('Hosts', map(lambda x: x['name'], extensions))
elif ext:
self.assertIn(ext, extension_list)
else:
raise self.skipException('There are not any extensions configured')
@decorators.idempotent_id('05762f39-bdfa-4cdb-9b46-b78f8e78e2fd')
@utils.requires_ext(extension='os-consoles', service='compute')
def test_get_extension(self):
# get the specified extensions
extension = self.extensions_client.show_extension('os-consoles')
self.assertEqual('os-consoles', extension['extension']['alias'])
| masayukig/tempest | tempest/api/compute/test_extensions.py | Python | apache-2.0 | 2,098 | 0 |
"""
odtreader.py
Contains class ODTFile for reading OOMMF ODT data into a Pandas dataframe
Author: Ryan Pepper (2016)
University of Southampton
"""
import pandas as pd
import tempfile
import re
class ODTFile(object):
def __init__(self, filename):
f = open(filename)
# Can't use 'w+b' for compatibility with Py2
temporary_file = tempfile.NamedTemporaryFile(mode='w')
metadata = []
for line in f:
if line[0] == '#':
metadata.append(line)
else:
new_line = re.sub(r'\s+', ',', line.lstrip().rstrip()) + '\n'
temporary_file.write(new_line)
temporary_file.flush()
self.dataframe = pd.read_csv(temporary_file.name, header=None)
header = []
for column in metadata[3].split('Oxs_')[1:]:
column = column.replace('{', '')
column = column.replace('}', '')
column = column.rstrip().replace(' ', '_')
column = column.replace('::', '_')
column = column.replace(':', '_')
column = column.replace('RungeKuttaEvolve_evolve_', '')
column = column.replace('TimeDriver_', '')
column = column.replace('Simulation_', '')
header.append(column)
self.dataframe.columns = header
temporary_file.close()
self.df = self.dataframe
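if __name__ == '__main__':
    # Minimal usage sketch: 'example.odt' is an illustrative path to an OOMMF
    # ODT table, not a file shipped with this module.
    odt = ODTFile('example.odt')
    print(odt.df.head())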
| fangohr/oommf-python | joommf/odtreader.py | Python | bsd-2-clause | 1,380 | 0 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Screen.page_id'
db.alter_column(u'synch_screen', 'page_id', self.gf('django.db.models.fields.CharField')(max_length=5))
def backwards(self, orm):
# Changing field 'Screen.page_id'
db.alter_column(u'synch_screen', 'page_id', self.gf('django.db.models.fields.IntegerField')(max_length=3, null=True))
models = {
u'synch.screen': {
'Meta': {'object_name': 'Screen'},
'baron_timer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'bottom_blue_timer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'bottom_inhib_blane': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'bottom_inhib_mlane': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'bottom_inhib_tlane': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'bottom_red_timer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'dragon_timer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'drawing': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '30000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '5'}),
'top_blue_timer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'top_inhib_blane': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'top_inhib_mlane': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'top_inhib_tlane': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'}),
'top_red_timer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True'})
},
u'synch.ward': {
'Meta': {'object_name': 'Ward'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position_x': ('django.db.models.fields.IntegerField', [], {'max_length': '3', 'null': 'True'}),
'position_y': ('django.db.models.fields.IntegerField', [], {'max_length': '3', 'null': 'True'}),
'screen': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['synch.Screen']", 'null': 'True', 'blank': 'True'}),
'team': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'timer': ('django.db.models.fields.IntegerField', [], {'max_length': '3', 'null': 'True'})
}
}
complete_apps = ['synch'] | ianfhunter/LoLss | secondscreen/synch/migrations/0002_auto__chg_field_screen_page_id.py | Python | gpl-2.0 | 3,281 | 0.007315 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import http
from django import shortcuts
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from mox import IsA
from horizon import tables
from horizon.tables import views as table_views
from horizon.test import helpers as test
class FakeObject(object):
def __init__(self, id, name, value, status, optional=None, excluded=None):
self.id = id
self.name = name
self.value = value
self.status = status
self.optional = optional
self.excluded = excluded
self.extra = "extra"
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.name)
TEST_DATA = (
FakeObject('1', 'object_1', 'value_1', 'up', 'optional_1', 'excluded_1'),
FakeObject('2', 'object_2', '<strong>evil</strong>', 'down', 'optional_2'),
FakeObject('3', 'object_3', 'value_3', 'up'),
)
TEST_DATA_2 = (
FakeObject('1', 'object_1', 'value_1', 'down', 'optional_1', 'excluded_1'),
)
TEST_DATA_3 = (
FakeObject('1', 'object_1', 'value_1', 'up', 'optional_1', 'excluded_1'),
)
TEST_DATA_4 = (
FakeObject('1', 'object_1', 2, 'up'),
FakeObject('2', 'object_2', 4, 'up'),
)
TEST_DATA_5 = (
FakeObject('1', 'object_1', 'A Value That is longer than 35 characters!',
'down', 'optional_1'),
)
class MyLinkAction(tables.LinkAction):
name = "login"
verbose_name = "Log In"
url = "login"
attrs = {
"class": "ajax-modal",
}
def get_link_url(self, datum=None, *args, **kwargs):
return reverse(self.url)
class MyAction(tables.Action):
name = "delete"
verbose_name = "Delete Me"
verbose_name_plural = "Delete Them"
def allowed(self, request, obj=None):
return getattr(obj, 'status', None) != 'down'
def handle(self, data_table, request, object_ids):
return shortcuts.redirect('http://example.com/?ids=%s'
% ",".join(object_ids))
class MyColumn(tables.Column):
pass
class MyRow(tables.Row):
ajax = True
@classmethod
def get_data(cls, request, obj_id):
return TEST_DATA_2[0]
class MyBatchAction(tables.BatchAction):
name = "batch"
action_present = _("Batch")
action_past = _("Batched")
data_type_singular = _("Item")
data_type_plural = _("Items")
def action(self, request, object_ids):
pass
class MyToggleAction(tables.BatchAction):
name = "toggle"
action_present = (_("Down"), _("Up"))
action_past = (_("Downed"), _("Upped"))
data_type_singular = _("Item")
data_type_plural = _("Items")
def allowed(self, request, obj=None):
if not obj:
return False
self.down = getattr(obj, 'status', None) == 'down'
if self.down:
self.current_present_action = 1
return self.down or getattr(obj, 'status', None) == 'up'
def action(self, request, object_ids):
if self.down:
#up it
self.current_past_action = 1
class MyFilterAction(tables.FilterAction):
def filter(self, table, objs, filter_string):
q = filter_string.lower()
def comp(obj):
if q in obj.name.lower():
return True
return False
return filter(comp, objs)
def get_name(obj):
return "custom %s" % obj.name
def get_link(obj):
return reverse('login')
class MyTable(tables.DataTable):
id = tables.Column('id', hidden=True, sortable=False)
name = tables.Column(get_name, verbose_name="Verbose Name", sortable=True)
value = tables.Column('value',
sortable=True,
link='http://example.com/',
attrs={'class': 'green blue'},
summation="average",
truncate=35,
link_classes=('link-modal',))
status = tables.Column('status', link=get_link)
optional = tables.Column('optional', empty_value='N/A')
excluded = tables.Column('excluded')
class Meta:
name = "my_table"
verbose_name = "My Table"
status_columns = ["status"]
columns = ('id', 'name', 'value', 'optional', 'status')
row_class = MyRow
column_class = MyColumn
table_actions = (MyFilterAction, MyAction, MyBatchAction)
row_actions = (MyAction, MyLinkAction, MyBatchAction, MyToggleAction)
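# Note: DataTable subclasses are declarative. The table metaclass collects
# Column attributes in definition order, while Meta controls naming, status
# columns, action wiring and the custom row/column classes used in the tests.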
class NoActionsTable(tables.DataTable):
id = tables.Column('id')
class Meta:
name = "no_actions_table"
verbose_name = _("No Actions Table")
table_actions = ()
row_actions = ()
class DataTableTests(test.TestCase):
def test_table_instantiation(self):
""" Tests everything that happens when the table is instantiated. """
self.table = MyTable(self.request, TEST_DATA)
# Properties defined on the table
self.assertEqual(self.table.data, TEST_DATA)
self.assertEqual(self.table.name, "my_table")
# Verify calculated options that weren't specified explicitly
self.assertTrue(self.table._meta.actions_column)
self.assertTrue(self.table._meta.multi_select)
# Test for verbose_name
self.assertEqual(unicode(self.table), u"My Table")
# Column ordering and exclusion.
# This should include auto-columns for multi_select and actions,
# but should not contain the excluded column.
# Additionally, auto-generated columns should use the custom
# column class specified on the table.
self.assertQuerysetEqual(self.table.columns.values(),
['<MyColumn: multi_select>',
'<Column: id>',
'<Column: name>',
'<Column: value>',
'<Column: optional>',
'<Column: status>',
'<MyColumn: actions>'])
# Actions (these also test ordering)
self.assertQuerysetEqual(self.table.base_actions.values(),
['<MyBatchAction: batch>',
'<MyAction: delete>',
'<MyFilterAction: filter>',
'<MyLinkAction: login>',
'<MyToggleAction: toggle>'])
self.assertQuerysetEqual(self.table.get_table_actions(),
['<MyFilterAction: filter>',
'<MyAction: delete>',
'<MyBatchAction: batch>'])
self.assertQuerysetEqual(self.table.get_row_actions(TEST_DATA[0]),
['<MyAction: delete>',
'<MyLinkAction: login>',
'<MyBatchAction: batch>',
'<MyToggleAction: toggle>'])
# Auto-generated columns
multi_select = self.table.columns['multi_select']
self.assertEqual(multi_select.auto, "multi_select")
self.assertEqual(multi_select.get_final_attrs().get('class', ""),
"multi_select_column")
actions = self.table.columns['actions']
self.assertEqual(actions.auto, "actions")
self.assertEqual(actions.get_final_attrs().get('class', ""),
"actions_column")
def test_table_force_no_multiselect(self):
class TempTable(MyTable):
class Meta:
columns = ('id',)
table_actions = (MyFilterAction, MyAction,)
row_actions = (MyAction, MyLinkAction,)
multi_select = False
self.table = TempTable(self.request, TEST_DATA)
self.assertQuerysetEqual(self.table.columns.values(),
['<Column: id>',
'<Column: actions>'])
def test_table_force_no_actions_column(self):
class TempTable(MyTable):
class Meta:
columns = ('id',)
table_actions = (MyFilterAction, MyAction,)
row_actions = (MyAction, MyLinkAction,)
actions_column = False
self.table = TempTable(self.request, TEST_DATA)
self.assertQuerysetEqual(self.table.columns.values(),
['<Column: multi_select>',
'<Column: id>'])
def test_table_natural_no_actions_column(self):
class TempTable(MyTable):
class Meta:
columns = ('id',)
table_actions = (MyFilterAction, MyAction,)
self.table = TempTable(self.request, TEST_DATA)
self.assertQuerysetEqual(self.table.columns.values(),
['<Column: multi_select>',
'<Column: id>'])
def test_table_natural_no_multiselect(self):
class TempTable(MyTable):
class Meta:
columns = ('id',)
row_actions = (MyAction, MyLinkAction,)
self.table = TempTable(self.request, TEST_DATA)
self.assertQuerysetEqual(self.table.columns.values(),
['<Column: id>',
'<Column: actions>'])
def test_table_column_inheritance(self):
class TempTable(MyTable):
extra = tables.Column('extra')
class Meta:
name = "temp_table"
table_actions = (MyFilterAction, MyAction,)
row_actions = (MyAction, MyLinkAction,)
self.table = TempTable(self.request, TEST_DATA)
self.assertQuerysetEqual(self.table.columns.values(),
['<Column: multi_select>',
'<Column: id>',
'<Column: name>',
'<Column: value>',
'<Column: status>',
'<Column: optional>',
'<Column: excluded>',
'<Column: extra>',
'<Column: actions>'])
def test_table_construction(self):
self.table = MyTable(self.request, TEST_DATA)
# Verify we retrieve the right columns for headers
columns = self.table.get_columns()
self.assertQuerysetEqual(columns, ['<MyColumn: multi_select>',
'<Column: id>',
'<Column: name>',
'<Column: value>',
'<Column: optional>',
'<Column: status>',
'<MyColumn: actions>'])
# Verify we retrieve the right rows from our data
rows = self.table.get_rows()
self.assertQuerysetEqual(rows, ['<MyRow: my_table__row__1>',
'<MyRow: my_table__row__2>',
'<MyRow: my_table__row__3>'])
# Verify each row contains the right cells
self.assertQuerysetEqual(rows[0].get_cells(),
['<Cell: multi_select, my_table__row__1>',
'<Cell: id, my_table__row__1>',
'<Cell: name, my_table__row__1>',
'<Cell: value, my_table__row__1>',
'<Cell: optional, my_table__row__1>',
'<Cell: status, my_table__row__1>',
'<Cell: actions, my_table__row__1>'])
def test_table_column(self):
self.table = MyTable(self.request, TEST_DATA)
row = self.table.get_rows()[0]
row3 = self.table.get_rows()[2]
id_col = self.table.columns['id']
name_col = self.table.columns['name']
value_col = self.table.columns['value']
# transform
self.assertEqual(row.cells['id'].data, '1') # Standard attr access
self.assertEqual(row.cells['name'].data, 'custom object_1') # Callable
# name and verbose_name
self.assertEqual(unicode(id_col), "Id")
self.assertEqual(unicode(name_col), "Verbose Name")
# sortable
self.assertEqual(id_col.sortable, False)
self.assertNotIn("sortable", id_col.get_final_attrs().get('class', ""))
self.assertEqual(name_col.sortable, True)
self.assertIn("sortable", name_col.get_final_attrs().get('class', ""))
# hidden
self.assertEqual(id_col.hidden, True)
self.assertIn("hide", id_col.get_final_attrs().get('class', ""))
self.assertEqual(name_col.hidden, False)
self.assertNotIn("hide", name_col.get_final_attrs().get('class', ""))
# link, link_classes and get_link_url
self.assertIn('href="http://example.com/"', row.cells['value'].value)
self.assertIn('class="link-modal"', row.cells['value'].value)
self.assertIn('href="/auth/login/"', row.cells['status'].value)
# empty_value
self.assertEqual(row3.cells['optional'].value, "N/A")
# classes
self.assertEqual(value_col.get_final_attrs().get('class', ""),
"green blue sortable anchor normal_column")
# status
cell_status = row.cells['status'].status
self.assertEqual(cell_status, True)
self.assertEqual(row.cells['status'].get_status_class(cell_status),
'status_up')
# status_choices
id_col.status = True
id_col.status_choices = (('1', False), ('2', True), ('3', None))
cell_status = row.cells['id'].status
self.assertEqual(cell_status, False)
self.assertEqual(row.cells['id'].get_status_class(cell_status),
'status_down')
cell_status = row3.cells['id'].status
self.assertEqual(cell_status, None)
self.assertEqual(row.cells['id'].get_status_class(cell_status),
'status_unknown')
# Ensure data is not cached on the column across table instances
self.table = MyTable(self.request, TEST_DATA_2)
row = self.table.get_rows()[0]
self.assertTrue("down" in row.cells['status'].value)
def test_table_row(self):
self.table = MyTable(self.request, TEST_DATA)
row = self.table.get_rows()[0]
self.assertEqual(row.table, self.table)
self.assertEqual(row.datum, TEST_DATA[0])
self.assertEqual(row.id, 'my_table__row__1')
# Verify row status works even if status isn't set on the column
self.assertEqual(row.status, True)
self.assertEqual(row.status_class, 'status_up')
# Check the cells as well
cell_status = row.cells['status'].status
self.assertEqual(cell_status, True)
self.assertEqual(row.cells['status'].get_status_class(cell_status),
'status_up')
def test_table_column_truncation(self):
self.table = MyTable(self.request, TEST_DATA_5)
row = self.table.get_rows()[0]
self.assertEqual(len(row.cells['value'].data), 35)
self.assertEqual(row.cells['value'].data,
u'A Value That is longer than 35 c...')
def test_table_rendering(self):
self.table = MyTable(self.request, TEST_DATA)
# Table actions
table_actions = self.table.render_table_actions()
resp = http.HttpResponse(table_actions)
self.assertContains(resp, "table_search", 1)
self.assertContains(resp, "my_table__filter__q", 1)
self.assertContains(resp, "my_table__delete", 1)
self.assertContains(resp, 'id="my_table__action_delete"', 1)
# Row actions
row_actions = self.table.render_row_actions(TEST_DATA[0])
resp = http.HttpResponse(row_actions)
self.assertContains(resp, "<li", 3)
self.assertContains(resp, "my_table__delete__1", 1)
self.assertContains(resp, "my_table__toggle__1", 1)
self.assertContains(resp, "/auth/login/", 1)
self.assertContains(resp, "ajax-modal", 1)
self.assertContains(resp, 'id="my_table__row_1__action_delete"', 1)
# Whole table
resp = http.HttpResponse(self.table.render())
self.assertContains(resp, '<table id="my_table"', 1)
self.assertContains(resp, '<th ', 8)
self.assertContains(resp, 'id="my_table__row__1"', 1)
self.assertContains(resp, 'id="my_table__row__2"', 1)
self.assertContains(resp, 'id="my_table__row__3"', 1)
update_string = "action=row_update&table=my_table&obj_id="
self.assertContains(resp, update_string, 3)
self.assertContains(resp, "data-update-interval", 3)
# Verify our XSS protection
self.assertContains(resp, '<a href="http://example.com/" '
'class="link-modal">'
'<strong>evil</strong></a>', 1)
# Filter = False hides the search box
self.table._meta.filter = False
table_actions = self.table.render_table_actions()
resp = http.HttpResponse(table_actions)
self.assertContains(resp, "table_search", 0)
def test_table_actions(self):
# Single object action
action_string = "my_table__delete__1"
req = self.factory.post('/my_url/', {'action': action_string})
self.table = MyTable(req, TEST_DATA)
self.assertEqual(self.table.parse_action(action_string),
('my_table', 'delete', '1'))
handled = self.table.maybe_handle()
self.assertEqual(handled.status_code, 302)
self.assertEqual(handled["location"], "http://example.com/?ids=1")
# Batch action (without toggle) conjugation behavior
req = self.factory.get('/my_url/')
self.table = MyTable(req, TEST_DATA_3)
toggle_action = self.table.get_row_actions(TEST_DATA_3[0])[2]
self.assertEqual(unicode(toggle_action.verbose_name), "Batch Item")
# Single object toggle action
# GET page - 'up' to 'down'
req = self.factory.get('/my_url/')
self.table = MyTable(req, TEST_DATA_3)
self.assertEqual(len(self.table.get_row_actions(TEST_DATA_3[0])), 4)
toggle_action = self.table.get_row_actions(TEST_DATA_3[0])[3]
self.assertEqual(unicode(toggle_action.verbose_name), "Down Item")
# Toggle from status 'up' to 'down'
# POST page
action_string = "my_table__toggle__1"
req = self.factory.post('/my_url/', {'action': action_string})
self.table = MyTable(req, TEST_DATA)
self.assertEqual(self.table.parse_action(action_string),
('my_table', 'toggle', '1'))
handled = self.table.maybe_handle()
self.assertEqual(handled.status_code, 302)
self.assertEqual(handled["location"], "/my_url/")
self.assertEqual(list(req._messages)[0].message,
u"Downed Item: object_1")
# Toggle from status 'down' to 'up'
# GET page - 'down' to 'up'
req = self.factory.get('/my_url/')
self.table = MyTable(req, TEST_DATA_2)
self.assertEqual(len(self.table.get_row_actions(TEST_DATA_2[0])), 3)
toggle_action = self.table.get_row_actions(TEST_DATA_2[0])[2]
self.assertEqual(unicode(toggle_action.verbose_name), "Up Item")
# POST page
action_string = "my_table__toggle__2"
req = self.factory.post('/my_url/', {'action': action_string})
self.table = MyTable(req, TEST_DATA)
self.assertEqual(self.table.parse_action(action_string),
('my_table', 'toggle', '2'))
handled = self.table.maybe_handle()
self.assertEqual(handled.status_code, 302)
self.assertEqual(handled["location"], "/my_url/")
self.assertEqual(list(req._messages)[0].message,
u"Upped Item: object_2")
# Multiple object action
action_string = "my_table__delete"
req = self.factory.post('/my_url/', {'action': action_string,
'object_ids': [1, 2]})
self.table = MyTable(req, TEST_DATA)
self.assertEqual(self.table.parse_action(action_string),
('my_table', 'delete', None))
handled = self.table.maybe_handle()
self.assertEqual(handled.status_code, 302)
self.assertEqual(handled["location"], "http://example.com/?ids=1,2")
# Action with nothing selected
req = self.factory.post('/my_url/', {'action': action_string})
self.table = MyTable(req, TEST_DATA)
self.assertEqual(self.table.parse_action(action_string),
('my_table', 'delete', None))
handled = self.table.maybe_handle()
self.assertEqual(handled, None)
self.assertEqual(list(req._messages)[0].message,
"Please select a row before taking that action.")
# Action with specific id and multiple ids favors single id
action_string = "my_table__delete__3"
req = self.factory.post('/my_url/', {'action': action_string,
'object_ids': [1, 2]})
self.table = MyTable(req, TEST_DATA)
self.assertEqual(self.table.parse_action(action_string),
('my_table', 'delete', '3'))
handled = self.table.maybe_handle()
self.assertEqual(handled.status_code, 302)
self.assertEqual(handled["location"],
"http://example.com/?ids=3")
# At least one object in table
# BatchAction is available
req = self.factory.get('/my_url/')
self.table = MyTable(req, TEST_DATA_2)
self.assertQuerysetEqual(self.table.get_table_actions(),
['<MyFilterAction: filter>',
'<MyAction: delete>',
'<MyBatchAction: batch>'])
# Zero objects in table
# BatchAction not available
req = self.factory.get('/my_url/')
self.table = MyTable(req, None)
self.assertQuerysetEqual(self.table.get_table_actions(),
['<MyFilterAction: filter>',
'<MyAction: delete>'])
# Filtering
action_string = "my_table__filter__q"
req = self.factory.post('/my_url/', {action_string: '2'})
self.table = MyTable(req, TEST_DATA)
handled = self.table.maybe_handle()
self.assertEqual(handled, None)
self.assertQuerysetEqual(self.table.filtered_data,
['<FakeObject: object_2>'])
        # Ensure filtering respects the request method, e.g. no filter here
req = self.factory.get('/my_url/', {action_string: '2'})
self.table = MyTable(req, TEST_DATA)
handled = self.table.maybe_handle()
self.assertEqual(handled, None)
self.assertQuerysetEqual(self.table.filtered_data,
['<FakeObject: object_1>',
'<FakeObject: object_2>',
'<FakeObject: object_3>'])
# Updating and preemptive actions
params = {"table": "my_table", "action": "row_update", "obj_id": "1"}
req = self.factory.get('/my_url/',
params,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.table = MyTable(req)
resp = self.table.maybe_preempt()
self.assertEqual(resp.status_code, 200)
# Make sure the data returned differs from the original
self.assertContains(resp, "my_table__row__1")
self.assertContains(resp, "status_down")
# Verify that we don't get a response for a valid action with the
# wrong method.
params = {"table": "my_table", "action": "delete", "obj_id": "1"}
req = self.factory.get('/my_url/', params)
self.table = MyTable(req)
resp = self.table.maybe_preempt()
self.assertEqual(resp, None)
resp = self.table.maybe_handle()
self.assertEqual(resp, None)
# Verbose names
table_actions = self.table.get_table_actions()
self.assertEqual(unicode(table_actions[0].verbose_name), "filter")
self.assertEqual(unicode(table_actions[1].verbose_name), "Delete Me")
row_actions = self.table.get_row_actions(TEST_DATA[0])
self.assertEqual(unicode(row_actions[0].verbose_name), "Delete Me")
self.assertEqual(unicode(row_actions[1].verbose_name), "Log In")
def test_column_uniqueness(self):
table1 = MyTable(self.request)
table2 = MyTable(self.request)
# Regression test for launchpad bug 964345.
self.assertNotEqual(id(table1), id(table2))
self.assertNotEqual(id(table1.columns), id(table2.columns))
t1cols = table1.columns.values()
t2cols = table2.columns.values()
self.assertEqual(t1cols[0].name, t2cols[0].name)
self.assertNotEqual(id(t1cols[0]), id(t2cols[0]))
self.assertNotEqual(id(t1cols[0].table),
id(t2cols[0].table))
self.assertNotEqual(id(t1cols[0].table._data_cache),
id(t2cols[0].table._data_cache))
def test_summation_row(self):
# Test with the "average" method.
table = MyTable(self.request, TEST_DATA_4)
res = http.HttpResponse(table.render())
self.assertContains(res, '<tr class="summation"', 1)
self.assertContains(res, '<td>Summary</td>', 1)
self.assertContains(res, '<td>3.0</td>', 1)
# Test again with the "sum" method.
table.columns['value'].summation = "sum"
res = http.HttpResponse(table.render())
self.assertContains(res, '<tr class="summation"', 1)
self.assertContains(res, '<td>Summary</td>', 1)
self.assertContains(res, '<td>6</td>', 1)
# One last test with no summation.
table.columns['value'].summation = None
table.needs_summary_row = False
res = http.HttpResponse(table.render())
self.assertNotContains(res, '<tr class="summation"')
self.assertNotContains(res, '<td>3.0</td>')
self.assertNotContains(res, '<td>6</td>')
def test_table_action_attributes(self):
table = MyTable(self.request, TEST_DATA)
self.assertTrue(table.has_actions)
self.assertTrue(table.needs_form_wrapper)
res = http.HttpResponse(table.render())
self.assertContains(res, "<form")
table = MyTable(self.request, TEST_DATA, needs_form_wrapper=False)
self.assertTrue(table.has_actions)
self.assertFalse(table.needs_form_wrapper)
res = http.HttpResponse(table.render())
self.assertNotContains(res, "<form")
table = NoActionsTable(self.request, TEST_DATA)
self.assertFalse(table.has_actions)
self.assertFalse(table.needs_form_wrapper)
res = http.HttpResponse(table.render())
self.assertNotContains(res, "<form")
def test_table_action_object_display_is_none(self):
action_string = "my_table__toggle__1"
req = self.factory.post('/my_url/', {'action': action_string})
self.table = MyTable(req, TEST_DATA)
self.mox.StubOutWithMock(self.table, 'get_object_display')
self.table.get_object_display(IsA(FakeObject)).AndReturn(None)
self.mox.ReplayAll()
self.assertEqual(self.table.parse_action(action_string),
('my_table', 'toggle', '1'))
handled = self.table.maybe_handle()
self.assertEqual(handled.status_code, 302)
self.assertEqual(handled["location"], "/my_url/")
self.assertEqual(list(req._messages)[0].message,
u"Downed Item: N/A")
class SingleTableView(table_views.DataTableView):
table_class = MyTable
name = _("Single Table")
slug = "single"
template_name = "horizon/common/_detail_table.html"
def get_data(self):
return TEST_DATA
class TableWithPermissions(tables.DataTable):
id = tables.Column('id')
class Meta:
name = "table_with_permissions"
permissions = ('horizon.test',)
class SingleTableViewWithPermissions(SingleTableView):
table_class = TableWithPermissions
class MultiTableView(tables.MultiTableView):
table_classes = (TableWithPermissions, MyTable)
def get_table_with_permissions_data(self):
return TEST_DATA
def get_my_table_data(self):
return TEST_DATA
class DataTableViewTests(test.TestCase):
def _prepare_view(self, cls, *args, **kwargs):
req = self.factory.get('/my_url/')
req.user = self.user
view = cls()
view.request = req
view.args = args
view.kwargs = kwargs
return view
def test_data_table_view(self):
view = self._prepare_view(SingleTableView)
context = view.get_context_data()
self.assertEqual(context['table'].__class__,
SingleTableView.table_class)
def test_data_table_view_not_authorized(self):
view = self._prepare_view(SingleTableViewWithPermissions)
context = view.get_context_data()
self.assertNotIn('table', context)
def test_data_table_view_authorized(self):
view = self._prepare_view(SingleTableViewWithPermissions)
self.set_permissions(permissions=['test'])
context = view.get_context_data()
self.assertIn('table', context)
self.assertEqual(context['table'].__class__,
SingleTableViewWithPermissions.table_class)
def test_multi_table_view_not_authorized(self):
view = self._prepare_view(MultiTableView)
context = view.get_context_data()
self.assertEqual(context['my_table_table'].__class__, MyTable)
self.assertNotIn('table_with_permissions_table', context)
def test_multi_table_view_authorized(self):
view = self._prepare_view(MultiTableView)
self.set_permissions(permissions=['test'])
context = view.get_context_data()
self.assertEqual(context['my_table_table'].__class__, MyTable)
self.assertEqual(context['table_with_permissions_table'].__class__,
TableWithPermissions)
| 99cloud/keystone_register | horizon/test/tests/tables.py | Python | apache-2.0 | 31,356 | 0.000128 |
class Solution(object):
def combinationSum3(self, k, n):
"""
:type k: int
:type n: int
:rtype: List[List[int]]
"""
def dfs(s, n):
if len(path) >= k:
if n == 0:
result.append(path[:])
return
for i in xrange(s, 10):
if n < i:
return
path.append(i)
dfs(i + 1, n - i)
path.pop()
result, path = [], []
dfs(1, n)
return result
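if __name__ == '__main__':
    # k=3 numbers drawn from 1-9 summing to n=9 -> [[1, 2, 6], [1, 3, 5], [2, 3, 4]]
    print(Solution().combinationSum3(3, 9))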
| zqfan/leetcode | algorithms/216. Combination Sum III/solution2.py | Python | gpl-3.0 | 557 | 0 |
#!/usr/bin/env python2
# -*- coding: utf-8 -*
#The MIT License (MIT)
# Copyright (c) 2015 daite
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from scrapy import Selector
import codecs
import requests
import argparse
import os
class ImageDownloader:
def __init__(self, root_dir_name):
'''
:: init function
:: set root directory path when given root dir name
'''
self.root_dir_name = root_dir_name
self.root_dir_path = os.path.join(os.getcwd(), root_dir_name)
if not os.path.exists(self.root_dir_path):
os.mkdir(self.root_dir_path)
def get_image_urls(self, res_text):
'''
:: getting image urls from response_text
'''
self.img_urls = Selector(text=res_text).xpath('//a/@href').re('.*jpg$')
return self.img_urls
def get_description(self, res_text):
'''
:: getting description from response_text
'''
self.desc_contents = Selector(text=res_text).xpath('//blockquote/text()').extract()
return self.desc_contents
def save_stuff(self, sub_dir_name, img_urls, desc_contents, text_file_name='description.txt'):
'''
:: save images and description each subdir
'''
self.sub_dir_path = os.path.join(self.root_dir_path, sub_dir_name)
self.sub_dir_desc_file_name = os.path.join(self.sub_dir_path, text_file_name)
if not os.path.exists(self.sub_dir_path):
os.mkdir(self.sub_dir_path)
os.chdir(self.sub_dir_path)
with codecs.open(self.sub_dir_desc_file_name, 'a', encoding='utf-8') as f:
for content in desc_contents:
f.write(content)
for img_url in img_urls:
cmd = 'wget -nc -t 1 %s &' %img_url
os.system(cmd)
os.chdir(self.root_dir_path)
def multi_save_stuff(self, urlgen, start_num, end_num):
'''
:: multi save function
'''
for movie_num in range(start_num, end_num + 1):
url = urlgen(movie_num)
res_text = requests.get(url).text
img_urls = self.get_image_urls(res_text)
desc_contents = self.get_description(res_text)
if not img_urls:
print('No images!!!!')
continue
			# note: str.strip('.html') would strip a *set of characters*, not the suffix
			sub_dir_name = url.split('/')[-1].replace('.html', '')
self.save_stuff(sub_dir_name, img_urls, desc_contents)
if __name__ == '__main__':
gana_urlgen = lambda x : 'http://blog.livedoor.jp/kirekawa39-siro/archives/200GANA-%d.html' %x
siro_urlgen = lambda x : 'http://blog.livedoor.jp/kirekawa39-siro/archives/siro-%d.html' %x
parser = argparse.ArgumentParser()
parser.add_argument("start", type=int, help='start number')
parser.add_argument("end", type=int, help='end number')
parser.add_argument('-g', '--gana',
help='download image from gana200',
action="store_true")
parser.add_argument('-s', '--siro',
help='download image from siro',
action="store_true")
args = parser.parse_args()
if args.gana:
i = ImageDownloader('GANA200')
i.multi_save_stuff(gana_urlgen, args.start, args.end)
elif args.siro:
i = ImageDownloader('SIRO')
i.multi_save_stuff(siro_urlgen, args.start, args.end)
else:
parser.print_help()
exit(1)
| daite/JAVImageDownloader | jav_image_download.py | Python | mit | 3,951 | 0.026069 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-09-15 17:18
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bookmarks', '0005_auto_20170915_1015'),
]
operations = [
migrations.AddField(
model_name='bookmark',
name='collections',
field=models.ManyToManyField(to='bookmarks.Collection'),
),
]
| kennethlove/django_bookmarks | dj_bookmarks/bookmarks/migrations/0006_bookmark_collections.py | Python | bsd-3-clause | 476 | 0 |
from time import sleep
from icoin import app
from icoin.core.mail import mail, send
class TestMail:
def test_send_sync(self):
with app.app_context(), mail.record_messages() as outbox:
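            # `async` here is a keyword argument of icoin's send(); it became a
            # reserved word in Python 3.7, so this test targets older interpreters.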
send("test@test.com", "subjectnow", "test", async=False)
assert len(outbox) == 1
assert outbox[0].subject == "subjectnow"
def test_send_async(self):
with app.app_context(), mail.record_messages() as outbox:
send("test@test.com", "subject", "test")
# message is not sent immediately
assert len(outbox) == 0
sleep(0.1)
assert len(outbox) == 1
assert outbox[0].subject == "subject"
| loomchild/icoin | test/integration_test/test_mail.py | Python | agpl-3.0 | 727 | 0.009629 |
# ~*~ coding: utf-8 ~*~
from celery import shared_task
from django.utils.translation import ugettext as _, gettext_noop
from common.utils import get_logger
from orgs.utils import org_aware_func
from ..models import Connectivity
from . import const
from .utils import check_asset_can_run_ansible
logger = get_logger(__file__)
__all__ = [
'test_account_connectivity_util', 'test_accounts_connectivity_manual',
'get_test_account_connectivity_tasks', 'test_user_connectivity',
'run_adhoc',
]
def get_test_account_connectivity_tasks(asset):
if asset.is_unixlike():
tasks = const.PING_UNIXLIKE_TASKS
elif asset.is_windows():
tasks = const.PING_WINDOWS_TASKS
else:
msg = _(
"The asset {} system platform {} does not "
"support run Ansible tasks".format(asset.hostname, asset.platform)
)
logger.info(msg)
tasks = []
return tasks
def run_adhoc(task_name, tasks, inventory):
"""
:param task_name
:param tasks
:param inventory
"""
from ops.ansible.runner import AdHocRunner
runner = AdHocRunner(inventory, options=const.TASK_OPTIONS)
result = runner.run(tasks, 'all', task_name)
return result.results_raw, result.results_summary
def test_user_connectivity(task_name, asset, username, password=None, private_key=None):
"""
:param task_name
:param asset
:param username
:param password
:param private_key
"""
from ops.inventory import JMSCustomInventory
tasks = get_test_account_connectivity_tasks(asset)
if not tasks:
logger.debug("No tasks ")
return {}, {}
inventory = JMSCustomInventory(
assets=[asset], username=username, password=password,
private_key=private_key
)
raw, summary = run_adhoc(
task_name=task_name, tasks=tasks, inventory=inventory
)
return raw, summary
@org_aware_func("account")
def test_account_connectivity_util(account, task_name):
"""
:param account: <AuthBook>对象
:param task_name:
:return:
"""
if not check_asset_can_run_ansible(account.asset):
return
account.load_auth()
try:
raw, summary = test_user_connectivity(
task_name=task_name, asset=account.asset,
username=account.username, password=account.password,
private_key=account.private_key_file
)
except Exception as e:
logger.warn("Failed run adhoc {}, {}".format(task_name, e))
return
if summary.get('success'):
account.set_connectivity(Connectivity.ok)
else:
account.set_connectivity(Connectivity.failed)
@shared_task(queue="ansible")
def test_accounts_connectivity_manual(accounts):
"""
:param accounts: <AuthBook>对象
"""
for account in accounts:
task_name = gettext_noop("Test account connectivity: ") + str(account)
test_account_connectivity_util(account, task_name)
print(".\n")
| jumpserver/jumpserver | apps/assets/tasks/account_connectivity.py | Python | gpl-3.0 | 2,991 | 0.000335 |
# -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
{
'name': 'Account Alternate Invoice',
'version': '7.0.1.0.0',
'author': 'Elico Corp',
'website': 'https://www.elico-corp.com',
'description': """
Account Alternate Invoice
""",
'depends': ['base', 'account', ],
'sequence': 10,
'data': [
'account_invoice_view.xml',
'report.xml',
],
'installable': True,
'application': False,
'auto_install': False,
}
| Elico-Corp/openerp-7.0 | account_alternate_invoice/__openerp__.py | Python | agpl-3.0 | 580 | 0.003454 |
import six
from unittest import TestCase
from dark.reads import Read
from dark.local_align import LocalAlignment
class TestLocalAlign(TestCase):
"""
Test the LocalAlignment class.
With match +1, mismatch -1, gap open -1, gap extend -1 and
gap extend decay 0.0.
"""
def testPositiveMismatch(self):
"""
If the mismatch value passed is positive, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError, 'Mismatch must be negative',
LocalAlignment, seq1, seq2, mismatch=3)
def testZeroMismatch(self):
"""
If the mismatch value passed is zero, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError, 'Mismatch must be negative',
LocalAlignment, seq1, seq2, mismatch=0)
def testPositiveGap(self):
"""
If the gap value passed is positive, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError, 'Gap must be negative',
LocalAlignment, seq1, seq2, gap=3)
def testZeroGap(self):
"""
If the gap value passed is zero, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError, 'Gap must be negative',
LocalAlignment, seq1, seq2, gap=0)
def testPositiveGapExtend(self):
"""
If the gap extend value passed is positive, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError,
'Gap extension penalty cannot be positive',
LocalAlignment, seq1, seq2, gapExtend=3)
def testFirstSequenceEmpty(self):
"""
If the first sequence passed is empty, an exception must be raised.
"""
seq1 = Read('seq1', '')
seq2 = Read('seq2', 'agtcagtcagtc')
six.assertRaisesRegex(self, ValueError, 'Empty sequence: seq1',
LocalAlignment, seq1, seq2)
def testSecondSequenceEmpty(self):
"""
If the second sequence passed is empty, an exception must be raised.
"""
seq1 = Read('seq1', 'agtcagtcagtc')
seq2 = Read('seq2', '')
six.assertRaisesRegex(self, ValueError, 'Empty sequence: seq2',
LocalAlignment, seq1, seq2)
def testBothSequencesEmpty(self):
"""
If two empty sequences are passed, an exception must be raised.
"""
seq1 = Read('seq1', '')
seq2 = Read('seq2', '')
six.assertRaisesRegex(self, ValueError, 'Empty sequence: seq1',
LocalAlignment, seq1, seq2)
def testGapAtStartOfSeq1(self):
seq1 = Read('seq1', 'gaatcg')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 6=\n'
'seq1 Match start: 1 Match end: 6\n'
'seq2 Match start: 2 Match end: 7\n'
'seq1 1 GAATCG 6\n'
' ||||||\n'
'seq2 2 GAATCG 7')
self.assertEqual(result, alignment)
def testGapAtStartOfSeq2(self):
seq1 = Read('seq1', 'cgaatcg')
seq2 = Read('seq2', 'gaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 6=\n'
'seq1 Match start: 2 Match end: 7\n'
'seq2 Match start: 1 Match end: 6\n'
'seq1 2 GAATCG 7\n'
' ||||||\n'
'seq2 1 GAATCG 6')
self.assertEqual(result, alignment)
def testGapAtEndOfSeq1(self):
seq1 = Read('seq1', 'cgaatc')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 6=\n'
'seq1 Match start: 1 Match end: 6\n'
'seq2 Match start: 1 Match end: 6\n'
'seq1 1 CGAATC 6\n'
' ||||||\n'
'seq2 1 CGAATC 6')
self.assertEqual(result, alignment)
def testGapAtEndOfSeq2(self):
seq1 = Read('seq1', 'cgaatcg')
seq2 = Read('seq2', 'cgaatc')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 6=\n'
'seq1 Match start: 1 Match end: 6\n'
'seq2 Match start: 1 Match end: 6\n'
'seq1 1 CGAATC 6\n'
' ||||||\n'
'seq2 1 CGAATC 6')
self.assertEqual(result, alignment)
def testGapAtBothEndsOfSeq1(self):
seq1 = Read('seq1', 'gaatc')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 5=\n'
'seq1 Match start: 1 Match end: 5\n'
'seq2 Match start: 2 Match end: 6\n'
'seq1 1 GAATC 5\n'
' |||||\n'
'seq2 2 GAATC 6')
self.assertEqual(result, alignment)
def testGapAtBothEndsOfSeq2(self):
seq1 = Read('seq1', 'cgaatcg')
seq2 = Read('seq2', 'gaatc')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 5=\n'
'seq1 Match start: 2 Match end: 6\n'
'seq2 Match start: 1 Match end: 5\n'
'seq1 2 GAATC 6\n'
' |||||\n'
'seq2 1 GAATC 5')
self.assertEqual(result, alignment)
def testAlignmentWithGapInMiddle(self):
seq1 = Read('seq1', 'agtcagtcagtc')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 2=1D1=\n'
'seq1 Match start: 7 Match end: 10\n'
'seq2 Match start: 5 Match end: 7\n'
'seq1 7 TCAG 10\n'
' || |\n'
'seq2 5 TC-G 7')
self.assertEqual(result, alignment)
def testTwoEqualSequences(self):
"""
When two identical sequences are given, the result should
show that the sequences completely match.
"""
seq1 = Read('seq1', 'cgaatcg')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 7=\n'
'seq1 Match start: 1 Match end: 7\n'
'seq2 Match start: 1 Match end: 7\n'
'seq1 1 CGAATCG 7\n'
' |||||||\n'
'seq2 1 CGAATCG 7')
self.assertEqual(result, alignment)
def testTwoCompletelyDifferentSequences(self):
"""
When two completely different sequences are given, the result
should be the two sequences with an empty alignment.
"""
seq1 = Read('seq1', 'aaaaaa')
seq2 = Read('seq2', 'gggggg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nNo alignment between seq1 and seq2\n')
self.assertEqual(result, alignment)
def testWikiAnswer(self):
"""
Test the example given in Wikipedia:
http://en.wikipedia.org/wiki/Smith%E2%80%93Waterman_algorithm
"""
seq1 = Read('seq1', 'ACACACTA')
seq2 = Read('seq2', 'AGCACACA')
align = LocalAlignment(seq1, seq2, match=2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 1=1I5=1D1=\n'
'seq1 Match start: 1 Match end: 8\n'
'seq2 Match start: 1 Match end: 8\n'
'seq1 1 A-CACACTA 8\n'
' | ||||| |\n'
'seq2 1 AGCACAC-A 8')
self.assertEqual(result, alignment)
def testWikiAnswerWithMatchOne(self):
"""
Test the example given in Wikipedia
http://en.wikipedia.org/wiki/Smith%E2%80%93Waterman_algorithm
Wikipedia uses a match score of two, here we use a score of one.
"""
seq1 = Read('seq1', 'ACACACTA')
seq2 = Read('seq2', 'AGCACACA')
align = LocalAlignment(seq1, seq2, match=1)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 5=1D1=\n'
'seq1 Match start: 2 Match end: 8\n'
'seq2 Match start: 3 Match end: 8\n'
'seq1 2 CACACTA 8\n'
' ||||| |\n'
'seq2 3 CACAC-A 8')
self.assertEqual(result, alignment)
def testWikiAnswerAsDict(self):
"""
Test the example given in Wikipedia:
http://en.wikipedia.org/wiki/Smith%E2%80%93Waterman_algorithm
with the return result being a dict.
"""
seq1 = Read('seq1', 'ACACACTA')
seq2 = Read('seq2', 'AGCACACA')
align = LocalAlignment(seq1, seq2, match=2)
result = align.createAlignment()
self.assertEqual(
{
'cigar': '1=1I5=1D1=',
'sequence1Start': 1,
'sequence1End': 8,
'sequence2Start': 1,
'sequence2End': 8,
'text': [
'seq1 1 A-CACACTA 8',
' | ||||| |',
'seq2 1 AGCACAC-A 8',
]
},
result
)
def testWikiAnswerWithMatchOneAsDict(self):
"""
Test the example given in Wikipedia
http://en.wikipedia.org/wiki/Smith%E2%80%93Waterman_algorithm
Wikipedia uses a match score of two, here we use a score of one.
Get the result as a dict.
"""
seq1 = Read('seq1', 'ACACACTA')
seq2 = Read('seq2', 'AGCACACA')
align = LocalAlignment(seq1, seq2, match=1)
result = align.createAlignment()
self.assertEqual(
{
'cigar': '5=1D1=',
'sequence1Start': 2,
'sequence1End': 8,
'sequence2Start': 3,
'sequence2End': 8,
'text': [
'seq1 2 CACACTA 8',
' ||||| |',
'seq2 3 CACAC-A 8',
]
},
result
)
| terrycojones/dark-matter | test/test_local_align.py | Python | mit | 11,532 | 0 |
# -*- coding: utf-8 -*-
from twisted.web.resource import Resource
from bouser.helpers.plugin_helpers import Dependency
__author__ = 'viruzzz-kun'
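# The Dependency descriptors below resolve other bouser plugins by name; each
# @<dependency>.on callback fires once that plugin is registered, mounting this
# resource under the web root and attaching the optional child resources.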
class SimarglResource(Resource):
web = Dependency('bouser.web')
es = Dependency('bouser.ezekiel.eventsource', optional=True)
rpc = Dependency('bouser.ezekiel.rest', optional=True)
@web.on
def web_on(self, web):
web.root_resource.putChild('ezekiel', self)
@es.on
def es_on(self, es):
self.putChild('es', es)
@rpc.on
def rpc_on(self, rpc):
self.putChild('rpc', rpc)
| hitsl/bouser_simargl | bouser_simargl/web.py | Python | isc | 577 | 0 |
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
TEMPLATE_DIRS = [
os.path.join(BASE_DIR, 'tests'),
os.path.join(BASE_DIR, 'tri_form/templates'),
]
TEMPLATE_DEBUG = True
# Django >=1.9
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': TEMPLATE_DIRS,
'APP_DIRS': True,
'OPTIONS': {
'debug': TEMPLATE_DEBUG,
}
}
]
SECRET_KEY = "foobar"
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'tri_form',
'tests'
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
| TriOptima/tri.form | tests/settings.py | Python | bsd-3-clause | 710 | 0 |
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import pytest
from selenium.webdriver.common.by import By
class PageLoadingTests(unittest.TestCase):
def testShouldWaitForDocumentToBeLoaded(self):
self._loadSimplePage()
self.assertEqual(self.driver.title, "Hello WebDriver")
# Disabled till Java WebServer is used
#def testShouldFollowRedirectsSentInTheHttpResponseHeaders(self):
# self.driver.get(pages.redirectPage);
# self.assertEqual(self.driver.title, "We Arrive Here")
# Disabled till the Java WebServer is used
#def testShouldFollowMetaRedirects(self):
# self._loadPage("metaRedirect")
# self.assertEqual(self.driver.title, "We Arrive Here")
def testShouldBeAbleToGetAFragmentOnTheCurrentPage(self):
self._loadPage("xhtmlTest")
location = self.driver.current_url
self.driver.get(location + "#text")
self.driver.find_element(by=By.ID, value="id1")
@pytest.mark.ignore_safari
def testShouldReturnWhenGettingAUrlThatDoesNotResolve(self):
try:
# Of course, we're up the creek if this ever does get registered
self.driver.get("http://www.thisurldoesnotexist.comx/")
except ValueError:
pass
@pytest.mark.ignore_safari
def testShouldReturnWhenGettingAUrlThatDoesNotConnect(self):
# Here's hoping that there's nothing here. There shouldn't be
self.driver.get("http://localhost:3001")
#@Ignore({IE, IPHONE, SELENESE})
#def testShouldBeAbleToLoadAPageWithFramesetsAndWaitUntilAllFramesAreLoaded() {
# self.driver.get(pages.framesetPage);
# self.driver.switchTo().frame(0);
    # WebElement pageNumber = self.driver.findElement(By.xpath("//span[@id='pageNumber']"));
# self.assertEqual((pageNumber.getText().trim(), equalTo("1"));
# self.driver.switchTo().defaultContent().switchTo().frame(1);
    # pageNumber = self.driver.findElement(By.xpath("//span[@id='pageNumber']"));
# self.assertEqual((pageNumber.getText().trim(), equalTo("2"));
#Need to implement this decorator
#@NeedsFreshDriver
    #def testShouldDoNothingIfThereIsNothingToGoBackTo() {
# String originalTitle = self.driver.getTitle();
# self.driver.get(pages.formPage);
# self.driver.back();
# We may have returned to the browser's home page
# self.assertEqual(self.driver.title, anyOf(equalTo(originalTitle), equalTo("We Leave From Here")));
def testShouldBeAbleToNavigateBackInTheBrowserHistory(self):
self._loadPage("formPage")
self.driver.find_element(by=By.ID, value="imageButton").submit()
self.assertEqual(self.driver.title, "We Arrive Here")
self.driver.back()
self.assertEqual(self.driver.title, "We Leave From Here")
def testShouldBeAbleToNavigateBackInTheBrowserHistoryInPresenceOfIframes(self):
self._loadPage("xhtmlTest")
        self.driver.find_element(by=By.NAME, value="sameWindow").click()
self.assertEqual(self.driver.title, "This page has iframes")
self.driver.back()
self.assertEqual(self.driver.title, "XHTML Test Page")
def testShouldBeAbleToNavigateForwardsInTheBrowserHistory(self):
self._loadPage("formPage")
self.driver.find_element(by=By.ID, value="imageButton").submit()
self.assertEqual(self.driver.title, "We Arrive Here")
self.driver.back()
self.assertEqual(self.driver.title, "We Leave From Here")
self.driver.forward()
self.assertEqual(self.driver.title, "We Arrive Here")
@pytest.mark.ignore_ie
    def testShouldNotHangIfDocumentOpenCallIsNeverFollowedByDocumentCloseCall(self):
self._loadPage("document_write_in_onload")
self.driver.find_element(By.XPATH, "//body")
def testShouldBeAbleToRefreshAPage(self):
self._loadPage("xhtmlTest")
self.driver.refresh()
self.assertEqual(self.driver.title, "XHTML Test Page")
def _pageURL(self, name):
return self.webserver.where_is(name + '.html')
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
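# Note (added for clarity; an assumption about the wider suite): these tests
# rely on a shared fixture that sets self.driver (a WebDriver instance) and
# self.webserver before each test runs -- neither is created in this file.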
| jerome-jacob/selenium | py/test/selenium/webdriver/common/page_loading_tests.py | Python | apache-2.0 | 5,014 | 0.007579 |
import copy
import re
import sys
import tempfile
import unittest
from mock.tests.support import ALWAYS_EQ
from mock.tests.support import is_instance
from mock import (
call, DEFAULT, patch, sentinel,
MagicMock, Mock, NonCallableMock,
NonCallableMagicMock, AsyncMock,
create_autospec, mock
)
from mock.mock import _Call, _CallList
import mock.mock as mock_module
class Iter(object):
def __init__(self):
self.thing = iter(['this', 'is', 'an', 'iter'])
def __iter__(self):
return self
def next(self):
return next(self.thing)
__next__ = next
class Something(object):
def meth(self, a, b, c, d=None): pass
@classmethod
def cmeth(cls, a, b, c, d=None): pass
@staticmethod
def smeth(a, b, c, d=None): pass
def something(a): pass
class MockTest(unittest.TestCase):
def test_all(self):
# if __all__ is badly defined then import * will raise an error
# We have to exec it because you can't import * inside a method
# in Python 3
exec("from mock.mock import *")
def test_constructor(self):
mock = Mock()
self.assertFalse(mock.called, "called not initialised correctly")
self.assertEqual(mock.call_count, 0,
"call_count not initialised correctly")
self.assertTrue(is_instance(mock.return_value, Mock),
"return_value not initialised correctly")
self.assertEqual(mock.call_args, None,
"call_args not initialised correctly")
self.assertEqual(mock.call_args_list, [],
"call_args_list not initialised correctly")
self.assertEqual(mock.method_calls, [],
"method_calls not initialised correctly")
# Can't use hasattr for this test as it always returns True on a mock
self.assertNotIn('_items', mock.__dict__,
"default mock should not have '_items' attribute")
self.assertIsNone(mock._mock_parent,
"parent not initialised correctly")
self.assertIsNone(mock._mock_methods,
"methods not initialised correctly")
self.assertEqual(mock._mock_children, {},
"children not initialised incorrectly")
def test_return_value_in_constructor(self):
mock = Mock(return_value=None)
self.assertIsNone(mock.return_value,
"return value in constructor not honoured")
def test_change_return_value_via_delegate(self):
def f(): pass
mock = create_autospec(f)
mock.mock.return_value = 1
self.assertEqual(mock(), 1)
def test_change_side_effect_via_delegate(self):
def f(): pass
mock = create_autospec(f)
mock.mock.side_effect = TypeError()
with self.assertRaises(TypeError):
mock()
def test_repr(self):
mock = Mock(name='foo')
self.assertIn('foo', repr(mock))
self.assertIn("'%s'" % id(mock), repr(mock))
mocks = [(Mock(), 'mock'), (Mock(name='bar'), 'bar')]
for mock, name in mocks:
self.assertIn('%s.bar' % name, repr(mock.bar))
self.assertIn('%s.foo()' % name, repr(mock.foo()))
self.assertIn('%s.foo().bing' % name, repr(mock.foo().bing))
self.assertIn('%s()' % name, repr(mock()))
self.assertIn('%s()()' % name, repr(mock()()))
self.assertIn('%s()().foo.bar.baz().bing' % name,
repr(mock()().foo.bar.baz().bing))
def test_repr_with_spec(self):
class X(object):
pass
mock = Mock(spec=X)
self.assertIn(" spec='X' ", repr(mock))
mock = Mock(spec=X())
self.assertIn(" spec='X' ", repr(mock))
mock = Mock(spec_set=X)
self.assertIn(" spec_set='X' ", repr(mock))
mock = Mock(spec_set=X())
self.assertIn(" spec_set='X' ", repr(mock))
mock = Mock(spec=X, name='foo')
self.assertIn(" spec='X' ", repr(mock))
self.assertIn(" name='foo' ", repr(mock))
mock = Mock(name='foo')
self.assertNotIn("spec", repr(mock))
mock = Mock()
self.assertNotIn("spec", repr(mock))
mock = Mock(spec=['foo'])
self.assertNotIn("spec", repr(mock))
def test_side_effect(self):
mock = Mock()
def effect(*args, **kwargs):
raise SystemError('kablooie')
mock.side_effect = effect
self.assertRaises(SystemError, mock, 1, 2, fish=3)
mock.assert_called_with(1, 2, fish=3)
results = [1, 2, 3]
def effect():
return results.pop()
mock.side_effect = effect
self.assertEqual([mock(), mock(), mock()], [3, 2, 1],
"side effect not used correctly")
mock = Mock(side_effect=sentinel.SideEffect)
self.assertEqual(mock.side_effect, sentinel.SideEffect,
"side effect in constructor not used")
def side_effect():
return DEFAULT
mock = Mock(side_effect=side_effect, return_value=sentinel.RETURN)
self.assertEqual(mock(), sentinel.RETURN)
def test_autospec_side_effect(self):
# Test for issue17826
results = [1, 2, 3]
def effect():
return results.pop()
def f(): pass
mock = create_autospec(f)
mock.side_effect = [1, 2, 3]
self.assertEqual([mock(), mock(), mock()], [1, 2, 3],
"side effect not used correctly in create_autospec")
# Test where side effect is a callable
results = [1, 2, 3]
mock = create_autospec(f)
mock.side_effect = effect
self.assertEqual([mock(), mock(), mock()], [3, 2, 1],
"callable side effect not used correctly")
def test_autospec_side_effect_exception(self):
# Test for issue 23661
def f(): pass
mock = create_autospec(f)
mock.side_effect = ValueError('Bazinga!')
self.assertRaisesRegex(ValueError, 'Bazinga!', mock)
def test_reset_mock(self):
parent = Mock()
spec = ["something"]
mock = Mock(name="child", parent=parent, spec=spec)
mock(sentinel.Something, something=sentinel.SomethingElse)
something = mock.something
mock.something()
mock.side_effect = sentinel.SideEffect
return_value = mock.return_value
return_value()
mock.reset_mock()
self.assertEqual(mock._mock_name, "child",
"name incorrectly reset")
self.assertEqual(mock._mock_parent, parent,
"parent incorrectly reset")
self.assertEqual(mock._mock_methods, spec,
"methods incorrectly reset")
self.assertFalse(mock.called, "called not reset")
self.assertEqual(mock.call_count, 0, "call_count not reset")
self.assertEqual(mock.call_args, None, "call_args not reset")
self.assertEqual(mock.call_args_list, [], "call_args_list not reset")
self.assertEqual(mock.method_calls, [],
"method_calls not initialised correctly: %r != %r" %
(mock.method_calls, []))
self.assertEqual(mock.mock_calls, [])
self.assertEqual(mock.side_effect, sentinel.SideEffect,
"side_effect incorrectly reset")
self.assertEqual(mock.return_value, return_value,
"return_value incorrectly reset")
self.assertFalse(return_value.called, "return value mock not reset")
self.assertEqual(mock._mock_children, {'something': something},
"children reset incorrectly")
self.assertEqual(mock.something, something,
"children incorrectly cleared")
self.assertFalse(mock.something.called, "child not reset")
def test_reset_mock_recursion(self):
mock = Mock()
mock.return_value = mock
# used to cause recursion
mock.reset_mock()
def test_reset_mock_on_mock_open_issue_18622(self):
a = mock.mock_open()
a.reset_mock()
def test_call(self):
mock = Mock()
self.assertTrue(is_instance(mock.return_value, Mock),
"Default return_value should be a Mock")
result = mock()
self.assertEqual(mock(), result,
"different result from consecutive calls")
mock.reset_mock()
ret_val = mock(sentinel.Arg)
self.assertTrue(mock.called, "called not set")
self.assertEqual(mock.call_count, 1, "call_count incorrect")
self.assertEqual(mock.call_args, ((sentinel.Arg,), {}),
"call_args not set")
self.assertEqual(mock.call_args.args, (sentinel.Arg,),
"call_args not set")
self.assertEqual(mock.call_args.kwargs, {},
"call_args not set")
self.assertEqual(mock.call_args_list, [((sentinel.Arg,), {})],
"call_args_list not initialised correctly")
mock.return_value = sentinel.ReturnValue
ret_val = mock(sentinel.Arg, key=sentinel.KeyArg)
self.assertEqual(ret_val, sentinel.ReturnValue,
"incorrect return value")
self.assertEqual(mock.call_count, 2, "call_count incorrect")
self.assertEqual(mock.call_args,
((sentinel.Arg,), {'key': sentinel.KeyArg}),
"call_args not set")
self.assertEqual(mock.call_args_list, [
((sentinel.Arg,), {}),
((sentinel.Arg,), {'key': sentinel.KeyArg})
],
"call_args_list not set")
def test_call_args_comparison(self):
mock = Mock()
mock()
mock(sentinel.Arg)
mock(kw=sentinel.Kwarg)
mock(sentinel.Arg, kw=sentinel.Kwarg)
self.assertEqual(mock.call_args_list, [
(),
((sentinel.Arg,),),
({"kw": sentinel.Kwarg},),
((sentinel.Arg,), {"kw": sentinel.Kwarg})
])
self.assertEqual(mock.call_args,
((sentinel.Arg,), {"kw": sentinel.Kwarg}))
self.assertEqual(mock.call_args.args, (sentinel.Arg,))
self.assertEqual(mock.call_args.kwargs, {"kw": sentinel.Kwarg})
# Comparing call_args to a long sequence should not raise
# an exception. See issue 24857.
self.assertFalse(mock.call_args == "a long sequence")
def test_calls_equal_with_any(self):
# Check that equality and non-equality is consistent even when
# comparing with mock.ANY
mm = mock.MagicMock()
self.assertTrue(mm == mm)
self.assertFalse(mm != mm)
self.assertFalse(mm == mock.MagicMock())
self.assertTrue(mm != mock.MagicMock())
self.assertTrue(mm == mock.ANY)
self.assertFalse(mm != mock.ANY)
self.assertTrue(mock.ANY == mm)
self.assertFalse(mock.ANY != mm)
self.assertTrue(mm == ALWAYS_EQ)
self.assertFalse(mm != ALWAYS_EQ)
call1 = mock.call(mock.MagicMock())
call2 = mock.call(mock.ANY)
self.assertTrue(call1 == call2)
self.assertFalse(call1 != call2)
self.assertTrue(call2 == call1)
self.assertFalse(call2 != call1)
self.assertTrue(call1 == ALWAYS_EQ)
self.assertFalse(call1 != ALWAYS_EQ)
self.assertFalse(call1 == 1)
self.assertTrue(call1 != 1)
def test_assert_called_with(self):
mock = Mock()
mock()
# Will raise an exception if it fails
mock.assert_called_with()
self.assertRaises(AssertionError, mock.assert_called_with, 1)
mock.reset_mock()
self.assertRaises(AssertionError, mock.assert_called_with)
mock(1, 2, 3, a='fish', b='nothing')
mock.assert_called_with(1, 2, 3, a='fish', b='nothing')
def test_assert_called_with_any(self):
m = MagicMock()
m(MagicMock())
m.assert_called_with(mock.ANY)
def test_assert_called_with_function_spec(self):
def f(a, b, c, d=None): pass
mock = Mock(spec=f)
mock(1, b=2, c=3)
mock.assert_called_with(1, 2, 3)
mock.assert_called_with(a=1, b=2, c=3)
self.assertRaises(AssertionError, mock.assert_called_with,
1, b=3, c=2)
# Expected call doesn't match the spec's signature
with self.assertRaises(AssertionError) as cm:
mock.assert_called_with(e=8)
self.assertIsInstance(cm.exception.__cause__, TypeError)
def test_assert_called_with_method_spec(self):
def _check(mock):
mock(1, b=2, c=3)
mock.assert_called_with(1, 2, 3)
mock.assert_called_with(a=1, b=2, c=3)
self.assertRaises(AssertionError, mock.assert_called_with,
1, b=3, c=2)
mock = Mock(spec=Something().meth)
_check(mock)
mock = Mock(spec=Something.cmeth)
_check(mock)
mock = Mock(spec=Something().cmeth)
_check(mock)
mock = Mock(spec=Something.smeth)
_check(mock)
mock = Mock(spec=Something().smeth)
_check(mock)
def test_assert_called_exception_message(self):
msg = "Expected '{0}' to have been called"
with self.assertRaisesRegex(AssertionError, msg.format('mock')):
Mock().assert_called()
with self.assertRaisesRegex(AssertionError, msg.format('test_name')):
Mock(name="test_name").assert_called()
def test_assert_called_once_with(self):
mock = Mock()
mock()
# Will raise an exception if it fails
mock.assert_called_once_with()
mock()
self.assertRaises(AssertionError, mock.assert_called_once_with)
mock.reset_mock()
self.assertRaises(AssertionError, mock.assert_called_once_with)
mock('foo', 'bar', baz=2)
mock.assert_called_once_with('foo', 'bar', baz=2)
mock.reset_mock()
mock('foo', 'bar', baz=2)
self.assertRaises(
AssertionError,
lambda: mock.assert_called_once_with('bob', 'bar', baz=2)
)
def test_assert_called_once_with_call_list(self):
m = Mock()
m(1)
m(2)
self.assertRaisesRegex(AssertionError,
re.escape("Calls: [call(1), call(2)]"),
lambda: m.assert_called_once_with(2))
def test_assert_called_once_with_function_spec(self):
def f(a, b, c, d=None): pass
mock = Mock(spec=f)
mock(1, b=2, c=3)
mock.assert_called_once_with(1, 2, 3)
mock.assert_called_once_with(a=1, b=2, c=3)
self.assertRaises(AssertionError, mock.assert_called_once_with,
1, b=3, c=2)
# Expected call doesn't match the spec's signature
with self.assertRaises(AssertionError) as cm:
mock.assert_called_once_with(e=8)
self.assertIsInstance(cm.exception.__cause__, TypeError)
# Mock called more than once => always fails
mock(4, 5, 6)
self.assertRaises(AssertionError, mock.assert_called_once_with,
1, 2, 3)
self.assertRaises(AssertionError, mock.assert_called_once_with,
4, 5, 6)
def test_attribute_access_returns_mocks(self):
mock = Mock()
something = mock.something
self.assertTrue(is_instance(something, Mock), "attribute isn't a mock")
self.assertEqual(mock.something, something,
"different attributes returned for same name")
# Usage example
mock = Mock()
mock.something.return_value = 3
self.assertEqual(mock.something(), 3, "method returned wrong value")
self.assertTrue(mock.something.called,
"method didn't record being called")
def test_attributes_have_name_and_parent_set(self):
mock = Mock()
something = mock.something
self.assertEqual(something._mock_name, "something",
"attribute name not set correctly")
self.assertEqual(something._mock_parent, mock,
"attribute parent not set correctly")
def test_method_calls_recorded(self):
mock = Mock()
mock.something(3, fish=None)
mock.something_else.something(6, cake=sentinel.Cake)
self.assertEqual(mock.something_else.method_calls,
[("something", (6,), {'cake': sentinel.Cake})],
"method calls not recorded correctly")
self.assertEqual(mock.method_calls, [
("something", (3,), {'fish': None}),
("something_else.something", (6,), {'cake': sentinel.Cake})
],
"method calls not recorded correctly")
def test_method_calls_compare_easily(self):
mock = Mock()
mock.something()
self.assertEqual(mock.method_calls, [('something',)])
self.assertEqual(mock.method_calls, [('something', (), {})])
mock = Mock()
mock.something('different')
self.assertEqual(mock.method_calls, [('something', ('different',))])
self.assertEqual(mock.method_calls,
[('something', ('different',), {})])
mock = Mock()
mock.something(x=1)
self.assertEqual(mock.method_calls, [('something', {'x': 1})])
self.assertEqual(mock.method_calls, [('something', (), {'x': 1})])
mock = Mock()
mock.something('different', some='more')
self.assertEqual(mock.method_calls, [
('something', ('different',), {'some': 'more'})
])
def test_only_allowed_methods_exist(self):
for spec in ['something'], ('something',):
for arg in 'spec', 'spec_set':
mock = Mock(**{arg: spec})
# this should be allowed
mock.something
self.assertRaisesRegex(
AttributeError,
"Mock object has no attribute 'something_else'",
getattr, mock, 'something_else'
)
def test_from_spec(self):
class Something(object):
x = 3
__something__ = None
def y(self): pass
def test_attributes(mock):
# should work
mock.x
mock.y
mock.__something__
self.assertRaisesRegex(
AttributeError,
"Mock object has no attribute 'z'",
getattr, mock, 'z'
)
self.assertRaisesRegex(
AttributeError,
"Mock object has no attribute '__foobar__'",
getattr, mock, '__foobar__'
)
test_attributes(Mock(spec=Something))
test_attributes(Mock(spec=Something()))
def test_wraps_calls(self):
real = Mock()
mock = Mock(wraps=real)
self.assertEqual(mock(), real())
real.reset_mock()
mock(1, 2, fish=3)
real.assert_called_with(1, 2, fish=3)
def test_wraps_prevents_automatic_creation_of_mocks(self):
class Real(object):
pass
real = Real()
mock = Mock(wraps=real)
self.assertRaises(AttributeError, lambda: mock.new_attr())
def test_wraps_call_with_nondefault_return_value(self):
real = Mock()
mock = Mock(wraps=real)
mock.return_value = 3
self.assertEqual(mock(), 3)
self.assertFalse(real.called)
def test_wraps_attributes(self):
class Real(object):
attribute = Mock()
real = Real()
mock = Mock(wraps=real)
self.assertEqual(mock.attribute(), real.attribute())
self.assertRaises(AttributeError, lambda: mock.fish)
self.assertNotEqual(mock.attribute, real.attribute)
result = mock.attribute.frog(1, 2, fish=3)
Real.attribute.frog.assert_called_with(1, 2, fish=3)
self.assertEqual(result, Real.attribute.frog())
def test_customize_wrapped_object_with_side_effect_iterable_with_default(self):
class Real(object):
def method(self):
return sentinel.ORIGINAL_VALUE
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = [sentinel.VALUE1, DEFAULT]
self.assertEqual(mock.method(), sentinel.VALUE1)
self.assertEqual(mock.method(), sentinel.ORIGINAL_VALUE)
self.assertRaises(StopIteration, mock.method)
def test_customize_wrapped_object_with_side_effect_iterable(self):
class Real(object):
def method(self): pass
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = [sentinel.VALUE1, sentinel.VALUE2]
self.assertEqual(mock.method(), sentinel.VALUE1)
self.assertEqual(mock.method(), sentinel.VALUE2)
self.assertRaises(StopIteration, mock.method)
def test_customize_wrapped_object_with_side_effect_exception(self):
class Real(object):
def method(self): pass
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = RuntimeError
self.assertRaises(RuntimeError, mock.method)
def test_customize_wrapped_object_with_side_effect_function(self):
class Real(object):
def method(self): pass
def side_effect():
return sentinel.VALUE
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = side_effect
self.assertEqual(mock.method(), sentinel.VALUE)
def test_customize_wrapped_object_with_return_value(self):
class Real(object):
def method(self): pass
real = Real()
mock = Mock(wraps=real)
mock.method.return_value = sentinel.VALUE
self.assertEqual(mock.method(), sentinel.VALUE)
def test_customize_wrapped_object_with_return_value_and_side_effect(self):
# side_effect should always take precedence over return_value.
class Real(object):
def method(self): pass
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = [sentinel.VALUE1, sentinel.VALUE2]
mock.method.return_value = sentinel.WRONG_VALUE
self.assertEqual(mock.method(), sentinel.VALUE1)
self.assertEqual(mock.method(), sentinel.VALUE2)
self.assertRaises(StopIteration, mock.method)
def test_customize_wrapped_object_with_return_value_and_side_effect2(self):
# side_effect can return DEFAULT to default to return_value
class Real(object):
def method(self): pass
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = lambda: DEFAULT
mock.method.return_value = sentinel.VALUE
self.assertEqual(mock.method(), sentinel.VALUE)
def test_customize_wrapped_object_with_return_value_and_side_effect_default(self):
class Real(object):
def method(self): pass
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = [sentinel.VALUE1, DEFAULT]
mock.method.return_value = sentinel.RETURN
self.assertEqual(mock.method(), sentinel.VALUE1)
self.assertEqual(mock.method(), sentinel.RETURN)
self.assertRaises(StopIteration, mock.method)
def test_magic_method_wraps_dict(self):
# bpo-25597: MagicMock with wrap doesn't call wrapped object's
# method for magic methods with default values.
data = {'foo': 'bar'}
wrapped_dict = MagicMock(wraps=data)
self.assertEqual(wrapped_dict.get('foo'), 'bar')
# Accessing key gives a MagicMock
self.assertIsInstance(wrapped_dict['foo'], MagicMock)
# __contains__ method has a default value of False
self.assertFalse('foo' in wrapped_dict)
# return_value is non-sentinel and takes precedence over wrapped value.
wrapped_dict.get.return_value = 'return_value'
self.assertEqual(wrapped_dict.get('foo'), 'return_value')
# return_value is sentinel and hence wrapped value is returned.
wrapped_dict.get.return_value = sentinel.DEFAULT
self.assertEqual(wrapped_dict.get('foo'), 'bar')
self.assertEqual(wrapped_dict.get('baz'), None)
self.assertIsInstance(wrapped_dict['baz'], MagicMock)
self.assertFalse('bar' in wrapped_dict)
data['baz'] = 'spam'
self.assertEqual(wrapped_dict.get('baz'), 'spam')
self.assertIsInstance(wrapped_dict['baz'], MagicMock)
self.assertFalse('bar' in wrapped_dict)
del data['baz']
self.assertEqual(wrapped_dict.get('baz'), None)
def test_magic_method_wraps_class(self):
class Foo:
def __getitem__(self, index):
return index
def __custom_method__(self):
return "foo"
klass = MagicMock(wraps=Foo)
obj = klass()
self.assertEqual(obj.__getitem__(2), 2)
self.assertEqual(obj[2], 2)
self.assertEqual(obj.__custom_method__(), "foo")
def test_exceptional_side_effect(self):
mock = Mock(side_effect=AttributeError)
self.assertRaises(AttributeError, mock)
mock = Mock(side_effect=AttributeError('foo'))
self.assertRaises(AttributeError, mock)
def test_baseexceptional_side_effect(self):
mock = Mock(side_effect=KeyboardInterrupt)
self.assertRaises(KeyboardInterrupt, mock)
mock = Mock(side_effect=KeyboardInterrupt('foo'))
self.assertRaises(KeyboardInterrupt, mock)
def test_assert_called_with_message(self):
mock = Mock()
self.assertRaisesRegex(AssertionError, 'not called',
mock.assert_called_with)
def test_assert_called_once_with_message(self):
mock = Mock(name='geoffrey')
self.assertRaisesRegex(AssertionError,
r"Expected 'geoffrey' to be called once\.",
mock.assert_called_once_with)
def test__name__(self):
mock = Mock()
self.assertRaises(AttributeError, lambda: mock.__name__)
mock.__name__ = 'foo'
self.assertEqual(mock.__name__, 'foo')
def test_spec_list_subclass(self):
class Sub(list):
pass
mock = Mock(spec=Sub(['foo']))
mock.append(3)
mock.append.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock, 'foo')
def test_spec_class(self):
class X(object):
pass
mock = Mock(spec=X)
self.assertIsInstance(mock, X)
mock = Mock(spec=X())
self.assertIsInstance(mock, X)
self.assertIs(mock.__class__, X)
self.assertEqual(Mock().__class__.__name__, 'Mock')
mock = Mock(spec_set=X)
self.assertIsInstance(mock, X)
mock = Mock(spec_set=X())
self.assertIsInstance(mock, X)
def test_spec_class_no_object_base(self):
class X:
pass
mock = Mock(spec=X)
self.assertIsInstance(mock, X)
mock = Mock(spec=X())
self.assertIsInstance(mock, X)
self.assertIs(mock.__class__, X)
self.assertEqual(Mock().__class__.__name__, 'Mock')
mock = Mock(spec_set=X)
self.assertIsInstance(mock, X)
mock = Mock(spec_set=X())
self.assertIsInstance(mock, X)
def test_setting_attribute_with_spec_set(self):
class X(object):
y = 3
mock = Mock(spec=X)
mock.x = 'foo'
mock = Mock(spec_set=X)
def set_attr():
mock.x = 'foo'
mock.y = 'foo'
self.assertRaises(AttributeError, set_attr)
def test_copy(self):
current = sys.getrecursionlimit()
self.addCleanup(sys.setrecursionlimit, current)
# can't use sys.maxint as this doesn't exist in Python 3
sys.setrecursionlimit(int(10e8))
# this segfaults without the fix in place
copy.copy(Mock())
def test_subclass_with_properties(self):
class SubClass(Mock):
def _get(self):
return 3
def _set(self, value):
raise NameError('strange error')
some_attribute = property(_get, _set)
s = SubClass(spec_set=SubClass)
self.assertEqual(s.some_attribute, 3)
def test():
s.some_attribute = 3
self.assertRaises(NameError, test)
def test():
s.foo = 'bar'
self.assertRaises(AttributeError, test)
def test_setting_call(self):
mock = Mock()
def __call__(self, a):
self._increment_mock_call(a)
return self._mock_call(a)
type(mock).__call__ = __call__
mock('one')
mock.assert_called_with('one')
self.assertRaises(TypeError, mock, 'one', 'two')
def test_dir(self):
mock = Mock()
attrs = set(dir(mock))
type_attrs = set([m for m in dir(Mock) if not m.startswith('_')])
# all public attributes from the type are included
self.assertEqual(set(), type_attrs - attrs)
# creates these attributes
mock.a, mock.b
self.assertIn('a', dir(mock))
self.assertIn('b', dir(mock))
# instance attributes
mock.c = mock.d = None
self.assertIn('c', dir(mock))
self.assertIn('d', dir(mock))
# magic methods
mock.__iter__ = lambda s: iter([])
self.assertIn('__iter__', dir(mock))
def test_dir_from_spec(self):
mock = Mock(spec=unittest.TestCase)
testcase_attrs = set(dir(unittest.TestCase))
attrs = set(dir(mock))
# all attributes from the spec are included
self.assertEqual(set(), testcase_attrs - attrs)
        # an instance attribute should appear only once in dir()
mock.version = 3
self.assertEqual(dir(mock).count('version'), 1)
def test_filter_dir(self):
patcher = patch.object(mock, 'FILTER_DIR', False)
patcher.start()
try:
attrs = set(dir(Mock()))
type_attrs = set(dir(Mock))
# ALL attributes from the type are included
self.assertEqual(set(), type_attrs - attrs)
finally:
patcher.stop()
def test_dir_does_not_include_deleted_attributes(self):
mock = Mock()
mock.child.return_value = 1
self.assertIn('child', dir(mock))
del mock.child
self.assertNotIn('child', dir(mock))
def test_configure_mock(self):
mock = Mock(foo='bar')
self.assertEqual(mock.foo, 'bar')
mock = MagicMock(foo='bar')
self.assertEqual(mock.foo, 'bar')
kwargs = {'side_effect': KeyError, 'foo.bar.return_value': 33,
'foo': MagicMock()}
mock = Mock(**kwargs)
self.assertRaises(KeyError, mock)
self.assertEqual(mock.foo.bar(), 33)
self.assertIsInstance(mock.foo, MagicMock)
mock = Mock()
mock.configure_mock(**kwargs)
self.assertRaises(KeyError, mock)
self.assertEqual(mock.foo.bar(), 33)
self.assertIsInstance(mock.foo, MagicMock)
def assertRaisesWithMsg(self, exception, message, func, *args, **kwargs):
# needed because assertRaisesRegex doesn't work easily with newlines
with self.assertRaises(exception) as context:
func(*args, **kwargs)
msg = str(context.exception)
self.assertEqual(msg, message)
def test_assert_called_with_failure_message(self):
mock = NonCallableMock()
actual = 'not called.'
expected = "mock(1, '2', 3, bar='foo')"
message = 'expected call not found.\nExpected: %s\nActual: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual),
mock.assert_called_with, 1, '2', 3, bar='foo'
)
mock.foo(1, '2', 3, foo='foo')
asserters = [
mock.foo.assert_called_with, mock.foo.assert_called_once_with
]
for meth in asserters:
actual = "foo(1, '2', 3, foo='foo')"
expected = "foo(1, '2', 3, bar='foo')"
message = 'expected call not found.\nExpected: %s\nActual: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual),
meth, 1, '2', 3, bar='foo'
)
# just kwargs
for meth in asserters:
actual = "foo(1, '2', 3, foo='foo')"
expected = "foo(bar='foo')"
message = 'expected call not found.\nExpected: %s\nActual: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual),
meth, bar='foo'
)
# just args
for meth in asserters:
actual = "foo(1, '2', 3, foo='foo')"
expected = "foo(1, 2, 3)"
message = 'expected call not found.\nExpected: %s\nActual: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual),
meth, 1, 2, 3
)
# empty
for meth in asserters:
actual = "foo(1, '2', 3, foo='foo')"
expected = "foo()"
message = 'expected call not found.\nExpected: %s\nActual: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual), meth
)
def test_mock_calls(self):
mock = MagicMock()
# need to do this because MagicMock.mock_calls used to just return
# a MagicMock which also returned a MagicMock when __eq__ was called
self.assertIs(mock.mock_calls == [], True)
mock = MagicMock()
mock()
expected = [('', (), {})]
self.assertEqual(mock.mock_calls, expected)
mock.foo()
expected.append(call.foo())
self.assertEqual(mock.mock_calls, expected)
# intermediate mock_calls work too
self.assertEqual(mock.foo.mock_calls, [('', (), {})])
mock = MagicMock()
mock().foo(1, 2, 3, a=4, b=5)
expected = [
('', (), {}), ('().foo', (1, 2, 3), dict(a=4, b=5))
]
self.assertEqual(mock.mock_calls, expected)
self.assertEqual(mock.return_value.foo.mock_calls,
[('', (1, 2, 3), dict(a=4, b=5))])
self.assertEqual(mock.return_value.mock_calls,
[('foo', (1, 2, 3), dict(a=4, b=5))])
mock = MagicMock()
mock().foo.bar().baz()
expected = [
('', (), {}), ('().foo.bar', (), {}),
('().foo.bar().baz', (), {})
]
self.assertEqual(mock.mock_calls, expected)
self.assertEqual(mock().mock_calls,
call.foo.bar().baz().call_list())
for kwargs in dict(), dict(name='bar'):
mock = MagicMock(**kwargs)
int(mock.foo)
expected = [('foo.__int__', (), {})]
self.assertEqual(mock.mock_calls, expected)
mock = MagicMock(**kwargs)
mock.a()()
expected = [('a', (), {}), ('a()', (), {})]
self.assertEqual(mock.mock_calls, expected)
self.assertEqual(mock.a().mock_calls, [call()])
mock = MagicMock(**kwargs)
mock(1)(2)(3)
self.assertEqual(mock.mock_calls, call(1)(2)(3).call_list())
self.assertEqual(mock().mock_calls, call(2)(3).call_list())
self.assertEqual(mock()().mock_calls, call(3).call_list())
mock = MagicMock(**kwargs)
mock(1)(2)(3).a.b.c(4)
self.assertEqual(mock.mock_calls,
call(1)(2)(3).a.b.c(4).call_list())
self.assertEqual(mock().mock_calls,
call(2)(3).a.b.c(4).call_list())
self.assertEqual(mock()().mock_calls,
call(3).a.b.c(4).call_list())
mock = MagicMock(**kwargs)
int(mock().foo.bar().baz())
last_call = ('().foo.bar().baz().__int__', (), {})
self.assertEqual(mock.mock_calls[-1], last_call)
self.assertEqual(mock().mock_calls,
call.foo.bar().baz().__int__().call_list())
self.assertEqual(mock().foo.bar().mock_calls,
call.baz().__int__().call_list())
self.assertEqual(mock().foo.bar().baz.mock_calls,
call().__int__().call_list())
def test_child_mock_call_equal(self):
m = Mock()
result = m()
result.wibble()
# parent looks like this:
self.assertEqual(m.mock_calls, [call(), call().wibble()])
# but child should look like this:
self.assertEqual(result.mock_calls, [call.wibble()])
def test_mock_call_not_equal_leaf(self):
m = Mock()
m.foo().something()
self.assertNotEqual(m.mock_calls[1], call.foo().different())
self.assertEqual(m.mock_calls[0], call.foo())
def test_mock_call_not_equal_non_leaf(self):
m = Mock()
m.foo().bar()
self.assertNotEqual(m.mock_calls[1], call.baz().bar())
self.assertNotEqual(m.mock_calls[0], call.baz())
def test_mock_call_not_equal_non_leaf_params_different(self):
m = Mock()
m.foo(x=1).bar()
# This isn't ideal, but there's no way to fix it without breaking backwards compatibility:
self.assertEqual(m.mock_calls[1], call.foo(x=2).bar())
def test_mock_call_not_equal_non_leaf_attr(self):
m = Mock()
m.foo.bar()
self.assertNotEqual(m.mock_calls[0], call.baz.bar())
def test_mock_call_not_equal_non_leaf_call_versus_attr(self):
m = Mock()
m.foo.bar()
self.assertNotEqual(m.mock_calls[0], call.foo().bar())
def test_mock_call_repr(self):
m = Mock()
m.foo().bar().baz.bob()
self.assertEqual(repr(m.mock_calls[0]), 'call.foo()')
self.assertEqual(repr(m.mock_calls[1]), 'call.foo().bar()')
self.assertEqual(repr(m.mock_calls[2]), 'call.foo().bar().baz.bob()')
def test_mock_call_repr_loop(self):
m = Mock()
m.foo = m
repr(m.foo())
self.assertRegex(repr(m.foo()), r"<Mock name='mock\(\)' id='\d+'>")
def test_mock_calls_contains(self):
m = Mock()
self.assertFalse([call()] in m.mock_calls)
def test_subclassing(self):
class Subclass(Mock):
pass
mock = Subclass()
self.assertIsInstance(mock.foo, Subclass)
self.assertIsInstance(mock(), Subclass)
class Subclass(Mock):
def _get_child_mock(self, **kwargs):
return Mock(**kwargs)
mock = Subclass()
self.assertNotIsInstance(mock.foo, Subclass)
self.assertNotIsInstance(mock(), Subclass)
def test_arg_lists(self):
mocks = [
Mock(),
MagicMock(),
NonCallableMock(),
NonCallableMagicMock()
]
def assert_attrs(mock):
names = 'call_args_list', 'method_calls', 'mock_calls'
for name in names:
attr = getattr(mock, name)
self.assertIsInstance(attr, _CallList)
self.assertIsInstance(attr, list)
self.assertEqual(attr, [])
for mock in mocks:
assert_attrs(mock)
if callable(mock):
mock()
mock(1, 2)
mock(a=3)
mock.reset_mock()
assert_attrs(mock)
mock.foo()
mock.foo.bar(1, a=3)
mock.foo(1).bar().baz(3)
mock.reset_mock()
assert_attrs(mock)
def test_call_args_two_tuple(self):
mock = Mock()
mock(1, a=3)
mock(2, b=4)
self.assertEqual(len(mock.call_args), 2)
self.assertEqual(mock.call_args.args, (2,))
self.assertEqual(mock.call_args.kwargs, dict(b=4))
expected_list = [((1,), dict(a=3)), ((2,), dict(b=4))]
for expected, call_args in zip(expected_list, mock.call_args_list):
self.assertEqual(len(call_args), 2)
self.assertEqual(expected[0], call_args[0])
self.assertEqual(expected[1], call_args[1])
def test_side_effect_iterator(self):
mock = Mock(side_effect=iter([1, 2, 3]))
self.assertEqual([mock(), mock(), mock()], [1, 2, 3])
self.assertRaises(StopIteration, mock)
mock = MagicMock(side_effect=['a', 'b', 'c'])
self.assertEqual([mock(), mock(), mock()], ['a', 'b', 'c'])
self.assertRaises(StopIteration, mock)
mock = Mock(side_effect='ghi')
self.assertEqual([mock(), mock(), mock()], ['g', 'h', 'i'])
self.assertRaises(StopIteration, mock)
class Foo(object):
pass
mock = MagicMock(side_effect=Foo)
self.assertIsInstance(mock(), Foo)
mock = Mock(side_effect=Iter())
self.assertEqual([mock(), mock(), mock(), mock()],
['this', 'is', 'an', 'iter'])
self.assertRaises(StopIteration, mock)
def test_side_effect_iterator_exceptions(self):
for Klass in Mock, MagicMock:
iterable = (ValueError, 3, KeyError, 6)
m = Klass(side_effect=iterable)
self.assertRaises(ValueError, m)
self.assertEqual(m(), 3)
self.assertRaises(KeyError, m)
self.assertEqual(m(), 6)
def test_side_effect_setting_iterator(self):
mock = Mock()
mock.side_effect = iter([1, 2, 3])
self.assertEqual([mock(), mock(), mock()], [1, 2, 3])
self.assertRaises(StopIteration, mock)
side_effect = mock.side_effect
self.assertIsInstance(side_effect, type(iter([])))
mock.side_effect = ['a', 'b', 'c']
self.assertEqual([mock(), mock(), mock()], ['a', 'b', 'c'])
self.assertRaises(StopIteration, mock)
side_effect = mock.side_effect
self.assertIsInstance(side_effect, type(iter([])))
this_iter = Iter()
mock.side_effect = this_iter
self.assertEqual([mock(), mock(), mock(), mock()],
['this', 'is', 'an', 'iter'])
self.assertRaises(StopIteration, mock)
self.assertIs(mock.side_effect, this_iter)
def test_side_effect_iterator_default(self):
mock = Mock(return_value=2)
mock.side_effect = iter([1, DEFAULT])
self.assertEqual([mock(), mock()], [1, 2])
def test_assert_has_calls_any_order(self):
mock = Mock()
mock(1, 2)
mock(a=3)
mock(3, 4)
mock(b=6)
mock(b=6)
kalls = [
call(1, 2), ({'a': 3},),
((3, 4),), ((), {'a': 3}),
('', (1, 2)), ('', {'a': 3}),
('', (1, 2), {}), ('', (), {'a': 3})
]
for kall in kalls:
mock.assert_has_calls([kall], any_order=True)
for kall in call(1, '2'), call(b=3), call(), 3, None, 'foo':
self.assertRaises(
AssertionError, mock.assert_has_calls,
[kall], any_order=True
)
kall_lists = [
[call(1, 2), call(b=6)],
[call(3, 4), call(1, 2)],
[call(b=6), call(b=6)],
]
for kall_list in kall_lists:
mock.assert_has_calls(kall_list, any_order=True)
kall_lists = [
[call(b=6), call(b=6), call(b=6)],
[call(1, 2), call(1, 2)],
[call(3, 4), call(1, 2), call(5, 7)],
[call(b=6), call(3, 4), call(b=6), call(1, 2), call(b=6)],
]
for kall_list in kall_lists:
self.assertRaises(
AssertionError, mock.assert_has_calls,
kall_list, any_order=True
)
def test_assert_has_calls(self):
kalls1 = [
call(1, 2), ({'a': 3},),
((3, 4),), call(b=6),
('', (1,), {'b': 6}),
]
kalls2 = [call.foo(), call.bar(1)]
kalls2.extend(call.spam().baz(a=3).call_list())
kalls2.extend(call.bam(set(), foo={}).fish([1]).call_list())
mocks = []
for mock in Mock(), MagicMock():
mock(1, 2)
mock(a=3)
mock(3, 4)
mock(b=6)
mock(1, b=6)
mocks.append((mock, kalls1))
mock = Mock()
mock.foo()
mock.bar(1)
mock.spam().baz(a=3)
mock.bam(set(), foo={}).fish([1])
mocks.append((mock, kalls2))
for mock, kalls in mocks:
for i in range(len(kalls)):
for step in 1, 2, 3:
these = kalls[i:i+step]
mock.assert_has_calls(these)
if len(these) > 1:
self.assertRaises(
AssertionError,
mock.assert_has_calls,
list(reversed(these))
)
def test_assert_has_calls_nested_spec(self):
class Something:
def __init__(self): pass
def meth(self, a, b, c, d=None): pass
class Foo:
def __init__(self, a): pass
def meth1(self, a, b): pass
mock_class = create_autospec(Something)
for m in [mock_class, mock_class()]:
m.meth(1, 2, 3, d=1)
m.assert_has_calls([call.meth(1, 2, 3, d=1)])
m.assert_has_calls([call.meth(1, 2, 3, 1)])
mock_class.reset_mock()
for m in [mock_class, mock_class()]:
self.assertRaises(AssertionError, m.assert_has_calls, [call.Foo()])
m.Foo(1).meth1(1, 2)
m.assert_has_calls([call.Foo(1), call.Foo(1).meth1(1, 2)])
m.Foo.assert_has_calls([call(1), call().meth1(1, 2)])
mock_class.reset_mock()
invalid_calls = [call.meth(1),
call.non_existent(1),
call.Foo().non_existent(1),
call.Foo().meth(1, 2, 3, 4)]
for kall in invalid_calls:
self.assertRaises(AssertionError,
mock_class.assert_has_calls,
[kall]
)
def test_assert_has_calls_nested_without_spec(self):
m = MagicMock()
m().foo().bar().baz()
m.one().two().three()
calls = call.one().two().three().call_list()
m.assert_has_calls(calls)
def test_assert_has_calls_with_function_spec(self):
def f(a, b, c, d=None): pass
mock = Mock(spec=f)
mock(1, b=2, c=3)
mock(4, 5, c=6, d=7)
mock(10, 11, c=12)
calls = [
('', (1, 2, 3), {}),
('', (4, 5, 6), {'d': 7}),
((10, 11, 12), {}),
]
mock.assert_has_calls(calls)
mock.assert_has_calls(calls, any_order=True)
mock.assert_has_calls(calls[1:])
mock.assert_has_calls(calls[1:], any_order=True)
mock.assert_has_calls(calls[:-1])
mock.assert_has_calls(calls[:-1], any_order=True)
# Reversed order
calls = list(reversed(calls))
with self.assertRaises(AssertionError):
mock.assert_has_calls(calls)
mock.assert_has_calls(calls, any_order=True)
with self.assertRaises(AssertionError):
mock.assert_has_calls(calls[1:])
mock.assert_has_calls(calls[1:], any_order=True)
with self.assertRaises(AssertionError):
mock.assert_has_calls(calls[:-1])
mock.assert_has_calls(calls[:-1], any_order=True)
def test_assert_has_calls_not_matching_spec_error(self):
def f(x=None): pass
mock = Mock(spec=f)
mock(1)
with self.assertRaisesRegex(
AssertionError,
'^{}$'.format(
re.escape('Calls not found.\n'
'Expected: [call()]\n'
'Actual: [call(1)]'))) as cm:
mock.assert_has_calls([call()])
self.assertIsNone(cm.exception.__cause__)
with self.assertRaisesRegex(
AssertionError,
'^{}$'.format(
re.escape(
'Error processing expected calls.\n'
"Errors: [None, TypeError('too many positional arguments')]\n"
"Expected: [call(), call(1, 2)]\n"
'Actual: [call(1)]').replace(
"arguments\\'", "arguments\\',?"
))) as cm:
mock.assert_has_calls([call(), call(1, 2)])
self.assertIsInstance(cm.exception.__cause__, TypeError)
def test_assert_any_call(self):
mock = Mock()
mock(1, 2)
mock(a=3)
mock(1, b=6)
mock.assert_any_call(1, 2)
mock.assert_any_call(a=3)
mock.assert_any_call(1, b=6)
self.assertRaises(
AssertionError,
mock.assert_any_call
)
self.assertRaises(
AssertionError,
mock.assert_any_call,
1, 3
)
self.assertRaises(
AssertionError,
mock.assert_any_call,
a=4
)
def test_assert_any_call_with_function_spec(self):
def f(a, b, c, d=None): pass
mock = Mock(spec=f)
mock(1, b=2, c=3)
mock(4, 5, c=6, d=7)
mock.assert_any_call(1, 2, 3)
mock.assert_any_call(a=1, b=2, c=3)
mock.assert_any_call(4, 5, 6, 7)
mock.assert_any_call(a=4, b=5, c=6, d=7)
self.assertRaises(AssertionError, mock.assert_any_call,
1, b=3, c=2)
# Expected call doesn't match the spec's signature
with self.assertRaises(AssertionError) as cm:
mock.assert_any_call(e=8)
self.assertIsInstance(cm.exception.__cause__, TypeError)
def test_mock_calls_create_autospec(self):
def f(a, b): pass
obj = Iter()
obj.f = f
funcs = [
create_autospec(f),
create_autospec(obj).f
]
for func in funcs:
func(1, 2)
func(3, 4)
self.assertEqual(
func.mock_calls, [call(1, 2), call(3, 4)]
)
#Issue21222
def test_create_autospec_with_name(self):
m = mock.create_autospec(object(), name='sweet_func')
self.assertIn('sweet_func', repr(m))
#Issue23078
def test_create_autospec_classmethod_and_staticmethod(self):
class TestClass:
@classmethod
def class_method(cls): pass
@staticmethod
def static_method(): pass
for method in ('class_method', 'static_method'):
with self.subTest(method=method):
mock_method = mock.create_autospec(getattr(TestClass, method))
mock_method()
mock_method.assert_called_once_with()
self.assertRaises(TypeError, mock_method, 'extra_arg')
#Issue21238
def test_mock_unsafe(self):
m = Mock()
msg = "Attributes cannot start with 'assert' or its misspellings"
with self.assertRaisesRegex(AttributeError, msg):
m.assert_foo_call()
with self.assertRaisesRegex(AttributeError, msg):
m.assret_foo_call()
with self.assertRaisesRegex(AttributeError, msg):
m.asert_foo_call()
with self.assertRaisesRegex(AttributeError, msg):
m.aseert_foo_call()
with self.assertRaisesRegex(AttributeError, msg):
m.assrt_foo_call()
m = Mock(unsafe=True)
m.assert_foo_call()
m.assret_foo_call()
m.asert_foo_call()
m.aseert_foo_call()
m.assrt_foo_call()
#Issue21262
def test_assert_not_called(self):
m = Mock()
m.hello.assert_not_called()
m.hello()
with self.assertRaises(AssertionError):
m.hello.assert_not_called()
def test_assert_not_called_message(self):
m = Mock()
m(1, 2)
self.assertRaisesRegex(AssertionError,
re.escape("Calls: [call(1, 2)]"),
m.assert_not_called)
def test_assert_called(self):
m = Mock()
with self.assertRaises(AssertionError):
m.hello.assert_called()
m.hello()
m.hello.assert_called()
m.hello()
m.hello.assert_called()
def test_assert_called_once(self):
m = Mock()
with self.assertRaises(AssertionError):
m.hello.assert_called_once()
m.hello()
m.hello.assert_called_once()
m.hello()
with self.assertRaises(AssertionError):
m.hello.assert_called_once()
def test_assert_called_once_message(self):
m = Mock()
m(1, 2)
m(3)
self.assertRaisesRegex(AssertionError,
re.escape("Calls: [call(1, 2), call(3)]"),
m.assert_called_once)
def test_assert_called_once_message_not_called(self):
m = Mock()
with self.assertRaises(AssertionError) as e:
m.assert_called_once()
self.assertNotIn("Calls:", str(e.exception))
#Issue37212 printout of keyword args now preserves the original order
def test_ordered_call_signature(self):
m = Mock()
m.hello(name='hello', daddy='hero')
text = "call(name='hello', daddy='hero')"
self.assertEqual(repr(m.hello.call_args), text)
#Issue21270 overrides tuple methods for mock.call objects
def test_override_tuple_methods(self):
c = call.count()
        i = call.index(132, 'hello')
m = Mock()
m.count()
m.index(132,"hello")
self.assertEqual(m.method_calls[0], c)
self.assertEqual(m.method_calls[1], i)
def test_reset_return_sideeffect(self):
        m = Mock(return_value=10, side_effect=[2, 3])
m.reset_mock(return_value=True, side_effect=True)
self.assertIsInstance(m.return_value, Mock)
self.assertEqual(m.side_effect, None)
def test_reset_return(self):
        m = Mock(return_value=10, side_effect=[2, 3])
m.reset_mock(return_value=True)
self.assertIsInstance(m.return_value, Mock)
self.assertNotEqual(m.side_effect, None)
def test_reset_sideeffect(self):
m = Mock(return_value=10, side_effect=[2, 3])
m.reset_mock(side_effect=True)
self.assertEqual(m.return_value, 10)
self.assertEqual(m.side_effect, None)
def test_reset_return_with_children(self):
m = MagicMock(f=MagicMock(return_value=1))
self.assertEqual(m.f(), 1)
m.reset_mock(return_value=True)
self.assertNotEqual(m.f(), 1)
def test_reset_return_with_children_side_effect(self):
m = MagicMock(f=MagicMock(side_effect=[2, 3]))
self.assertNotEqual(m.f.side_effect, None)
m.reset_mock(side_effect=True)
self.assertEqual(m.f.side_effect, None)
def test_mock_add_spec(self):
class _One(object):
one = 1
class _Two(object):
two = 2
class Anything(object):
one = two = three = 'four'
klasses = [
Mock, MagicMock, NonCallableMock, NonCallableMagicMock
]
for Klass in list(klasses):
klasses.append(lambda K=Klass: K(spec=Anything))
klasses.append(lambda K=Klass: K(spec_set=Anything))
for Klass in klasses:
for kwargs in dict(), dict(spec_set=True):
mock = Klass()
#no error
mock.one, mock.two, mock.three
for One, Two in [(_One, _Two), (['one'], ['two'])]:
for kwargs in dict(), dict(spec_set=True):
mock.mock_add_spec(One, **kwargs)
mock.one
self.assertRaises(
AttributeError, getattr, mock, 'two'
)
self.assertRaises(
AttributeError, getattr, mock, 'three'
)
if 'spec_set' in kwargs:
self.assertRaises(
AttributeError, setattr, mock, 'three', None
)
mock.mock_add_spec(Two, **kwargs)
self.assertRaises(
AttributeError, getattr, mock, 'one'
)
mock.two
self.assertRaises(
AttributeError, getattr, mock, 'three'
)
if 'spec_set' in kwargs:
self.assertRaises(
AttributeError, setattr, mock, 'three', None
)
# note that creating a mock, setting an instance attribute, and
# *then* setting a spec doesn't work. Not the intended use case
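        # Illustrative sketch of that caveat (assumed behaviour, not asserted
        # by this suite): an attribute set before mock_add_spec() survives
        # the new spec.
        #   m = Mock()
        #   m.extra = 3
        #   m.mock_add_spec(['one'])
        #   m.extra  # -> 3, even though 'extra' is not in the spec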
def test_mock_add_spec_magic_methods(self):
for Klass in MagicMock, NonCallableMagicMock:
mock = Klass()
int(mock)
mock.mock_add_spec(object)
self.assertRaises(TypeError, int, mock)
mock = Klass()
mock['foo']
            mock.__int__.return_value = 4
mock.mock_add_spec(int)
self.assertEqual(int(mock), 4)
self.assertRaises(TypeError, lambda: mock['foo'])
def test_adding_child_mock(self):
for Klass in (NonCallableMock, Mock, MagicMock, NonCallableMagicMock,
AsyncMock):
mock = Klass()
mock.foo = Mock()
mock.foo()
self.assertEqual(mock.method_calls, [call.foo()])
self.assertEqual(mock.mock_calls, [call.foo()])
mock = Klass()
mock.bar = Mock(name='name')
mock.bar()
self.assertEqual(mock.method_calls, [])
self.assertEqual(mock.mock_calls, [])
# mock with an existing _new_parent but no name
mock = Klass()
mock.baz = MagicMock()()
mock.baz()
self.assertEqual(mock.method_calls, [])
self.assertEqual(mock.mock_calls, [])
def test_adding_return_value_mock(self):
for Klass in Mock, MagicMock:
mock = Klass()
mock.return_value = MagicMock()
mock()()
self.assertEqual(mock.mock_calls, [call(), call()()])
def test_manager_mock(self):
class Foo(object):
one = 'one'
two = 'two'
manager = Mock()
p1 = patch.object(Foo, 'one')
p2 = patch.object(Foo, 'two')
mock_one = p1.start()
self.addCleanup(p1.stop)
mock_two = p2.start()
self.addCleanup(p2.stop)
manager.attach_mock(mock_one, 'one')
manager.attach_mock(mock_two, 'two')
Foo.two()
Foo.one()
self.assertEqual(manager.mock_calls, [call.two(), call.one()])
def test_magic_methods_mock_calls(self):
for Klass in Mock, MagicMock:
m = Klass()
m.__int__ = Mock(return_value=3)
m.__float__ = MagicMock(return_value=3.0)
int(m)
float(m)
self.assertEqual(m.mock_calls, [call.__int__(), call.__float__()])
self.assertEqual(m.method_calls, [])
def test_mock_open_reuse_issue_21750(self):
mocked_open = mock.mock_open(read_data='data')
f1 = mocked_open('a-name')
f1_data = f1.read()
f2 = mocked_open('another-name')
f2_data = f2.read()
self.assertEqual(f1_data, f2_data)
def test_mock_open_dunder_iter_issue(self):
# Test dunder_iter method generates the expected result and
# consumes the iterator.
mocked_open = mock.mock_open(read_data='Remarkable\nNorwegian Blue')
f1 = mocked_open('a-name')
lines = [line for line in f1]
self.assertEqual(lines[0], 'Remarkable\n')
self.assertEqual(lines[1], 'Norwegian Blue')
self.assertEqual(list(f1), [])
def test_mock_open_using_next(self):
mocked_open = mock.mock_open(read_data='1st line\n2nd line\n3rd line')
f1 = mocked_open('a-name')
line1 = next(f1)
line2 = f1.__next__()
lines = [line for line in f1]
self.assertEqual(line1, '1st line\n')
self.assertEqual(line2, '2nd line\n')
self.assertEqual(lines[0], '3rd line')
self.assertEqual(list(f1), [])
with self.assertRaises(StopIteration):
next(f1)
def test_mock_open_next_with_readline_with_return_value(self):
mopen = mock.mock_open(read_data='foo\nbarn')
mopen.return_value.readline.return_value = 'abc'
self.assertEqual('abc', next(mopen()))
def test_mock_open_write(self):
# Test exception in file writing write()
mock_namedtemp = mock.mock_open(mock.MagicMock(name='JLV'))
with mock.patch('tempfile.NamedTemporaryFile', mock_namedtemp):
mock_filehandle = mock_namedtemp.return_value
mock_write = mock_filehandle.write
mock_write.side_effect = OSError('Test 2 Error')
def attempt():
tempfile.NamedTemporaryFile().write('asd')
self.assertRaises(OSError, attempt)
def test_mock_open_alter_readline(self):
mopen = mock.mock_open(read_data='foo\nbarn')
        mopen.return_value.readline.side_effect = lambda *args: 'abc'
first = mopen().readline()
second = mopen().readline()
self.assertEqual('abc', first)
self.assertEqual('abc', second)
def test_mock_open_after_eof(self):
# read, readline and readlines should work after end of file.
_open = mock.mock_open(read_data='foo')
h = _open('bar')
h.read()
self.assertEqual('', h.read())
self.assertEqual('', h.read())
self.assertEqual('', h.readline())
self.assertEqual('', h.readline())
self.assertEqual([], h.readlines())
self.assertEqual([], h.readlines())
def test_mock_parents(self):
for Klass in Mock, MagicMock:
m = Klass()
original_repr = repr(m)
m.return_value = m
self.assertIs(m(), m)
self.assertEqual(repr(m), original_repr)
m.reset_mock()
self.assertIs(m(), m)
self.assertEqual(repr(m), original_repr)
m = Klass()
m.b = m.a
self.assertIn("name='mock.a'", repr(m.b))
self.assertIn("name='mock.a'", repr(m.a))
m.reset_mock()
self.assertIn("name='mock.a'", repr(m.b))
self.assertIn("name='mock.a'", repr(m.a))
m = Klass()
original_repr = repr(m)
m.a = m()
m.a.return_value = m
self.assertEqual(repr(m), original_repr)
self.assertEqual(repr(m.a()), original_repr)
def test_attach_mock(self):
classes = Mock, MagicMock, NonCallableMagicMock, NonCallableMock
for Klass in classes:
for Klass2 in classes:
m = Klass()
m2 = Klass2(name='foo')
m.attach_mock(m2, 'bar')
self.assertIs(m.bar, m2)
self.assertIn("name='mock.bar'", repr(m2))
m.bar.baz(1)
self.assertEqual(m.mock_calls, [call.bar.baz(1)])
self.assertEqual(m.method_calls, [call.bar.baz(1)])
def test_attach_mock_return_value(self):
classes = Mock, MagicMock, NonCallableMagicMock, NonCallableMock
for Klass in Mock, MagicMock:
for Klass2 in classes:
m = Klass()
m2 = Klass2(name='foo')
m.attach_mock(m2, 'return_value')
self.assertIs(m(), m2)
self.assertIn("name='mock()'", repr(m2))
m2.foo()
self.assertEqual(m.mock_calls, call().foo().call_list())
def test_attach_mock_patch_autospec(self):
parent = Mock()
with mock.patch(f'{__name__}.something', autospec=True) as mock_func:
self.assertEqual(mock_func.mock._extract_mock_name(), 'something')
parent.attach_mock(mock_func, 'child')
parent.child(1)
something(2)
mock_func(3)
parent_calls = [call.child(1), call.child(2), call.child(3)]
child_calls = [call(1), call(2), call(3)]
self.assertEqual(parent.mock_calls, parent_calls)
self.assertEqual(parent.child.mock_calls, child_calls)
self.assertEqual(something.mock_calls, child_calls)
self.assertEqual(mock_func.mock_calls, child_calls)
self.assertIn('mock.child', repr(parent.child.mock))
self.assertEqual(mock_func.mock._extract_mock_name(), 'mock.child')
def test_attach_mock_patch_autospec_signature(self):
with mock.patch(f'{__name__}.Something.meth', autospec=True) as mocked:
manager = Mock()
manager.attach_mock(mocked, 'attach_meth')
obj = Something()
obj.meth(1, 2, 3, d=4)
manager.assert_has_calls([call.attach_meth(mock.ANY, 1, 2, 3, d=4)])
obj.meth.assert_has_calls([call(mock.ANY, 1, 2, 3, d=4)])
mocked.assert_has_calls([call(mock.ANY, 1, 2, 3, d=4)])
with mock.patch(f'{__name__}.something', autospec=True) as mocked:
manager = Mock()
manager.attach_mock(mocked, 'attach_func')
something(1)
manager.assert_has_calls([call.attach_func(1)])
something.assert_has_calls([call(1)])
mocked.assert_has_calls([call(1)])
with mock.patch(f'{__name__}.Something', autospec=True) as mocked:
manager = Mock()
manager.attach_mock(mocked, 'attach_obj')
obj = Something()
obj.meth(1, 2, 3, d=4)
manager.assert_has_calls([call.attach_obj(),
call.attach_obj().meth(1, 2, 3, d=4)])
obj.meth.assert_has_calls([call(1, 2, 3, d=4)])
mocked.assert_has_calls([call(), call().meth(1, 2, 3, d=4)])
def test_attribute_deletion(self):
for mock in (Mock(), MagicMock(), NonCallableMagicMock(),
NonCallableMock()):
self.assertTrue(hasattr(mock, 'm'))
del mock.m
self.assertFalse(hasattr(mock, 'm'))
del mock.f
self.assertFalse(hasattr(mock, 'f'))
self.assertRaises(AttributeError, getattr, mock, 'f')
def test_mock_does_not_raise_on_repeated_attribute_deletion(self):
# bpo-20239: Assigning and deleting twice an attribute raises.
for mock in (Mock(), MagicMock(), NonCallableMagicMock(),
NonCallableMock()):
mock.foo = 3
self.assertTrue(hasattr(mock, 'foo'))
self.assertEqual(mock.foo, 3)
del mock.foo
self.assertFalse(hasattr(mock, 'foo'))
mock.foo = 4
self.assertTrue(hasattr(mock, 'foo'))
self.assertEqual(mock.foo, 4)
del mock.foo
self.assertFalse(hasattr(mock, 'foo'))
def test_mock_raises_when_deleting_nonexistent_attribute(self):
for mock in (Mock(), MagicMock(), NonCallableMagicMock(),
NonCallableMock()):
del mock.foo
with self.assertRaises(AttributeError):
del mock.foo
def test_reset_mock_does_not_raise_on_attr_deletion(self):
# bpo-31177: reset_mock should not raise AttributeError when attributes
# were deleted in a mock instance
mock = Mock()
mock.child = True
del mock.child
mock.reset_mock()
self.assertFalse(hasattr(mock, 'child'))
def test_class_assignable(self):
for mock in Mock(), MagicMock():
self.assertNotIsInstance(mock, int)
mock.__class__ = int
self.assertIsInstance(mock, int)
mock.foo
def test_name_attribute_of_call(self):
# bpo-35357: _Call should not disclose any attributes whose names
# may clash with popular ones (such as ".name")
self.assertIsNotNone(call.name)
self.assertEqual(type(call.name), _Call)
self.assertEqual(type(call.name().name), _Call)
def test_parent_attribute_of_call(self):
# bpo-35357: _Call should not disclose any attributes whose names
# may clash with popular ones (such as ".parent")
self.assertIsNotNone(call.parent)
self.assertEqual(type(call.parent), _Call)
self.assertEqual(type(call.parent().parent), _Call)
def test_parent_propagation_with_create_autospec(self):
def foo(a, b): pass
mock = Mock()
mock.child = create_autospec(foo)
mock.child(1, 2)
self.assertRaises(TypeError, mock.child, 1)
self.assertEqual(mock.mock_calls, [call.child(1, 2)])
self.assertIn('mock.child', repr(mock.child.mock))
def test_parent_propagation_with_autospec_attach_mock(self):
def foo(a, b): pass
parent = Mock()
parent.attach_mock(create_autospec(foo, name='bar'), 'child')
parent.child(1, 2)
self.assertRaises(TypeError, parent.child, 1)
self.assertEqual(parent.child.mock_calls, [call.child(1, 2)])
self.assertIn('mock.child', repr(parent.child.mock))
def test_isinstance_under_settrace(self):
# bpo-36593 : __class__ is not set for a class that has __class__
# property defined when it's used with sys.settrace(trace) set.
# Delete the module to force reimport with tracing function set
# restore the old reference later since there are other tests that are
# dependent on unittest.mock.patch. In testpatch.PatchTest
# test_patch_dict_test_prefix and test_patch_test_prefix not restoring
# causes the objects patched to go out of sync
old_patch = mock_module.patch
# Directly using __setattr__ on unittest.mock causes current imported
# reference to be updated. Use a lambda so that during cleanup the
# re-imported new reference is updated.
self.addCleanup(lambda patch: setattr(mock_module, 'patch', patch),
old_patch)
with patch.dict('sys.modules'):
del sys.modules['mock']
# This trace will stop coverage being measured ;-)
def trace(frame, event, arg): # pragma: no cover
return trace
self.addCleanup(sys.settrace, sys.gettrace())
sys.settrace(trace)
from mock.mock import (
Mock, MagicMock, NonCallableMock, NonCallableMagicMock
)
mocks = [
Mock, MagicMock, NonCallableMock, NonCallableMagicMock, AsyncMock
]
for mock in mocks:
obj = mock(spec=Something)
self.assertIsInstance(obj, Something)
def test_bool_not_called_when_passing_spec_arg(self):
class Something:
def __init__(self):
self.obj_with_bool_func = mock_module.MagicMock()
obj = Something()
        with mock_module.patch.object(obj, 'obj_with_bool_func', autospec=True):
            pass
self.assertEqual(obj.obj_with_bool_func.__bool__.call_count, 0)
if __name__ == '__main__':
unittest.main()
| testing-cabal/mock | mock/tests/testmock.py | Python | bsd-2-clause | 72,280 | 0.002048 |
#
# Copyright (c) 2014 ThoughtWorks, Inc.
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
from twisted.trial import unittest
from mockito import mock, when, verify, any as ANY
from pixelated.adapter.listeners.mailbox_indexer_listener import MailboxIndexerListener
from twisted.internet import defer
from pixelated.adapter.listeners.mailbox_indexer_listener import logger
class MailboxListenerTest(unittest.TestCase):
def setUp(self):
self.mail_store = mock()
self.account = mock()
self.account.mailboxes = []
def test_add_itself_to_mailbox_listeners(self):
self.account.mailboxes = ['INBOX']
mailbox = mock()
when(self.account).get_collection_by_mailbox('INBOX').thenReturn(mailbox)
mailbox.listeners = set()
when(mailbox).addListener = lambda x: mailbox.listeners.add(x)
self.assertNotIn(MailboxIndexerListener('INBOX', self.mail_store, mock()), mailbox.listeners)
MailboxIndexerListener.listen(self.account, 'INBOX', self.mail_store, mock())
self.assertIn(MailboxIndexerListener('INBOX', self.mail_store, mock()), mailbox.listeners)
def test_reindex_missing_idents(self):
mail = mock()
search_engine = mock()
when(search_engine).search('tag:inbox', all_mails=True).thenReturn(['ident1', 'ident2'])
listener = MailboxIndexerListener('INBOX', self.mail_store, search_engine)
when(self.mail_store).get_mailbox_mail_ids('INBOX').thenReturn({'ident1', 'ident2', 'missing_ident'})
when(self.mail_store).get_mails({'missing_ident'}, include_body=True).thenReturn([mail])
listener.notify_new()
verify(self.mail_store, times=1).get_mails({'missing_ident'}, include_body=True)
verify(search_engine).index_mails([mail])
@defer.inlineCallbacks
def test_catches_exceptions_to_not_break_other_listeners(self):
when(logger).error(ANY()).thenReturn(None)
listener = MailboxIndexerListener('INBOX', self.mail_store, mock())
yield listener.notify_new()
verify(logger).error(ANY())
| pixelated-project/pixelated-user-agent | service/test/unit/adapter/test_mailbox_indexer_listener.py | Python | agpl-3.0 | 2,690 | 0.003717 |
from django.db import models, migrations
import uuid
from django.contrib.auth.hashers import make_password
PUBLIC_ID = 1
def apply_migration(apps, schema_editor):
Group = apps.get_model('auth', 'Group')
public_group = Group()
public_group.name = "public"
public_group.id = PUBLIC_ID
public_group.save()
def revert_migration(apps, schema_editor):
Group = apps.get_model('auth', 'Group')
Group.objects.filter(id=PUBLIC_ID).delete()
class Migration(migrations.Migration):
dependencies = [
('main', '0002_auto_20200821_0710'),
]
operations = [
migrations.RunPython(apply_migration, revert_migration)
] | kartta-labs/noter-backend | noter_backend/main/migrations/0003_create_public_group.py | Python | apache-2.0 | 664 | 0.003012 |
############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at https://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
import os
import locale
import shutil
import subprocess
import sys
encoding = locale.getdefaultlocale()[1]
def is_windows_platform():
return sys.platform.startswith('win')
def is_linux_platform():
return sys.platform.startswith('linux')
def is_mac_platform():
return sys.platform.startswith('darwin')
# copy of shutil.copytree that does not bail out if the target directory already exists
# and that does not create empty directories
def copytree(src, dst, symlinks=False, ignore=None):
def ensure_dir(destdir, ensure):
if ensure and not os.path.isdir(destdir):
os.makedirs(destdir)
return False
names = os.listdir(src)
if ignore is not None:
ignored_names = ignore(src, names)
else:
ignored_names = set()
needs_ensure_dest_dir = True
errors = []
for name in names:
if name in ignored_names:
continue
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
needs_ensure_dest_dir = ensure_dir(dst, needs_ensure_dest_dir)
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks, ignore)
else:
needs_ensure_dest_dir = ensure_dir(dst, needs_ensure_dest_dir)
shutil.copy2(srcname, dstname)
# XXX What about devices, sockets etc.?
except (IOError, os.error) as why:
errors.append((srcname, dstname, str(why)))
# catch the Error from the recursive copytree so that we can
# continue with other files
except shutil.Error as err:
errors.extend(err.args[0])
try:
if os.path.exists(dst):
shutil.copystat(src, dst)
except shutil.WindowsError:
# can't copy file access times on Windows
pass
except OSError as why:
errors.extend((src, dst, str(why)))
if errors:
raise shutil.Error(errors)
def get_qt_install_info(qmake_bin):
output = subprocess.check_output([qmake_bin, '-query'])
lines = output.decode(encoding).strip().split('\n')
info = {}
for line in lines:
(var, sep, value) = line.partition(':')
info[var.strip()] = value.strip()
return info
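# Illustrative result (a sketch; actual paths vary by installation):
#   get_qt_install_info('/usr/bin/qmake')
#   => {'QT_INSTALL_PREFIX': '/usr', 'QT_INSTALL_LIBS': '/usr/lib', ...}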
def get_rpath(libfilepath, chrpath=None):
if chrpath is None:
chrpath = 'chrpath'
try:
        output = subprocess.check_output([chrpath, '-l', libfilepath]).strip()
    except subprocess.CalledProcessError: # no RPATH or RUNPATH
        return []
    output = output.decode(encoding)
    marker = 'RPATH='
    index = output.find(marker)
    if index < 0:
        marker = 'RUNPATH='
        index = output.find(marker)
    if index < 0:
        return []
    return output[index + len(marker):].split(':')
def fix_rpaths(path, qt_deploy_path, qt_install_info, chrpath=None):
if chrpath is None:
chrpath = 'chrpath'
qt_install_prefix = qt_install_info['QT_INSTALL_PREFIX']
qt_install_libs = qt_install_info['QT_INSTALL_LIBS']
def fix_rpaths_helper(filepath):
rpath = get_rpath(filepath, chrpath)
if len(rpath) <= 0:
return
# remove previous Qt RPATH
        new_rpath = list(filter(lambda path: not path.startswith(qt_install_prefix) and not path.startswith(qt_install_libs),
                                rpath))
# check for Qt linking
        lddOutput = subprocess.check_output(['ldd', filepath]).decode(encoding)
        if lddOutput.find('libQt5') >= 0 or lddOutput.find('libicu') >= 0:
# add Qt RPATH if necessary
relative_path = os.path.relpath(qt_deploy_path, os.path.dirname(filepath))
if relative_path == '.':
relative_path = ''
else:
relative_path = '/' + relative_path
qt_rpath = '$ORIGIN' + relative_path
if not any((path == qt_rpath) for path in rpath):
new_rpath.append(qt_rpath)
# change RPATH
if len(new_rpath) > 0:
subprocess.check_call([chrpath, '-r', ':'.join(new_rpath), filepath])
else: # no RPATH / RUNPATH left. delete.
subprocess.check_call([chrpath, '-d', filepath])
def is_unix_executable(filepath):
# Whether a file is really a binary executable and not a script and not a symlink (unix only)
if os.path.exists(filepath) and os.access(filepath, os.X_OK) and not os.path.islink(filepath):
with open(filepath) as f:
return f.read(2) != "#!"
def is_unix_library(filepath):
# Whether a file is really a library and not a symlink (unix only)
return os.path.basename(filepath).find('.so') != -1 and not os.path.islink(filepath)
for dirpath, dirnames, filenames in os.walk(path):
for filename in filenames:
filepath = os.path.join(dirpath, filename)
if is_unix_executable(filepath) or is_unix_library(filepath):
fix_rpaths_helper(filepath)
def is_debug_file(filepath):
if is_mac_platform():
return filepath.endswith('.dSYM') or '.dSYM/' in filepath
elif is_linux_platform():
return filepath.endswith('.debug')
else:
return filepath.endswith('.pdb')
def is_debug(path, filenames):
return [fn for fn in filenames if is_debug_file(os.path.join(path, fn))]
def is_not_debug(path, filenames):
files = [fn for fn in filenames if os.path.isfile(os.path.join(path, fn))]
return [fn for fn in files if not is_debug_file(os.path.join(path, fn))]
def codesign(app_path):
signing_identity = os.environ.get('SIGNING_IDENTITY')
if is_mac_platform() and signing_identity:
codesign_call = ['codesign', '--force', '--deep', '-s', signing_identity, '-v']
signing_flags = os.environ.get('SIGNING_FLAGS')
if signing_flags:
codesign_call.extend(signing_flags.split())
codesign_call.append(app_path)
subprocess.check_call(codesign_call)
| sailfish-sdk/sailfish-qtcreator | scripts/common.py | Python | gpl-3.0 | 7,269 | 0.003027 |
"""
class to test test_simple_function.py
follows the standards in https://docs.python.org/2/library/unittest.html
"""
import unittest
from LOTlib.Examples.SymbolicRegression.old.test_simple_function import *
class test_simple_functionTest(unittest.TestCase):
# initialization that happens before each test is carried out
def setUp(self):
pass
# function that is executed after each test is carried out
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
| ebigelow/LOTlib | LOTlib/Testing/old/Examples/SymbolicRegression/old/test_simple_functionTest.py | Python | gpl-3.0 | 522 | 0.007663 |
from django.core.management.base import BaseCommand, CommandError
try:
from django.contrib.auth import get_user_model # Django 1.5
except ImportError:
from django_extensions.future_1_5 import get_user_model
from django.contrib.sessions.models import Session
import re
SESSION_RE = re.compile("^[0-9a-f]{20,40}$")
class Command(BaseCommand):
help = ("print the user information for the provided session key. "
"this is very helpful when trying to track down the person who "
"experienced a site crash.")
args = "session_key"
label = 'session key for the user'
requires_model_validation = True
can_import_settings = True
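    # Invocation sketch (the key below is illustrative, not a real session):
    #   ./manage.py print_user_for_session 0123456789abcdef0123456789abcdef01234567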
def handle(self, *args, **options):
if len(args) > 1:
raise CommandError("extra arguments supplied")
if len(args) < 1:
raise CommandError("session_key argument missing")
key = args[0].lower()
if not SESSION_RE.match(key):
raise CommandError("malformed session key")
try:
session = Session.objects.get(pk=key)
except Session.DoesNotExist:
print("Session Key does not exist. Expired?")
return
data = session.get_decoded()
print('Session to Expire: %s' % session.expire_date)
print('Raw Data: %s' % data)
uid = data.get('_auth_user_id', None)
if uid is None:
print('No user associated with session')
return
print("User id: %s" % uid)
User = get_user_model()
try:
user = User.objects.get(pk=uid)
except User.DoesNotExist:
print("No user associated with that id.")
return
for key in ['username', 'email', 'first_name', 'last_name']:
print("%s: %s" % (key, getattr(user, key)))
| shash/IconDB | django_extensions/management/commands/print_user_for_session.py | Python | agpl-3.0 | 1,826 | 0 |
from django import forms
from django.contrib.auth.models import User
from .models import Perfil,SolicitudColaboracion
class SolicitudColaboracionForm(forms.ModelForm):
class Meta:
model = SolicitudColaboracion
fields = ('name','licenciatura_leyes','telefono','fecha_nacimiento')
| SurielRuano/Orientador-Legal | colaboradores/forms.py | Python | mit | 287 | 0.031359 |
#!/usr/bin/env python
import glob
import copy
import cv2
import cv_bridge
import rospy
from sensor_msgs.msg import Image
from std_msgs.msg import Int32, Float32, String
import rospkg
class Hear_orders:
def __init__(self):
self.speech_subscriber = rospy.Subscriber("/speech_recognition", String, self.publish_emotion)
self.emotion_publisher = rospy.Publisher("/emotion", String, queue_size=10)
# self.timer = rospy.Timer(rospy.Duration(self.velocity), self.timer_cb)
def publish_emotion(self, data):
self.emotion_publisher.publish("heard_an_order")
def main():
rospy.init_node('hearing_node', anonymous=True)
rate = rospy.Rate(30)
rospack = rospkg.RosPack()
# path = rospack.get_path('baxter_face_animation') + "/data/"
Hear_orders()
while not rospy.is_shutdown():
rate.sleep()
if __name__ == "__main__":
main()
| UCRoboticsLab/BaxterTictactoe | src/baxter_face_animation/src/baxter_face_animation/hear_words.py | Python | apache-2.0 | 926 | 0.009719 |
__author__ = 'rcj1492'
__created__ = '2016.10'
__license__ = 'MIT'
def retrieve_service_name(service_root):
service_name = ''
# construct registry client
from os import path
from pocketlab import __module__
from labpack.storage.appdata import appdataClient
registry_client = appdataClient(collection_name='Registry Data', prod_name=__module__)
# walk registry for
from labpack.records.settings import load_settings
for file_path in registry_client.localhost.walk(registry_client.collection_folder):
try:
details = load_settings(file_path)
if details['service_root'] == path.abspath(service_root):
service_name = details['service_name']
break
except:
pass
return service_name
def retrieve_service_root(service_name, command_context=''):
# construct registry client
from os import path
from pocketlab import __module__
from labpack.storage.appdata import appdataClient
registry_client = appdataClient(collection_name='Registry Data', prod_name=__module__)
# validate service name exists in registry
file_name = '%s.yaml' % service_name
filter_function = registry_client.conditional_filter([{0:{'discrete_values':[file_name]}}])
service_list = registry_client.list(filter_function=filter_function)
if not file_name in service_list:
error_msg = '"%s" not found in the registry.' % service_name
if command_context:
error_msg += ' %s' % command_context
raise ValueError(error_msg)
# retrieve root path to service
import yaml
service_data = registry_client.load(file_name)
service_details = yaml.full_load(service_data.decode())
if not 'service_root' in service_details.keys():
error_msg = 'Record for project "%s" has been corrupted.' % service_name
if command_context:
error_msg += ' %s' % command_context
raise ValueError(error_msg)
service_root = service_details['service_root']
if not path.exists(service_root):
error_msg = 'Path %s to project "%s" no longer exists.' % (service_root, service_name)
if command_context:
error_msg += ' %s' % command_context
raise ValueError(error_msg)
return service_root
def retrieve_services(service_list=None, all=False):
'''
a method to generate the root path for one or more services
    :param service_list: list of strings with the names of services
    :param all: boolean indicating whether to retrieve all paths in the registry
:return: list of dictionaries, string with exit message insert
'''
# define default returns
path_list = []
msg_insert = 'local service'
# add named service to service list
if service_list:
from labpack.parsing.grammar import join_words
word_list = []
for service in service_list:
service_root = retrieve_service_root(service)
service_details = {
'name': service,
'path': service_root
}
path_list.append(service_details)
word_list.append('"%s"' % service)
msg_insert = join_words(word_list)
# add all services in registry to service list
elif all:
msg_insert = 'all services'
from pocketlab import __module__
from labpack.storage.appdata import appdataClient
registry_client = appdataClient(collection_name='Registry Data', prod_name=__module__)
from labpack.records.settings import load_settings
for file_path in registry_client.localhost.walk(registry_client.collection_folder):
try:
details = load_settings(file_path)
service_details = {
'name': details['service_name'],
'path': details['service_root']
}
path_list.append(service_details)
except:
pass
# add local path to service list
else:
path_list.append({'name': '', 'path': './'})
return path_list, msg_insert
def retrieve_service_config(service_root, service_name, command_title):
from os import path
from pocketlab.methods.validation import validate_compose
from pocketlab import __module__
from jsonmodel.loader import jsonLoader
from jsonmodel.validators import jsonModel
compose_schema = jsonLoader(__module__, 'models/compose-config.json')
service_schema = jsonLoader(__module__, 'models/service-config.json')
compose_model = jsonModel(compose_schema)
service_model = jsonModel(service_schema)
compose_path = path.join(service_root, 'docker-compose.yaml')
compose_details = validate_compose(compose_model, service_model, compose_path, service_name)
service_config = {}
if service_name:
service_config = compose_details['services'][service_name]
elif len(compose_details['services'].keys()) > 1:
raise ValueError('docker-compose.yaml file in working directory contains more than one service.\nTry: lab %s [SERVICE]' % command_title)
else:
for key, value in compose_details['services'].items():
service_config = value
service_name = key
break
return service_config, service_name
def compile_services(registry_only=False):
# construct registry client
from pocketlab import __module__
from labpack.storage.appdata import appdataClient
registry_client = appdataClient(collection_name='Registry Data', prod_name=__module__)
# walk registry to compile list of services
service_list = []
path_list = []
from labpack.records.settings import load_settings
for file_path in registry_client.localhost.walk(registry_client.collection_folder):
try:
details = load_settings(file_path)
service_list.append({
'name': details['service_name'],
'path': details['service_root']
})
path_list.append(details['service_root'])
except:
pass
# add current directory
if not registry_only:
from os import path
current_path = path.abspath('./')
if current_path not in path_list:
try:
file_path = path.join(current_path, 'docker-compose.yaml')
from pocketlab.methods.validation import validate_compose
from jsonmodel.loader import jsonLoader
from jsonmodel.validators import jsonModel
compose_model = jsonModel(jsonLoader(__module__, 'models/compose-config.json'))
service_model = jsonModel(jsonLoader(__module__, 'models/service-config.json'))
compose_details = validate_compose(compose_model, service_model, file_path, '')
if len(compose_details['services'].keys()) == 1:
for key in compose_details['services'].keys():
service_list.append({
'name': key,
'path': current_path
})
except:
pass
return service_list
def compile_ports(service_config):
service_ports = []
# validate ports are available
if 'ports' in service_config.keys():
for i in range(len(service_config['ports'])):
port_string = service_config['ports'][i]
port_split = port_string.split(':')
sys_port = port_split[0]
range_split = sys_port.split('-')
port_start = range_split[0]
port_end = ''
if len(range_split) > 1:
port_end = range_split[1]
if not port_end:
service_ports.append(int(port_start))
else:
for j in range(int(port_start),int(port_end) + 1):
service_ports.append(j)
return service_ports
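# Illustrative behavior (a sketch): a service_config such as
#   {'ports': ['5000:5000', '8080-8082:8080-8082']}
# yields [5000, 8080, 8081, 8082] -- ranges are inclusive and only the
# host-side portion left of ':' is read.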
if __name__ == '__main__':
lab_root = retrieve_service_root('lab')
lab_name = retrieve_service_name(lab_root)
assert lab_name == 'lab'
from pprint import pprint
pprint(compile_services()) | collectiveacuity/pocketLab | pocketlab/methods/service.py | Python | mit | 8,378 | 0.005132 |
from __future__ import division
import numpy as np
# Non-monotonic Sobol G Function (8 parameters)
# First-order indices:
# x1: 0.7165
# x2: 0.1791
# x3: 0.0237
# x4: 0.0072
# x5-x8: 0.0001
def evaluate(values, a=None):
if type(values) != np.ndarray:
raise TypeError("The argument `values` must be a numpy ndarray")
if a is None:
a = [0, 1, 4.5, 9, 99, 99, 99, 99]
ltz = values < 0
gto = values > 1
if ltz.any() == True:
raise ValueError("Sobol G function called with values less than zero")
elif gto.any() == True:
raise ValueError("Sobol G function called with values greater than one")
Y = np.ones([values.shape[0]])
len_a = len(a)
for i, row in enumerate(values):
for j in range(len_a):
x = row[j]
a_j = a[j]
Y[i] *= (np.abs(4 * x - 2) + a_j) / (1 + a_j)
return Y
def partial_first_order_variance(a=None):
if a is None:
a = [0, 1, 4.5, 9, 99, 99, 99, 99]
a = np.array(a)
return np.divide(1, np.multiply(3, np.square(1 + a)))
def total_variance(a=None):
if a is None:
a = [0, 1, 4.5, 9, 99, 99, 99, 99]
a = np.array(a)
return np.add(-1, np.product(1 + partial_first_order_variance(a), axis=0))
def sensitivity_index(a):
a = np.array(a)
return np.divide(partial_first_order_variance(a), total_variance(a))
def total_sensitivity_index(a):
a = np.array(a)
pv = partial_first_order_variance(a)
tv = total_variance(a)
sum_pv = pv.sum(axis=0)
return np.subtract(1, np.divide(np.subtract(sum_pv, pv.T), tv))
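# Minimal self-check (a sketch, not part of the library API): with the default
# coefficients, the analytic first-order indices should match the values quoted
# in the header comment above (x1 ~ 0.7165, x2 ~ 0.1791, x3 ~ 0.0237, ...).
if __name__ == '__main__':
    a = [0, 1, 4.5, 9, 99, 99, 99, 99]
    print(sensitivity_index(a))        # ~ [0.7165 0.1791 0.0237 0.0072 ...]
    print(total_sensitivity_index(a))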
| willu47/SALib | src/SALib/test_functions/Sobol_G.py | Python | mit | 1,685 | 0.003561 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2019, Jianfeng Chen <jchen37@ncsu.edu>
# vim: set ts=4 sts=4 sw=4 expandtab smartindent:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, _distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import division
from deap import algorithms
from deap import tools
from deap.tools import emo
from deap.tools.emo import sortNondominated
from sklearn.cluster import KMeans
from sklearn.tree import DecisionTreeRegressor
from sklearn.linear_model import LinearRegression, LogisticRegression
from sklearn.neighbors import KNeighborsRegressor
from sklearn.neighbors.nearest_centroid import NearestCentroid
from sklearn.model_selection import train_test_split
from mpl_toolkits import mplot3d
from matplotlib.pyplot import figure
from matplotlib.ticker import PercentFormatter
import matplotlib.ticker as mtick
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import time
import sys
import os
import random
import pdb
def _emo_sortNondominated_idx(pop, first_front_only=False):
fronts = emo.sortNondominated(
pop, len(pop), first_front_only=first_front_only)
return [[pop.index(i) for i in f] for f in fronts]
def random_pop(model, N):
pop = list()
for _ in range(N):
pop.append(
model.Individual([random.random() for _ in range(model.decsNum)]))
return pop
def action_expr(model):
startat = time.time()
samples = random_pop(model, 100)
for p in samples:
model.eval(p, normalized=False)
print("100 init pop evaluated.")
for round_ in range(10):
samples.extend(random_pop(model, 20))
for p in samples[-20:]:
model.eval(p, normalized=False)
D = pd.DataFrame(data=samples, columns=model.decs)
O = pd.DataFrame(data=list(map(lambda i: i.fitness.values, samples)))
front_idx = _emo_sortNondominated_idx(
samples, first_front_only=True)[0]
next_pop = list()
for fi in front_idx:
dist_order = (D - D.loc[fi]).abs().pow(2).sum(
axis=1).sort_values().index[1:int(len(samples) * 0.1) +
1] # fetch the top 10% of samples
dD, dO = list(), list()
for i in dist_order:
for j in dist_order:
if i == j: continue
dD.append(D.iloc[i] - D.iloc[j])
dO.append(O.iloc[i] - O.iloc[j])
dD = pd.DataFrame(dD, index=range(len(dD)))
dO = pd.DataFrame(dO, index=range(len(dO)))
assert not (dO.std() < 0).any()
regr = list()
for oi, obj in enumerate(dO.columns):
regr_tmp = KNeighborsRegressor(n_neighbors=4).fit(dD, dO[obj])
regr.append(regr_tmp)
mut_dD = list()
for _ in range(D.shape[1] * 2):
mut_dD.append(D.loc[fi] * np.random.normal(0, 0.5, D.shape[1]))
mut_dD = pd.DataFrame(mut_dD, index=range(len(mut_dD)))
mut_dO = pd.DataFrame(columns=dO.columns)
for oi, obj in enumerate(mut_dO.columns):
mut_dO[obj] = regr[oi].predict(mut_dD)
filtered = (mut_dO < -1 * mut_dO.std()).any(axis=1)
new_decs = D.loc[fi] + mut_dD[filtered]
print('new eval = ', str(new_decs.shape[0]))
for nd in new_decs.index:
candidate = model.Individual(new_decs.loc[nd])
model.eval(candidate, normalized=False)
next_pop.append(candidate)
samples.extend(emo.sortNondominated(next_pop, len(next_pop), True)[0])
print(f'Round {round_} done. Sample size = {len(samples)}')
return emo.sortNondominated(
samples, len(samples), first_front_only=True)[0]
def action_expr2(model):
startat = time.time()
samples = random_pop(model, 100)
for p in samples:
model.eval(p, normalized=False)
print("100 init pop evaluated.")
for round_ in range(10):
samples.extend(random_pop(model, 20))
for p in samples[-20:]:
model.eval(p, normalized=False)
D = pd.DataFrame(data=samples, columns=model.decs)
O = pd.DataFrame(data=list(map(lambda i: i.fitness.values, samples)))
front_idx = _emo_sortNondominated_idx(
samples, first_front_only=True)[0]
next_pop = list()
for fi in front_idx:
dist_order = (D - D.loc[fi]).abs().pow(2).sum(
axis=1).sort_values().index[1:int(len(samples) * 0.1) +
1] # fetch the top 10% of samples
dD, dO = list(), list()
for i in dist_order:
for j in dist_order:
if i == j: continue
dD.append(D.iloc[i] - D.iloc[j])
dO.append(O.iloc[i] - O.iloc[j])
dD = pd.DataFrame(dD, index=range(len(dD)))
dO = pd.DataFrame(dO, index=range(len(dO)))
assert not (dO.std() < 0).any()
regr = list()
for oi, obj in enumerate(dO.columns):
regr_tmp = KNeighborsRegressor(n_neighbors=4).fit(dD, dO[obj])
regr.append(regr_tmp)
mut_dD = list()
for _ in range(D.shape[1] * 2):
mut_dD.append(D.loc[fi] * np.random.normal(0, 0.5, D.shape[1]))
mut_dD = pd.DataFrame(mut_dD, index=range(len(mut_dD)))
mut_dO = pd.DataFrame(columns=dO.columns)
for oi, obj in enumerate(mut_dO.columns):
mut_dO[obj] = regr[oi].predict(mut_dD)
filtered = (mut_dO < -1 * mut_dO.std()).any(axis=1)
new_decs = D.loc[fi] + mut_dD[filtered]
print('new eval = ', str(new_decs.shape[0]))
for nd in new_decs.index:
candidate = model.Individual(new_decs.loc[nd])
candidate.fitness.values = O.loc[fi] + mut_dO.loc[nd]
next_pop.append(candidate)
tmp_pf = emo.sortNondominated(next_pop, len(next_pop), True)[0]
for p in tmp_pf:
model.eval(p, normalized=False)
samples.extend(tmp_pf)
print(f'Round {round_} done. Sample size = {len(samples)}')
return emo.sortNondominated(
samples, len(samples), first_front_only=True)[0] | Ginfung/FSSE | Algorithms/WORTHY.py | Python | mit | 7,356 | 0.000952 |
import unittest
from numpy import arange, linspace
from numpy.random import seed
from src.bases.root import Root
from src.examples.example_setups import setup_stat_scm
from src.utils.sem_utils.toy_sems import StationaryDependentSEM as StatSEM
from src.utils.sequential_intervention_functions import get_interventional_grids
from src.utils.sequential_sampling import sequentially_sample_model
from src.utils.utilities import convert_to_dict_of_temporal_lists, powerset
seed(seed=0)
class TestRoot(unittest.TestCase):
# Do NOT change the setUp method -- setUp is reserved by unittest.
def setUp(self):
# Use STAT DAG to test Root class
self.T = 3 # Time-steps in DAG
self.n = 4 # Number of observational samples per variable per time-step
self.N = 5 # Number of trials per time-step for method
(
self.init_sem,
self.sem,
_,
self.G,
self.exploration_sets,
self.intervention_domain,
self.true_objective_values,
) = setup_stat_scm(T=self.T)
# Sample observational data using SEM
D_O = sequentially_sample_model(
self.init_sem, self.sem, total_timesteps=self.T, sample_count=self.n, epsilon=None,
)
root_inputs = {
"G": self.G,
"sem": StatSEM,
"base_target_variable": "Y",
"observation_samples": D_O, # Observational samples
"intervention_domain": self.intervention_domain,
"number_of_trials": self.N,
}
self.root = Root(**root_inputs)
def test_setup_STAT_function(self):
self.assertEqual(self.exploration_sets, [("X",), ("Z",), ("X", "Z")])
self.assertEqual(self.intervention_domain, {"X": [-4, 1], "Z": [-3, 3]})
self.assertAlmostEqual(
self.true_objective_values, [-2.1518267393287287, -4.303653478657457, -6.455480217986186], places=7
)
self.assertEqual(self.init_sem.keys(), self.sem.keys())
def test_root_methods(self):
self.assertEqual(
self.root.node_pars,
{
"X_0": (),
"Z_0": ("X_0",),
"Y_0": ("Z_0",),
"X_1": ("X_0",),
"Z_1": ("Z_0", "X_1"),
"Y_1": ("Y_0", "Z_1"),
"X_2": ("X_1",),
"Z_2": ("Z_1", "X_2"),
"Y_2": ("Y_1", "Z_2"),
},
)
self.assertEqual(self.root.outcome_values, {0: [10000000.0], 1: [10000000.0], 2: [10000000.0]})
self.assertEqual(
self.root.sorted_nodes,
{"X_0": 0, "Z_0": 1, "X_1": 2, "Y_0": 3, "Z_1": 4, "X_2": 5, "Y_1": 6, "Z_2": 7, "Y_2": 8},
)
self.assertEqual(self.root.interventional_variable_limits, {"X": [-4, 1], "Z": [-3, 3]})
# If we do not pass any exploration set, then by default the Root class will assign all manipulative variables as the intervention set.
self.assertEqual(self.root.exploration_sets, [("X", "Z")])
self.assertEqual(
self.root.interventional_data_y, {0: {("X", "Z"): None}, 1: {("X", "Z"): None}, 2: {("X", "Z"): None}}
)
self.assertEqual(
self.root.interventional_data_x, {0: {("X", "Z"): None}, 1: {("X", "Z"): None}, 2: {("X", "Z"): None}}
)
def test_dict_to_list_conversion_of_observational_samples(self):
observational_samples = {
"X": arange(0, 9).reshape(3, -1),
"Y": arange(3, 12).reshape(3, -1),
"Z": arange(6, 15).reshape(3, -1),
}
out = convert_to_dict_of_temporal_lists(observational_samples)
self.assertEqual(len(out["X"]), 3)
self.assertEqual(len(out["Z"][0]), 3)
self.assertEqual(sum([len(out["Y"][t]) for t in range(3)]), 9)
def test_interventional_grids(self):
nr_samples = 10
interventional_variable_limits = {"X": [-15, 3], "Z": [-1, 10]}
exploration_sets = list(powerset(self.root.manipulative_variables))
grids = get_interventional_grids(exploration_sets, interventional_variable_limits, nr_samples)
compare_vector = linspace(
interventional_variable_limits["X"][0], interventional_variable_limits["X"][1], num=nr_samples
).reshape(-1, 1)
self.assertEqual(compare_vector.shape, grids[exploration_sets[0]].shape)
self.assertTrue((compare_vector == grids[exploration_sets[0]]).all())
def test_target_variables(self):
self.assertEqual(self.root.all_target_variables, ["Y_0", "Y_1", "Y_2"])
def test_canonical_variables(self):
self.assertEqual(self.root.observational_samples.keys(), {"X", "Y", "Z"})
def test_number_of_nodes_per_time_slice(self):
# Number of nodes per time-slice
v_n = len(self.root.G.nodes()) / self.root.G.T
nodes = list(self.root.G.nodes())
self.assertEqual(v_n, 3)
for t in range(self.G.T):
            self.assertEqual(len([v for v in nodes if v.split("_")[1] == str(t)]), v_n)
if __name__ == "__main__":
unittest.main()
| neildhir/DCBO | tests/test_root.py | Python | mit | 5,136 | 0.003118 |
from ..config import GlobalConfig, LocalConfig
from kao_command.args import FlagArg
class ProjectArg(FlagArg):
""" Represents an CLI Argument that specifies a Project """
def __init__(self, *, help):
""" Initialize the Arg """
FlagArg.__init__(self, '-p', '--project', action="store", help=help)
def getValue(self, args):
""" Return the value from the args """
projectName = FlagArg.getValue(self, args)
return GlobalConfig.connection.projects.withName(projectName).first | cloew/TogglDriver | toggl_driver/args/project_arg.py | Python | mit | 547 | 0.007313 |
# -*- coding: utf-8 -*-
#=======================================================================
#
# Python Lexical Analyser
#
# Converting NFA to DFA
#
#=======================================================================
import Machines
from Machines import LOWEST_PRIORITY
from Transitions import TransitionMap
def nfa_to_dfa(old_machine, debug = None):
"""
Given a nondeterministic Machine, return a new equivalent
Machine which is deterministic.
"""
# We build a new machine whose states correspond to sets of states
# in the old machine. Initially we add a new state corresponding to
# the epsilon-closure of each initial old state. Then we give transitions
# to each new state which are the union of all transitions out of any
# of the corresponding old states. The new state reached on a given
# character is the one corresponding to the set of states reachable
# on that character from any of the old states. As new combinations of
# old states are created, new states are added as needed until closure
# is reached.
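    # Illustrative case (a sketch): if the old start state s0 has an epsilon
    # move to s1, the first new state stands for {s0, s1}; reading 'a' from it
    # leads to the new state for the epsilon-closed union of the 'a'-successors
    # of s0 and s1.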
new_machine = Machines.FastMachine()
state_map = StateMap(new_machine)
# Seed the process using the initial states of the old machine.
# Make the corresponding new states into initial states of the new
# machine with the same names.
for (key, old_state) in old_machine.initial_states.items():
new_state = state_map.old_to_new(epsilon_closure(old_state))
new_machine.make_initial_state(key, new_state)
# Tricky bit here: we add things to the end of this list while we're
# iterating over it. The iteration stops when closure is achieved.
for new_state in new_machine.states:
transitions = TransitionMap()
for old_state in state_map.new_to_old(new_state).keys():
for event, old_target_states in old_state.transitions.items():
if event and old_target_states:
transitions.add_set(event, set_epsilon_closure(old_target_states))
for event, old_states in transitions.items():
new_machine.add_transitions(new_state, event, state_map.old_to_new(old_states))
if debug:
debug.write("\n===== State Mapping =====\n")
state_map.dump(debug)
return new_machine
def set_epsilon_closure(state_set):
"""
Given a set of states, return the union of the epsilon
closures of its member states.
"""
result = {}
for state1 in state_set.keys():
for state2 in epsilon_closure(state1).keys():
result[state2] = 1
return result
def epsilon_closure(state):
"""
Return the set of states reachable from the given state
by epsilon moves.
"""
# Cache the result
result = state.epsilon_closure
if result is None:
result = {}
state.epsilon_closure = result
add_to_epsilon_closure(result, state)
return result
def add_to_epsilon_closure(state_set, state):
"""
Recursively add to |state_set| states reachable from the given state
by epsilon moves.
"""
if not state_set.get(state, 0):
state_set[state] = 1
state_set_2 = state.transitions.get_epsilon()
if state_set_2:
for state2 in state_set_2.keys():
add_to_epsilon_closure(state_set, state2)
class StateMap:
"""
Helper class used by nfa_to_dfa() to map back and forth between
sets of states from the old machine and states of the new machine.
"""
new_machine = None # Machine
old_to_new_dict = None # {(old_state,...) : new_state}
new_to_old_dict = None # {id(new_state) : old_state_set}
def __init__(self, new_machine):
self.new_machine = new_machine
self.old_to_new_dict = {}
self.new_to_old_dict= {}
def old_to_new(self, old_state_set):
"""
Return the state of the new machine corresponding to the
set of old machine states represented by |state_set|. A new
state will be created if necessary. If any of the old states
are accepting states, the new state will be an accepting state
with the highest priority action from the old states.
"""
key = self.make_key(old_state_set)
new_state = self.old_to_new_dict.get(key, None)
if not new_state:
action = self.highest_priority_action(old_state_set)
new_state = self.new_machine.new_state(action)
self.old_to_new_dict[key] = new_state
self.new_to_old_dict[id(new_state)] = old_state_set
#for old_state in old_state_set.keys():
#new_state.merge_actions(old_state)
return new_state
def highest_priority_action(self, state_set):
best_action = None
best_priority = LOWEST_PRIORITY
for state in state_set.keys():
priority = state.action_priority
if priority > best_priority:
best_action = state.action
best_priority = priority
return best_action
# def old_to_new_set(self, old_state_set):
# """
# Return the new state corresponding to a set of old states as
# a singleton set.
# """
# return {self.old_to_new(old_state_set):1}
def new_to_old(self, new_state):
"""Given a new state, return a set of corresponding old states."""
return self.new_to_old_dict[id(new_state)]
def make_key(self, state_set):
"""
Convert a set of states into a uniquified
sorted tuple suitable for use as a dictionary key.
"""
lst = state_set.keys()
lst.sort()
return tuple(lst)
def dump(self, file):
from Transitions import state_set_str
for new_state in self.new_machine.states:
old_state_set = self.new_to_old_dict[id(new_state)]
file.write(" State %s <-- %s\n" % (
new_state['number'], state_set_str(old_state_set)))
| unioslo/cerebrum | Cerebrum/extlib/Plex/DFA.py | Python | gpl-2.0 | 5,539 | 0.015165 |
#!/usr/bin/env python3
# Copyright (C) 2016 Freie Universität Berlin
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import os
import sys
def testfunc(child):
child.expect(r"TRACE_SIZE: (\d+)")
trace_size = int(child.match.group(1))
for i in range(trace_size):
child.expect("0x[0-9a-f]{7,8}")
print("All tests successful")
if __name__ == "__main__":
sys.path.append(os.path.join(os.environ['RIOTTOOLS'], 'testrunner'))
import testrunner
sys.exit(testrunner.run(testfunc, timeout=1, echo=True, traceback=True))
| MichelRottleuthner/RIOT | tests/trace/tests/01-run.py | Python | lgpl-2.1 | 676 | 0 |
#!/usr/bin/env python
import os
from pyaxo import Axolotl
# start with a fresh database
try:
os.remove('./alice.db')
os.remove('./bob.db')
except OSError:
pass
# unencrypted databases
a = Axolotl('alice', dbname='alice.db', dbpassphrase=None)
b = Axolotl('bob', dbname='bob.db', dbpassphrase=None)
a.initState('bob', b.state['DHIs'], b.handshakePKey,
b.state['DHRs'], verify=False)
b.initState('alice', a.state['DHIs'], a.handshakePKey,
a.state['DHRs'], verify=False)
a.saveState()
b.saveState()
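# With both states initialized, a message round-trip would look like this
# (a sketch; assumes pyaxo's encrypt()/decrypt() API):
#   ciphertext = a.encrypt(b'ratchet')
#   assert b.decrypt(ciphertext) == b'ratchet'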
| ghtdak/pyaxo | examples/create_states.py | Python | gpl-3.0 | 539 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-04-22 07:21
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('voting', '0004_winner_annoucement'),
]
operations = [
migrations.AddField(
model_name='sacyear',
name='alahsa_results_datetime',
field=models.DateTimeField(blank=True, null=True, verbose_name='\u062a\u0627\u0631\u064a\u062e \u0625\u0639\u0644\u0627\u0646 \u0627\u0644\u0646\u062a\u0627\u0626\u062c \u0641\u064a \u0627\u0644\u0623\u062d\u0633\u0627\u0621'),
),
migrations.AddField(
model_name='sacyear',
name='jeddah_results_datetime',
field=models.DateTimeField(blank=True, null=True, verbose_name='\u062a\u0627\u0631\u064a\u062e \u0625\u0639\u0644\u0627\u0646 \u0627\u0644\u0646\u062a\u0627\u0626\u062c \u0641\u064a \u062c\u062f\u0629'),
),
migrations.AddField(
model_name='sacyear',
name='riyadh_results_datetime',
field=models.DateTimeField(blank=True, null=True, verbose_name='\u062a\u0627\u0631\u064a\u062e \u0625\u0639\u0644\u0627\u0646 \u0627\u0644\u0646\u062a\u0627\u0626\u062c \u0641\u064a \u0627\u0644\u0631\u064a\u0627\u0636'),
),
]
| SAlkhairy/trabd | voting/migrations/0005_add_is_city_results_due_for_SACYear.py | Python | agpl-3.0 | 1,341 | 0.002237 |
from __future__ import division
import numpy as np
import pytest
from pandas import Interval, Timedelta, Timestamp
import pandas.core.common as com
@pytest.fixture
def interval():
return Interval(0, 1)
class TestInterval(object):
def test_properties(self, interval):
assert interval.closed == 'right'
assert interval.left == 0
assert interval.right == 1
assert interval.mid == 0.5
def test_repr(self, interval):
assert repr(interval) == "Interval(0, 1, closed='right')"
assert str(interval) == "(0, 1]"
interval_left = Interval(0, 1, closed='left')
assert repr(interval_left) == "Interval(0, 1, closed='left')"
assert str(interval_left) == "[0, 1)"
def test_contains(self, interval):
assert 0.5 in interval
assert 1 in interval
assert 0 not in interval
msg = "__contains__ not defined for two intervals"
with pytest.raises(TypeError, match=msg):
interval in interval
interval_both = Interval(0, 1, closed='both')
assert 0 in interval_both
assert 1 in interval_both
interval_neither = Interval(0, 1, closed='neither')
assert 0 not in interval_neither
assert 0.5 in interval_neither
assert 1 not in interval_neither
def test_equal(self):
assert Interval(0, 1) == Interval(0, 1, closed='right')
assert Interval(0, 1) != Interval(0, 1, closed='left')
assert Interval(0, 1) != 0
def test_comparison(self):
with pytest.raises(TypeError, match='unorderable types'):
Interval(0, 1) < 2
assert Interval(0, 1) < Interval(1, 2)
assert Interval(0, 1) < Interval(0, 2)
assert Interval(0, 1) < Interval(0.5, 1.5)
assert Interval(0, 1) <= Interval(0, 1)
assert Interval(0, 1) > Interval(-1, 2)
assert Interval(0, 1) >= Interval(0, 1)
def test_hash(self, interval):
# should not raise
hash(interval)
@pytest.mark.parametrize('left, right, expected', [
(0, 5, 5),
(-2, 5.5, 7.5),
(10, 10, 0),
(10, np.inf, np.inf),
(-np.inf, -5, np.inf),
(-np.inf, np.inf, np.inf),
(Timedelta('0 days'), Timedelta('5 days'), Timedelta('5 days')),
(Timedelta('10 days'), Timedelta('10 days'), Timedelta('0 days')),
(Timedelta('1H10M'), Timedelta('5H5M'), Timedelta('3H55M')),
(Timedelta('5S'), Timedelta('1H'), Timedelta('59M55S'))])
def test_length(self, left, right, expected):
# GH 18789
iv = Interval(left, right)
result = iv.length
assert result == expected
@pytest.mark.parametrize('left, right, expected', [
('2017-01-01', '2017-01-06', '5 days'),
('2017-01-01', '2017-01-01 12:00:00', '12 hours'),
('2017-01-01 12:00', '2017-01-01 12:00:00', '0 days'),
('2017-01-01 12:01', '2017-01-05 17:31:00', '4 days 5 hours 30 min')])
@pytest.mark.parametrize('tz', (None, 'UTC', 'CET', 'US/Eastern'))
def test_length_timestamp(self, tz, left, right, expected):
# GH 18789
iv = Interval(Timestamp(left, tz=tz), Timestamp(right, tz=tz))
result = iv.length
expected = Timedelta(expected)
assert result == expected
@pytest.mark.parametrize('left, right', [
('a', 'z'),
(('a', 'b'), ('c', 'd')),
(list('AB'), list('ab')),
(Interval(0, 1), Interval(1, 2))])
def test_length_errors(self, left, right):
# GH 18789
iv = Interval(left, right)
msg = 'cannot compute length between .* and .*'
with pytest.raises(TypeError, match=msg):
iv.length
def test_math_add(self, closed):
interval = Interval(0, 1, closed=closed)
expected = Interval(1, 2, closed=closed)
result = interval + 1
assert result == expected
result = 1 + interval
assert result == expected
result = interval
result += 1
assert result == expected
msg = r"unsupported operand type\(s\) for \+"
with pytest.raises(TypeError, match=msg):
interval + interval
with pytest.raises(TypeError, match=msg):
interval + 'foo'
def test_math_sub(self, closed):
interval = Interval(0, 1, closed=closed)
expected = Interval(-1, 0, closed=closed)
result = interval - 1
assert result == expected
result = interval
result -= 1
assert result == expected
msg = r"unsupported operand type\(s\) for -"
with pytest.raises(TypeError, match=msg):
interval - interval
with pytest.raises(TypeError, match=msg):
interval - 'foo'
def test_math_mult(self, closed):
interval = Interval(0, 1, closed=closed)
expected = Interval(0, 2, closed=closed)
result = interval * 2
assert result == expected
result = 2 * interval
assert result == expected
result = interval
result *= 2
assert result == expected
msg = r"unsupported operand type\(s\) for \*"
with pytest.raises(TypeError, match=msg):
interval * interval
msg = r"can\'t multiply sequence by non-int"
with pytest.raises(TypeError, match=msg):
interval * 'foo'
def test_math_div(self, closed):
interval = Interval(0, 1, closed=closed)
expected = Interval(0, 0.5, closed=closed)
result = interval / 2.0
assert result == expected
result = interval
result /= 2.0
assert result == expected
msg = r"unsupported operand type\(s\) for /"
with pytest.raises(TypeError, match=msg):
interval / interval
with pytest.raises(TypeError, match=msg):
interval / 'foo'
def test_math_floordiv(self, closed):
interval = Interval(1, 2, closed=closed)
expected = Interval(0, 1, closed=closed)
result = interval // 2
assert result == expected
result = interval
result //= 2
assert result == expected
msg = r"unsupported operand type\(s\) for //"
with pytest.raises(TypeError, match=msg):
interval // interval
with pytest.raises(TypeError, match=msg):
interval // 'foo'
def test_constructor_errors(self):
msg = "invalid option for 'closed': foo"
with pytest.raises(ValueError, match=msg):
Interval(0, 1, closed='foo')
msg = 'left side of interval must be <= right side'
with pytest.raises(ValueError, match=msg):
Interval(1, 0)
@pytest.mark.parametrize('tz_left, tz_right', [
(None, 'UTC'), ('UTC', None), ('UTC', 'US/Eastern')])
def test_constructor_errors_tz(self, tz_left, tz_right):
# GH 18538
left = Timestamp('2017-01-01', tz=tz_left)
right = Timestamp('2017-01-02', tz=tz_right)
error = TypeError if com._any_none(tz_left, tz_right) else ValueError
with pytest.raises(error):
Interval(left, right)
| GuessWhoSamFoo/pandas | pandas/tests/scalar/interval/test_interval.py | Python | bsd-3-clause | 7,179 | 0 |
#!/usr/bin/env python
import os,sys,logging,optparse
from AlpgenArgoJob import AlpgenArgoJob
sys.path.append('/users/hpcusers/balsam/argo_deploy/argo_core')
from MessageInterface import MessageInterface
logger = logging.getLogger(__name__)
def main():
parser = optparse.OptionParser(description='submit alpgen job to ARGO')
parser.add_option('-e','--evts-per-iter',dest='evts_per_iter',help='number of events per warmup iteration',type='int')
parser.add_option('-i','--num-iter',dest='numiters',help='number of iterations for the warmup',type='int')
parser.add_option('-w','--warmup-weighted',dest='num_warmup',help='number of event to in the warmup, after the iterations complete',type='int')
parser.add_option('-n','--num-weighted',dest='num_weighted',help='number of weighted events to generate.',type='int')
parser.add_option('-p','--process',dest='process',help='define the process to generate, 2Q,4Q,hjet,top,wjet,zjet,Njet,etc.')
parser.add_option('-o','--num-nodes',dest='numnodes',help='number of nodes to use on destination machine',type='int')
parser.add_option('-c','--cpus-per-node',dest='cpus_per_node',help='number of CPUs per node to use on destination machine',type='int')
parser.add_option('-a','--alpgen-input',dest='alpgen_input_file',help='The AlpGen input file which carries all the options for this generation job')
    parser.add_option('-t','--wall-time',dest='walltime',help='The wall time to submit to the queue in minutes.',type='int')
    # assumed flag, reconstructed from the use of options.resubmitjobid below
    parser.add_option('-r','--resubmit-jobid',dest='resubmitjobid',help='resubmit using an existing ARGO job id',type='int')
    options,args = parser.parse_args()
if options.numiters is None:
parser.error('Must define the number of warmup iterations')
if options.process is None:
parser.error('Must define the process to generate')
if options.numnodes is None:
parser.error('Must define the number of nodes to use')
if options.cpus_per_node is None:
parser.error('Must define the number of CPUs per node to use')
if options.evts_per_iter is None:
parser.error('Must define the number of events per warmup iteration')
if options.num_weighted is None:
parser.error('Must define the number of weighted events to produce')
if options.num_warmup is None:
parser.error('Must define the number of weighted events to produce in the warmup step.')
if options.alpgen_input_file is None:
parser.error('Must define the AlpGen input file')
if options.walltime is None:
parser.error('Must specify a wall time')
user = os.environ.get('USER','nobody')
if(user == 'apf'): # AutoPyFactory
user= os.environ.get('prodUserID','nobody')
jobID = taskID + '0'
if options.resubmitjobid is not None:
jobID = int(options.resubmitjobid)
TOP_PATH = os.getcwd() # directory in which script was run
RUNPATH = os.path.join(TOP_PATH,str(jobID)) # directory in which to store files
if not os.path.exists(RUNPATH):
os.makedirs(RUNPATH) # make directories recursively like 'mkdir -p'
logger.info('JobID: ' + str(jobID))
if __name__ == '__main__':
main()
| hep-cce/hpc-edge-service | argo/test_jobs/test_submit_alpgen.py | Python | bsd-3-clause | 2,987 | 0.035152 |
#
# Copyright 2016 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
from __future__ import division
import logging
from vdsm import throttledlog
from monkeypatch import MonkeyPatch
from testlib import VdsmTestCase
class FakeLogger(object):
def __init__(self, level):
self.level = level
self.messages = []
def isEnabledFor(self, level):
return level >= self.level
def log(self, level, message, *args):
if not self.isEnabledFor(level):
return
self.messages.append(message % args)
class FakeTime(object):
def __init__(self):
self.time = 0.0
def __call__(self):
return self.time
class TestThrottledLogging(VdsmTestCase):
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.DEBUG))
def test_throttled_logging(self):
throttledlog.throttle('test', 3)
for i in range(5):
throttledlog.debug('test', "Cycle: %s", i)
self.assertEqual(throttledlog._logger.messages,
['Cycle: 0', 'Cycle: 3'])
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.INFO))
def test_no_logging(self):
throttledlog.throttle('test', 3)
for i in range(5):
throttledlog.debug('test', "Cycle: %s", i)
self.assertEqual(throttledlog._logger.messages, [])
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.DEBUG))
def test_default(self):
throttledlog.throttle('test', 3)
for i in range(5):
throttledlog.debug('other', "Cycle: %s", i)
self.assertEqual(throttledlog._logger.messages,
['Cycle: %s' % (i,) for i in range(5)])
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.DEBUG))
@MonkeyPatch(throttledlog, "monotonic_time", FakeTime())
def test_timeout(self):
throttledlog.throttle('test', 10, timeout=7)
for i in range(12):
throttledlog.debug('test', "Cycle: %s", i)
throttledlog.monotonic_time.time += 1.0
self.assertEqual(throttledlog._logger.messages,
['Cycle: %s' % (i,) for i in (0, 7, 10,)])
@MonkeyPatch(throttledlog, "_logger", FakeLogger(logging.WARNING))
def test_logging_warning(self):
throttledlog.throttle('test', 4)
for i in range(7):
throttledlog.warning('test', "Cycle: %s", i)
self.assertEqual(throttledlog._logger.messages,
['Cycle: 0', 'Cycle: 4'])
| oVirt/vdsm | tests/throttledlog_test.py | Python | gpl-2.0 | 3,280 | 0 |
import logging
from django.http import HttpResponseBadRequest, HttpResponseForbidden
from functools import wraps
import groupbank_crypto.ec_secp256k1 as crypto
logger = logging.getLogger(__name__)
# decorator for verifying the payload is signed by the author of the request
def verify_author(view):
@wraps(view) # to get features like showing the original function name in trace backs
def wrapper(request):
# https://docs.djangoproject.com/en/1.11/topics/http/middleware/#process-view
# verify the JSON B64 string. return None if it's fine,
# return an HTTPResponse with an error if not
try:
author, signature, payload = request.POST['author'], request.POST['signature'], request.POST['payload']
except KeyError:
logger.info('Request with missing author, signature or payload')
return HttpResponseBadRequest()
# get user pubkey
# what if the author CAN'T already be registered? i.e.: group key
# maybe check view_func and ignore a few?
# or let the view itself verify if the author is registered...
# NOTE: This does not verify if the signer is authorized for the operation.
# It only verifies if the signature matches the given pub key
try:
crypto.verify(author, signature, payload)
return view(request)
except (crypto.InvalidSignature, crypto.InvalidKey):
logger.info('Request with invalid author key or signature')
return HttpResponseForbidden()
# or 401 Unauthorized...
return wrapper
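# Usage sketch (hypothetical view, not part of this module); the POST body
# must carry the 'author', 'signature' and 'payload' fields handled above:
#
# @verify_author
# def create_group(request):
#     payload = request.POST['payload']
#     ...  # the payload's signature is already verified at this point
#     return HttpResponse()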
| GroupBank/global-server | rest_app/decorators.py | Python | agpl-3.0 | 1,619 | 0.001853 |
# -*- coding: utf-8 -*-
"""
morphsnakes
===========
This is a Python implementation of the algorithms introduced in the paper
Márquez-Neila, P., Baumela, L., Álvarez, L., "A morphological approach
to curvature-based evolution of curves and surfaces". IEEE Transactions
on Pattern Analysis and Machine Intelligence (PAMI), 2013.
This implementation is intended to be as brief, understandable and self-contained
as possible. It does not include any enhancement to make it fast or efficient.
Any practical implementation of this algorithm should work only over the
neighbor pixels of the 0.5-levelset, not over all the embedding function,
and perhaps should feature multi-threading or GPU capabilities.
The classes MorphGAC and MorphACWE provide most of the functionality of this
module. They implement the Morphological Geodesic Active Contours and the
Morphological Active Contours without Edges, respectively. See the
aforementioned paper for full details.
See test.py for examples of usage.
"""
__author__ = "P. Márquez Neila <p.mneila@upm.es>"
from itertools import cycle
import numpy as np
from scipy import ndimage
from scipy.ndimage import binary_dilation, binary_erosion, \
gaussian_filter, gaussian_gradient_magnitude
class fcycle(object):
def __init__(self, iterable):
"""Call functions from the iterable each time it is called."""
self.funcs = cycle(iterable)
def __call__(self, *args, **kwargs):
f = next(self.funcs)
return f(*args, **kwargs)
# SI and IS operators for 2D and 3D.
_P2 = [np.eye(3), np.array([[0,1,0]]*3), np.flipud(np.eye(3)), np.rot90([[0,1,0]]*3)]
_P3 = [np.zeros((3,3,3)) for i in range(9)]
_P3[0][:,:,1] = 1
_P3[1][:,1,:] = 1
_P3[2][1,:,:] = 1
_P3[3][:,[0,1,2],[0,1,2]] = 1
_P3[4][:,[0,1,2],[2,1,0]] = 1
_P3[5][[0,1,2],:,[0,1,2]] = 1
_P3[6][[0,1,2],:,[2,1,0]] = 1
_P3[7][[0,1,2],[0,1,2],:] = 1
_P3[8][[0,1,2],[2,1,0],:] = 1
_aux = np.zeros((0))
def SI(u):
"""SI operator."""
global _aux
if np.ndim(u) == 2:
P = _P2
elif np.ndim(u) == 3:
P = _P3
else:
raise ValueError("u has an invalid number of dimensions (should be 2 or 3)")
if u.shape != _aux.shape[1:]:
_aux = np.zeros((len(P),) + u.shape)
for i in range(len(P)):
_aux[i] = binary_erosion(u, P[i])
return _aux.max(0)
def IS(u):
"""IS operator."""
global _aux
if np.ndim(u) == 2:
P = _P2
elif np.ndim(u) == 3:
P = _P3
else:
raise ValueError("u has an invalid number of dimensions (should be 2 or 3)")
if u.shape != _aux.shape[1:]:
_aux = np.zeros((len(P),) + u.shape)
for i in range(len(P)):
_aux[i] = binary_dilation(u, P[i])
return _aux.min(0)
# SIoIS operator.
SIoIS = lambda u: SI(IS(u))
ISoSI = lambda u: IS(SI(u))
curvop = fcycle([SIoIS, ISoSI])
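# curvop alternates SIoIS and ISoSI across successive calls; repeatedly
# applying it realises the morphological curvature-smoothing step of the
# paper without favouring one composition order over the other.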
# Stopping factors (function g(I) in the paper).
def gborders(img, alpha=1.0, sigma=1.0):
"""Stopping criterion for image borders."""
# The norm of the gradient.
gradnorm = gaussian_gradient_magnitude(img, sigma, mode='constant')
return 1.0/np.sqrt(1.0 + alpha*gradnorm)
def glines(img, sigma=1.0):
"""Stopping criterion for image black lines."""
return gaussian_filter(img, sigma)
class MorphACWE(object):
"""Morphological ACWE based on the Chan-Vese energy functional."""
def __init__(self, data, smoothing=1, lambda1=1, lambda2=1):
"""Create a Morphological ACWE solver.
Parameters
----------
data : ndarray
The image data.
smoothing : scalar
The number of repetitions of the smoothing step (the
curv operator) in each iteration. In other terms,
this is the strength of the smoothing. This is the
parameter µ.
lambda1, lambda2 : scalars
Relative importance of the inside pixels (lambda1)
against the outside pixels (lambda2).
"""
self._u = None
self.smoothing = smoothing
self.lambda1 = lambda1
self.lambda2 = lambda2
self.data = data
def set_levelset(self, u):
self._u = np.double(u)
self._u[u>0] = 1
self._u[u<=0] = 0
levelset = property(lambda self: self._u,
set_levelset,
doc="The level set embedding function (u).")
def step(self):
"""Perform a single step of the morphological Chan-Vese evolution."""
# Assign attributes to local variables for convenience.
u = self._u
if u is None:
raise ValueError("the levelset function is not set (use set_levelset)")
data = self.data
# Determine c0 and c1.
inside = u>0
outside = u<=0
c0 = data[outside].sum() / float(outside.sum())
c1 = data[inside].sum() / float(inside.sum())
# Image attachment.
dres = np.array(np.gradient(u))
abs_dres = np.abs(dres).sum(0)
#aux = abs_dres * (c0 - c1) * (c0 + c1 - 2*data)
aux = abs_dres * (self.lambda1*(data - c1)**2 - self.lambda2*(data - c0)**2)
res = np.copy(u)
res[aux < 0] = 1
res[aux > 0] = 0
# Smoothing.
for i in range(self.smoothing):
res = curvop(res)
self._u = res
def run(self, iterations):
"""Run several iterations of the morphological Chan-Vese method."""
for i in range(iterations):
self.step()
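# A minimal usage sketch for MorphACWE (assumes `img` is a 2D grayscale
# ndarray; the square initialization below is arbitrary):
#
#   macwe = MorphACWE(img, smoothing=3, lambda1=1, lambda2=1)
#   u0 = np.zeros(img.shape)
#   u0[20:40, 20:40] = 1
#   macwe.levelset = u0
#   macwe.run(iterations=50)
#   segmentation = macwe.levelset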
class MorphGAC(object):
"""Morphological GAC based on the Geodesic Active Contours."""
def __init__(self, data, smoothing=1, threshold=0, balloon=0):
"""Create a Morphological GAC solver.
Parameters
----------
data : array-like
The stopping criterion g(I). See functions gborders and glines.
smoothing : scalar
The number of repetitions of the smoothing step in each
iteration. This is the parameter µ.
threshold : scalar
The threshold that determines which areas are affected
by the morphological balloon. This is the parameter θ.
balloon : scalar
The strength of the morphological balloon. This is the parameter ν.
"""
self._u = None
self._v = balloon
self._theta = threshold
self.smoothing = smoothing
self.set_data(data)
def set_levelset(self, u):
self._u = np.double(u)
self._u[u>0] = 1
self._u[u<=0] = 0
def set_balloon(self, v):
self._v = v
self._update_mask()
def set_threshold(self, theta):
self._theta = theta
self._update_mask()
def set_data(self, data):
self._data = data
self._ddata = np.gradient(data)
self._update_mask()
# The structure element for binary dilation and erosion.
self.structure = np.ones((3,)*np.ndim(data))
    def _update_mask(self):
        """Pre-compute masks for speed."""
        self._threshold_mask = self._data > self._theta
        if self._v:
            self._threshold_mask_v = self._data > self._theta/np.abs(self._v)
        else:
            # The balloon mask is only consulted when the balloon force is
            # nonzero; avoid the division by zero in that case.
            self._threshold_mask_v = np.zeros_like(self._data, dtype=bool)
levelset = property(lambda self: self._u,
set_levelset,
doc="The level set embedding function (u).")
data = property(lambda self: self._data,
set_data,
doc="The data that controls the snake evolution (the image or g(I)).")
balloon = property(lambda self: self._v,
set_balloon,
doc="The morphological balloon parameter (ν (nu, not v)).")
threshold = property(lambda self: self._theta,
set_threshold,
doc="The threshold value (θ).")
def step(self):
"""Perform a single step of the morphological snake evolution."""
# Assign attributes to local variables for convenience.
u = self._u
gI = self._data
dgI = self._ddata
theta = self._theta
v = self._v
if u is None:
raise ValueError("the levelset is not set (use set_levelset)")
res = np.copy(u)
# Balloon.
if v > 0:
aux = binary_dilation(u, self.structure)
elif v < 0:
aux = binary_erosion(u, self.structure)
        if v != 0:
res[self._threshold_mask_v] = aux[self._threshold_mask_v]
# Image attachment.
aux = np.zeros_like(res)
dres = np.gradient(res)
for el1, el2 in zip(dgI, dres):
aux += el1*el2
res[aux > 0] = 1
res[aux < 0] = 0
# Smoothing.
for i in range(self.smoothing):
res = curvop(res)
self._u = res
def run(self, iterations):
"""Run several iterations of the morphological snakes method."""
for i in range(iterations):
self.step()
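# A minimal usage sketch for MorphGAC (parameter values are illustrative):
#
#   gimg = gborders(img, alpha=1000, sigma=2)
#   mgac = MorphGAC(gimg, smoothing=1, threshold=0.3, balloon=1)
#   u0 = np.zeros(img.shape)
#   u0[20:40, 20:40] = 1
#   mgac.levelset = u0
#   mgac.run(iterations=100)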
def evolve_visual(msnake, levelset=None, num_iters=20, background=None):
"""
Visual evolution of a morphological snake.
Parameters
----------
msnake : MorphGAC or MorphACWE instance
The morphological snake solver.
levelset : array-like, optional
If given, the levelset of the solver is initialized to this. If not
given, the evolution will use the levelset already set in msnake.
num_iters : int, optional
The number of iterations.
background : array-like, optional
If given, background will be shown behind the contours instead of
msnake.data.
"""
from matplotlib import pyplot as ppl
if levelset is not None:
msnake.levelset = levelset
# Prepare the visual environment.
fig = ppl.gcf()
fig.clf()
ax1 = fig.add_subplot(1,2,1)
if background is None:
ax1.imshow(msnake.data, cmap=ppl.cm.gray)
else:
ax1.imshow(background, cmap=ppl.cm.gray)
ax1.contour(msnake.levelset, [0.5], colors='r')
ax2 = fig.add_subplot(1,2,2)
ax_u = ax2.imshow(msnake.levelset)
ppl.pause(0.001)
# Iterate.
for i in range(num_iters):
# Evolve.
msnake.step()
# Update figure.
del ax1.collections[0]
ax1.contour(msnake.levelset, [0.5], colors='r')
ax_u.set_data(msnake.levelset)
fig.canvas.draw()
#ppl.pause(0.001)
# Return the last levelset.
return msnake.levelset
def evolve_visual3d(msnake, levelset=None, num_iters=20):
"""
Visual evolution of a three-dimensional morphological snake.
Parameters
----------
msnake : MorphGAC or MorphACWE instance
The morphological snake solver.
levelset : array-like, optional
If given, the levelset of the solver is initialized to this. If not
given, the evolution will use the levelset already set in msnake.
num_iters : int, optional
The number of iterations.
"""
from mayavi import mlab
import matplotlib.pyplot as ppl
if levelset is not None:
msnake.levelset = levelset
fig = mlab.gcf()
mlab.clf()
src = mlab.pipeline.scalar_field(msnake.data)
mlab.pipeline.image_plane_widget(src, plane_orientation='x_axes', colormap='gray')
cnt = mlab.contour3d(msnake.levelset, contours=[0.5])
@mlab.animate(ui=True)
def anim():
for i in range(num_iters):
msnake.step()
cnt.mlab_source.scalars = msnake.levelset
print("Iteration %s/%s..." % (i + 1, num_iters))
yield
anim()
mlab.show()
# Return the last levelset.
    return msnake.levelset
| braysia/CellTK | celltk/utils/morphsnakes.py | Python | mit | 11,905 | 0.011937 |
# -*- coding: utf-8 -*-
from Screens.Screen import Screen
from Screens.Standby import TryQuitMainloop
from Screens.MessageBox import MessageBox
from Components.ActionMap import NumberActionMap
from Components.Pixmap import Pixmap
from Components.Sources.StaticText import StaticText
from Components.MenuList import MenuList
from Components.config import config, configfile
from Tools.Directories import resolveFilename, SCOPE_ACTIVE_SKIN
from enigma import eEnv, ePicLoad
import os
class SkinSelectorBase:
	def __init__(self, args = None):
self.skinlist = []
self.previewPath = ""
if self.SKINXML and os.path.exists(os.path.join(self.root, self.SKINXML)):
self.skinlist.append(self.DEFAULTSKIN)
if self.PICONSKINXML and os.path.exists(os.path.join(self.root, self.PICONSKINXML)):
self.skinlist.append(self.PICONDEFAULTSKIN)
for root, dirs, files in os.walk(self.root, followlinks=True):
for subdir in dirs:
dir = os.path.join(root,subdir)
if os.path.exists(os.path.join(dir,self.SKINXML)):
self.skinlist.append(subdir)
			dirs[:] = []  # prune in-place; rebinding 'dirs' would not stop os.walk from descending
self["key_red"] = StaticText(_("Close"))
self["key_green"] = StaticText(_("Save"))
self["introduction"] = StaticText(_("Press OK to activate the selected skin."))
self["SkinList"] = MenuList(self.skinlist)
self["Preview"] = Pixmap()
self.skinlist.sort()
self["actions"] = NumberActionMap(["SetupActions", "DirectionActions", "TimerEditActions", "ColorActions"],
{
"ok": self.ok,
"cancel": self.close,
"red": self.close,
"green": self.ok,
"up": self.up,
"down": self.down,
"left": self.left,
"right": self.right,
"log": self.info,
}, -1)
self.picload = ePicLoad()
self.picload.PictureData.get().append(self.showPic)
self.onLayoutFinish.append(self.layoutFinished)
def showPic(self, picInfo=""):
ptr = self.picload.getData()
if ptr is not None:
self["Preview"].instance.setPixmap(ptr.__deref__())
self["Preview"].show()
def layoutFinished(self):
self.picload.setPara((self["Preview"].instance.size().width(), self["Preview"].instance.size().height(), 0, 0, 1, 1, "#00000000"))
tmp = self.config.value.find("/"+self.SKINXML)
if tmp != -1:
tmp = self.config.value[:tmp]
idx = 0
for skin in self.skinlist:
if skin == tmp:
break
idx += 1
if idx < len(self.skinlist):
self["SkinList"].moveToIndex(idx)
self.loadPreview()
def ok(self):
if self["SkinList"].getCurrent() == self.DEFAULTSKIN:
skinfile = ""
skinfile = os.path.join(skinfile, self.SKINXML)
elif self["SkinList"].getCurrent() == self.PICONDEFAULTSKIN:
skinfile = ""
skinfile = os.path.join(skinfile, self.PICONSKINXML)
else:
skinfile = self["SkinList"].getCurrent()
skinfile = os.path.join(skinfile, self.SKINXML)
print "Skinselector: Selected Skin: "+self.root+skinfile
self.config.value = skinfile
self.config.save()
configfile.save()
restartbox = self.session.openWithCallback(self.restartGUI,MessageBox,_("GUI needs a restart to apply a new skin\nDo you want to restart the GUI now?"), MessageBox.TYPE_YESNO)
restartbox.setTitle(_("Restart GUI now?"))
def up(self):
self["SkinList"].up()
self.loadPreview()
def down(self):
self["SkinList"].down()
self.loadPreview()
def left(self):
self["SkinList"].pageUp()
self.loadPreview()
def right(self):
self["SkinList"].pageDown()
self.loadPreview()
def info(self):
aboutbox = self.session.open(MessageBox,_("Enigma2 skin selector"), MessageBox.TYPE_INFO)
aboutbox.setTitle(_("About..."))
def loadPreview(self):
if self["SkinList"].getCurrent() == self.DEFAULTSKIN:
pngpath = "."
pngpath = os.path.join(os.path.join(self.root, pngpath), "prev.png")
elif self["SkinList"].getCurrent() == self.PICONDEFAULTSKIN:
pngpath = "."
pngpath = os.path.join(os.path.join(self.root, pngpath), "piconprev.png")
else:
pngpath = self["SkinList"].getCurrent()
try:
pngpath = os.path.join(os.path.join(self.root, pngpath), "prev.png")
except:
pass
if not os.path.exists(pngpath):
pngpath = resolveFilename(SCOPE_ACTIVE_SKIN, "noprev.png")
if self.previewPath != pngpath:
self.previewPath = pngpath
self.picload.startDecode(self.previewPath)
def restartGUI(self, answer):
if answer is True:
self.session.open(TryQuitMainloop, 3)
class SkinSelector(Screen, SkinSelectorBase):
SKINXML = "skin.xml"
DEFAULTSKIN = "< Default >"
PICONSKINXML = None
PICONDEFAULTSKIN = None
skinlist = []
root = os.path.join(eEnv.resolve("${datadir}"),"enigma2")
def __init__(self, session, args = None):
Screen.__init__(self, session)
SkinSelectorBase.__init__(self, args)
Screen.setTitle(self, _("Skin setup"))
self.skinName = "SkinSelector"
self.config = config.skin.primary_skin
class LcdSkinSelector(Screen, SkinSelectorBase):
SKINXML = "skin_display.xml"
DEFAULTSKIN = "< Default >"
PICONSKINXML = "skin_display_picon.xml"
PICONDEFAULTSKIN = "< Default with Picon >"
skinlist = []
root = os.path.join(eEnv.resolve("${datadir}"),"enigma2/display/")
def __init__(self, session, args = None):
Screen.__init__(self, session)
SkinSelectorBase.__init__(self, args)
Screen.setTitle(self, _("Skin setup"))
self.skinName = "SkinSelector"
self.config = config.skin.display_skin
| XTAv2/Enigma2 | lib/python/Screens/SkinSelector.py | Python | gpl-2.0 | 5,279 | 0.028793 |
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
from __future__ import print_function, unicode_literals
import logging
from django.http import HttpResponse, HttpResponseServerError, HttpResponseBadRequest
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
logger = logging.getLogger('textit.views')
if 'rapidsms' in settings.INSTALLED_APPS:
from rapidsms.router import receive, lookup_connections
else:
print('NOTE: loading test stub for RapidSMS.')
from tests.rapidsms_stub import receive, lookup_connections
@csrf_exempt
@require_POST
def message_received(request, backend_name):
"""Handle HTTP requests from TextIt.
"""
try:
backend = settings.INSTALLED_BACKENDS[backend_name]
except KeyError:
logger.error('Name "{}" not found in settings INSTALLED_BACKENDS.'.format(backend_name))
return HttpResponseBadRequest('Name "{}" not found in settings INSTALLED_BACKENDS.'.format(backend_name))
try:
if request.META['QUERY_STRING'] != backend['config']['query_key']:
r = 'query_key "{}" does not match configured value from django settings "{}"'.format(
request.META['QUERY_STRING'], backend['config']['query_key'])
logger.error(r)
return HttpResponseBadRequest(r)
except KeyError:
logger.error("No query_key set up in settings INSTALLED_BACKENDS[backend_name]")
return HttpResponseBadRequest("No query_key set up in settings INSTALLED_BACKENDS[backend_name]")
post = request.POST
logger.debug("@@ request from TextIt - Decoded data: %r" % post)
try:
post_event = post['event']
except KeyError:
        logger.error('No "event" key in POST request')
        return HttpResponseBadRequest('No "event" key in POST request')
if post_event == 'mo_sms':
# Must have received a message
logger.debug("@@Got a text message")
try:
fa = post['phone']
from_address = fa[1:] if fa.startswith('+') else fa # strip off the plus sign
text = post['text']
logger.debug("@@Received message from %s: %s" % (from_address, text))
except KeyError:
logger.exception('Malformed POST message')
return HttpResponseBadRequest("Malformed POST message")
try:
# get (or create) a connections object for this backend and from_address
connections = lookup_connections(backend_name, [from_address])
except Exception as e:
r = "Error finding connection for backend_name={}, from={}, err={}".format(
backend_name, from_address, e)
logger.error(r)
return HttpResponseServerError(r)
try:
# pass the message to RapidSMS
receive(text, connections[0])
except Exception as e:
r = "Error receiving message. backend_name={}, from={}, err={}".format(
backend_name, from_address, e)
logger.error(r)
return HttpResponseServerError(r)
# Respond nicely to TextIt
return HttpResponse("OK")
# elif:
if post_event in ['mt_sent', 'mt_dlvd']:
return HttpResponse("thanks") # confirmation messages are ignored
# else:
logger.error("@@No recognized command in request from TextIt")
return HttpResponseBadRequest("Unexpected event code='{}'".format(post_event))
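# A sketch of the settings this view depends on (the backend name and key
# value are illustrative):
#
#   INSTALLED_BACKENDS = {
#       "textit-backend": {
#           "config": {"query_key": "shared-secret"},
#       },
#   }
#
# TextIt must then POST to the URL routed to message_received for that
# backend, with the query string set to the configured query_key.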
def index(request):
return HttpResponse("Hello, world. You're at the TextIt_test index.")
| eHealthAfrica/rapidsms_textit | rapidsms_textit/views.py | Python | bsd-3-clause | 3,591 | 0.004456 |
# Copyright (c) <2015> <Sergi Delgado Segura>
# Distributed under the BSD software license, see the accompanying file LICENSE
import pycurl
import stem.process
from stem.control import Controller
from stem.util import term
from StringIO import StringIO
__author__ = 'sdelgado'
SOCKS_PORT = 9050
CONTROL_PORT = 9051
def tor_query(url, method='GET', data=None, headers=None, socks_port=None):
""" Performs a http query using tor.
:param url: server address.
:type url: str
:param method: request method (GET, POST, ...).
:type method: str
    :param data: data to be sent to the server.
    :type data: str (a JSON-dumped object)
:param headers: headers of the request.
:type headers: str array
:param socks_port: local socket port where tor is listening to requests (configurable in tor.rc).
:type socks_port: int
:return: response code and some server response data.
:rtype: str, str
"""
output = StringIO()
if socks_port is None:
socks_port = SOCKS_PORT
query = pycurl.Curl()
query.setopt(pycurl.URL, url)
query.setopt(pycurl.PROXY, 'localhost')
query.setopt(pycurl.PROXYPORT, socks_port)
query.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME)
query.setopt(pycurl.WRITEFUNCTION, output.write)
if method == 'POST':
if data is None or headers is None:
return "Not enough parameters for POST"
else:
query.setopt(pycurl.HTTPHEADER, headers)
query.setopt(pycurl.POST, 1)
query.setopt(pycurl.POSTFIELDS, data)
try:
query.perform()
r_code = query.getinfo(pycurl.HTTP_CODE)
return r_code, output.getvalue()
except pycurl.error:
return 500, "Unable to reach " + url
def print_bootstrap_lines(line):
""" Print the bootstrap lines.
:param line: line to be printed.
:type line: str
:return: None.
"""
if "Bootstrapped " in line:
print(term.format(line, term.Color.BLUE))
def init_tor(socks_port=None, control_port=None):
""" Initiates a tor connection.
:param socks_port: local port socket where tor will listen to requests (configurable in tor.rc).
:type socks_port: int
:param control_port: local port where tor will listen to control requests (configurable in tor.rc).
:type control_port: int
:return: a tor process and a controller of the process.
:rtype: process, controller
"""
if socks_port is None:
socks_port = SOCKS_PORT
if control_port is None:
control_port = CONTROL_PORT
process = stem.process.launch_tor_with_config(
config={
'SocksPort': str(socks_port),
'ControlPort': str(control_port)
},
init_msg_handler=print_bootstrap_lines, timeout=60, take_ownership=True)
    controller = Controller.from_port(port=control_port)
controller.authenticate()
return process, controller
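# A minimal usage sketch (assumes a local tor binary and the module's
# default ports):
#
#   process, controller = init_tor()
#   code, body = tor_query('http://example.com')
#   process.kill()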
| sr-gi/paysense | utils/tor/tools.py | Python | bsd-3-clause | 2,930 | 0.001706 |
#!/usr/bin/env python
# Jonas Schnelli, 2013
# make sure the Litecoin-Qt.app contains the right plist (including the right version)
# fix made because of several bugs in Qt mac deployment (https://bugreports.qt-project.org/browse/QTBUG-21267)
from string import Template
from datetime import date
bitcoinDir = "./";
inFile = bitcoinDir+"/share/qt/Info.plist"
outFile = "Litecoin-Qt.app/Contents/Info.plist"
version = "unknown";
fileForGrabbingVersion = bitcoinDir+"bitcoin-qt.pro"
for line in open(fileForGrabbingVersion):
lineArr = line.replace(" ", "").split("=");
if lineArr[0].startswith("VERSION"):
version = lineArr[1].replace("\n", "");
fIn = open(inFile, "r")
fileContent = fIn.read()
s = Template(fileContent)
newFileContent = s.substitute(VERSION=version,YEAR=date.today().year)
fOut = open(outFile, "w");
fOut.write(newFileContent);
print "Info.plist fresh created"
| SpoonITCurrency/SpoonITCoin | spoonitcoin/share/qt/clean_mac_info_plist.py | Python | mit | 897 | 0.016722 |
#!/usr/bin/env python3
# dirtool.py - diff tool for directories
# Copyright (C) 2018 Ingo Ruhnke <grumbel@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import signal
import sys
from PyQt5.QtCore import QCoreApplication, QFileSystemWatcher
def directory_changed(path):
print("directory_changed: {}".format(path))
def file_changed(path):
print("file_changed: {}".format(path))
def main(argv):
signal.signal(signal.SIGINT, signal.SIG_DFL)
app = QCoreApplication([])
watcher = QFileSystemWatcher()
print("Watching /tmp/")
watcher.addPath("/tmp/")
watcher.addPath("/tmp/foo")
# Files have to be watched specifically for this to trigger.
# Deleting and recreating a file makes this no longer trigger.
watcher.fileChanged.connect(file_changed)
# This triggers on file creation and deletion
watcher.directoryChanged.connect(directory_changed)
print("files:", watcher.files())
print("directories:", watcher.directories())
sys.exit(app.exec())
if __name__ == "__main__":
main(sys.argv)
# EOF #
| Grumbel/dirtool | experiments/qnotify/qnotify.py | Python | gpl-3.0 | 1,673 | 0 |
from __future__ import absolute_import
from datetime import timedelta
from django.utils import timezone
from django.test import RequestFactory
from exam import fixture
from sentry.middleware.user import UserActiveMiddleware
from sentry.testutils import TestCase
class UserActiveMiddlewareTest(TestCase):
middleware = fixture(UserActiveMiddleware)
factory = fixture(RequestFactory)
def test_simple(self):
self.view = lambda x: None
user = self.user
req = self.factory.get('/')
req.user = user
resp = self.middleware.process_view(req, self.view, [], {})
assert resp is None
assert timezone.now() - user.last_active < timedelta(minutes=1)
user.last_active = None
resp = self.middleware.process_view(req, self.view, [], {})
assert resp is None
assert timezone.now() - user.last_active < timedelta(minutes=1)
| ifduyue/sentry | tests/sentry/middleware/test_useractive.py | Python | bsd-3-clause | 912 | 0 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import datetime
import numbers
from contextlib import closing
from typing import Any, Iterable, Mapping, Optional, Sequence, Union
from airflow.operators.sql import BaseSQLOperator
from airflow.providers.google.suite.hooks.sheets import GSheetsHook
class SQLToGoogleSheetsOperator(BaseSQLOperator):
"""
Copy data from SQL results to provided Google Spreadsheet.
:param sql: The SQL to execute.
:param spreadsheet_id: The Google Sheet ID to interact with.
:param conn_id: the connection ID used to connect to the database.
:param parameters: The parameters to render the SQL query with.
:param database: name of database which overwrite the defined one in connection
:param spreadsheet_range: The A1 notation of the values to retrieve.
:param gcp_conn_id: The connection ID to use when fetching connection info.
:param delegate_to: The account to impersonate using domain-wide delegation of authority,
if any. For this to work, the service account making the request must have
domain-wide delegation enabled.
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
"""
template_fields: Sequence[str] = (
"sql",
"spreadsheet_id",
"spreadsheet_range",
"impersonation_chain",
)
template_fields_renderers = {"sql": "sql"}
template_ext: Sequence[str] = (".sql",)
ui_color = "#a0e08c"
def __init__(
self,
*,
sql: str,
spreadsheet_id: str,
sql_conn_id: str,
parameters: Optional[Union[Mapping, Iterable]] = None,
database: Optional[str] = None,
spreadsheet_range: str = "Sheet1",
gcp_conn_id: str = "google_cloud_default",
delegate_to: Optional[str] = None,
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.sql = sql
self.conn_id = sql_conn_id
self.database = database
self.parameters = parameters
self.gcp_conn_id = gcp_conn_id
self.spreadsheet_id = spreadsheet_id
self.spreadsheet_range = spreadsheet_range
self.delegate_to = delegate_to
self.impersonation_chain = impersonation_chain
def _data_prep(self, data):
for row in data:
item_list = []
for item in row:
if isinstance(item, (datetime.date, datetime.datetime)):
item = item.isoformat()
elif isinstance(item, int): # To exclude int from the number check.
pass
elif isinstance(item, numbers.Number):
item = float(item)
item_list.append(item)
yield item_list
def _get_data(self):
hook = self.get_db_hook()
with closing(hook.get_conn()) as conn, closing(conn.cursor()) as cur:
self.log.info("Executing query")
cur.execute(self.sql, self.parameters or ())
yield [field[0] for field in cur.description]
yield from self._data_prep(cur.fetchall())
def execute(self, context: Any) -> None:
self.log.info("Getting data")
values = list(self._get_data())
self.log.info("Connecting to Google")
sheet_hook = GSheetsHook(
gcp_conn_id=self.gcp_conn_id,
delegate_to=self.delegate_to,
impersonation_chain=self.impersonation_chain,
)
self.log.info(f"Uploading data to https://docs.google.com/spreadsheets/d/{self.spreadsheet_id}")
sheet_hook.update_values(
spreadsheet_id=self.spreadsheet_id,
range_=self.spreadsheet_range,
values=values,
)
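# A minimal usage sketch inside a DAG (connection IDs, the SQL and the
# spreadsheet ID are placeholders):
#
#   upload_metrics = SQLToGoogleSheetsOperator(
#       task_id="sql_to_sheet",
#       sql="SELECT name, value FROM metrics",
#       sql_conn_id="my_database",
#       spreadsheet_id="1AbC...xYz",
#       spreadsheet_range="Sheet1",
#   )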
| Acehaidrey/incubator-airflow | airflow/providers/google/suite/transfers/sql_to_sheets.py | Python | apache-2.0 | 5,101 | 0.00196 |
#!/usr/bin/env python
import sys
import glob
from hdfs3 import HDFileSystem
hdfs_nn = '192.168.33.10'
hdfs = HDFileSystem(host=hdfs_nn, port=8020)
class TransparentFileSystem:
def __init__(self):
self.hdfs_flag = False
return
def set_hdfs_flag(self, flag=True):
self.hdfs_flag = flag
    def exists(self, target):
        if hdfs.exists(target) is True:
            print target + ' This dir is HDFS.'
            self.hdfs_flag = True
        else:
            print target + ' This dir is not HDFS. Local FS.'
            self.hdfs_flag = False
            # TODO: distinguish missing paths via os.path.exists(target)
def glob(self, target):
if self.hdfs_flag is True:
return hdfs.glob(target)
else:
return glob.glob(target)
if __name__ == "__main__":
tfs_hdfs = TransparentFileSystem()
tfs_hdfs.exists('/tmp')
print tfs_hdfs.hdfs_flag
print tfs_hdfs.glob('/tmp')
tfs_local = TransparentFileSystem()
tfs_local.exists('dir to local')
tfs_local.set_hdfs_flag(False)
print tfs_local.hdfs_flag
print tfs_local.glob('dir to local')
sys.exit(0)
| hirolovesbeer/tfs | tfs.py | Python | apache-2.0 | 1,111 | 0.0027 |
from nose.tools import eq_, ok_, raises
import wtforms
from flask import Flask
from mongoengine import *
from flask_superadmin import Admin
from flask_superadmin.model.backends.mongoengine.view import ModelAdmin
class CustomModelView(ModelAdmin):
def __init__(self, model, name=None, category=None, endpoint=None,
url=None, **kwargs):
for k, v in kwargs.iteritems():
setattr(self, k, v)
super(CustomModelView, self).__init__(model, name, category, endpoint,
url)
def setup():
connect('superadmin_test')
app = Flask(__name__)
app.config['SECRET_KEY'] = '1'
app.config['WTF_CSRF_ENABLED'] = False
admin = Admin(app)
return app, admin
def test_model():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
view = CustomModelView(Person)
admin.add_view(view)
eq_(view.model, Person)
eq_(view.name, 'Person')
eq_(view.endpoint, 'person')
eq_(view.url, '/admin/person')
# Verify form
with app.test_request_context():
Form = view.get_form()
ok_(isinstance(Form()._fields['name'], wtforms.TextAreaField))
ok_(isinstance(Form()._fields['age'], wtforms.IntegerField))
# Make some test clients
client = app.test_client()
resp = client.get('/admin/person/')
eq_(resp.status_code, 200)
resp = client.get('/admin/person/add/')
eq_(resp.status_code, 200)
resp = client.post('/admin/person/add/',
data=dict(name='name', age='18'))
eq_(resp.status_code, 302)
person = Person.objects.first()
eq_(person.name, 'name')
eq_(person.age, 18)
resp = client.get('/admin/person/')
eq_(resp.status_code, 200)
ok_(str(person.pk) in resp.data)
resp = client.get('/admin/person/%s/' % person.pk)
eq_(resp.status_code, 200)
resp = client.post('/admin/person/%s/' % person.pk, data=dict(name='changed'))
eq_(resp.status_code, 302)
person = Person.objects.first()
eq_(person.name, 'changed')
eq_(person.age, 18)
resp = client.post('/admin/person/%s/delete/' % person.pk)
eq_(resp.status_code, 200)
eq_(Person.objects.count(), 1)
resp = client.post('/admin/person/%s/delete/' % person.pk, data={'confirm_delete': True})
eq_(resp.status_code, 302)
eq_(Person.objects.count(), 0)
def test_list_display():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
view = CustomModelView(Person, list_display=('name', 'age'))
admin.add_view(view)
eq_(len(view.list_display), 2)
client = app.test_client()
resp = client.get('/admin/person/')
ok_('Name' in resp.data)
ok_('Age' in resp.data)
resp = client.post('/admin/person/add/',
data=dict(name='Steve', age='18'))
eq_(resp.status_code, 302)
resp = client.get('/admin/person/')
ok_('Steve' in resp.data)
ok_('18' in resp.data)
def test_exclude():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
view = CustomModelView(Person, exclude=['name'])
admin.add_view(view)
# Verify form
with app.test_request_context():
Form = view.get_form()
eq_(Form()._fields.keys(), ['csrf_token', 'age'])
def test_fields():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
view = CustomModelView(Person, fields=['name'])
admin.add_view(view)
# Verify form
with app.test_request_context():
Form = view.get_form()
eq_(Form()._fields.keys(), ['csrf_token', 'name'])
def test_fields_and_exclude():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
view = CustomModelView(Person, fields=['name', 'age'], exclude=['name'])
admin.add_view(view)
# Verify form
with app.test_request_context():
Form = view.get_form()
eq_(Form()._fields.keys(), ['csrf_token', 'age'])
def test_search_fields():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
Person.objects.create(name='John', age=18)
Person.objects.create(name='Michael', age=21)
view = CustomModelView(Person, list_display=['name'],
search_fields=['name'])
admin.add_view(view)
eq_(len(view.search_fields), 1)
client = app.test_client()
resp = client.get('/admin/person/')
ok_('name="q" class="search-input"' in resp.data)
ok_('John' in resp.data)
ok_('Michael' in resp.data)
resp = client.get('/admin/person/?q=john')
ok_('John' in resp.data)
ok_('Michael' not in resp.data)
def test_pagination():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
Person.objects.create(name='John', age=18)
Person.objects.create(name='Michael', age=21)
Person.objects.create(name='Steve', age=15)
Person.objects.create(name='Ron', age=59)
view = CustomModelView(Person, list_per_page=2,
list_display=['name', 'age'])
admin.add_view(view)
client = app.test_client()
resp = client.get('/admin/person/')
ok_('<div class="total-count">Total count: 4</div>' in resp.data)
ok_('<a href="#">1</a>' in resp.data) # make sure the first page is active (i.e. has no url)
ok_('John' in resp.data)
ok_('Michael' in resp.data)
ok_('Steve' not in resp.data)
ok_('Ron' not in resp.data)
# default page == page 0
eq_(resp.data, client.get('/admin/person/?page=0').data)
resp = client.get('/admin/person/?page=1')
ok_('John' not in resp.data)
ok_('Michael' not in resp.data)
ok_('Steve' in resp.data)
ok_('Ron' in resp.data)
def test_sort():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
Person.objects.create(name='John', age=18)
Person.objects.create(name='Michael', age=21)
Person.objects.create(name='Steve', age=15)
Person.objects.create(name='Ron', age=59)
view = CustomModelView(Person, list_per_page=2,
list_display=['name', 'age'])
admin.add_view(view)
client = app.test_client()
resp = client.get('/admin/person/?sort=name')
ok_('John' in resp.data)
ok_('Michael' in resp.data)
ok_('Ron' not in resp.data)
ok_('Steve' not in resp.data)
resp = client.get('/admin/person/?sort=-name')
ok_('John' not in resp.data)
ok_('Michael' not in resp.data)
ok_('Ron' in resp.data)
ok_('Steve' in resp.data)
resp = client.get('/admin/person/?sort=age')
ok_('John' in resp.data)
ok_('Michael' not in resp.data)
ok_('Ron' not in resp.data)
ok_('Steve' in resp.data)
resp = client.get('/admin/person/?sort=-age')
ok_('John' not in resp.data)
ok_('Michael' in resp.data)
ok_('Ron' in resp.data)
ok_('Steve' not in resp.data)
def test_reference_linking():
app, admin = setup()
class Dog(Document):
name = StringField()
def __unicode__(self):
return self.name
class Person(Document):
name = StringField()
age = IntField()
pet = ReferenceField(Dog)
class DogAdmin(ModelAdmin):
pass
class PersonAdmin(ModelAdmin):
list_display = ('name', 'age', 'pet')
fields = ('name', 'age', 'pet')
readonly_fields = ('pet',)
Dog.drop_collection()
Person.drop_collection()
dog = Dog.objects.create(name='Sparky')
person = Person.objects.create(name='Stan', age=10, pet=dog)
admin.register(Dog, DogAdmin, name='Dogs')
admin.register(Person, PersonAdmin, name='People')
client = app.test_client()
# test linking on a list page
resp = client.get('/admin/person/')
dog_link = '<a href="/admin/dog/%s/">Sparky</a>' % dog.pk
ok_(dog_link in resp.data)
# test linking on an edit page
resp = client.get('/admin/person/%s/' % person.pk)
ok_('<textarea class="" id="name" name="name">Stan</textarea>' in resp.data)
ok_('<input class="" id="age" name="age" type="text" value="10">' in resp.data)
ok_(dog_link in resp.data)
def test_no_csrf_in_form():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField()
Person.drop_collection()
person = Person.objects.create(name='Eric', age=10)
client = app.test_client()
view = CustomModelView(Person)
admin.add_view(view)
resp = client.get('/admin/person/%s/' % person.pk)
ok_('<textarea class="" id="name" name="name">Eric</textarea>' in resp.data)
ok_('<input class="" id="age" name="age" type="text" value="10">' in resp.data)
ok_('<label for="csrf_token">Csrf Token</label>' not in resp.data)
def test_requred_int_field():
app, admin = setup()
class Person(Document):
name = StringField()
age = IntField(required=True)
Person.drop_collection()
view = CustomModelView(Person)
admin.add_view(view)
client = app.test_client()
resp = client.post('/admin/person/add/', data=dict(name='name', age='0'))
eq_(resp.status_code, 302)
ok_('This field is required.' not in resp.data)
ok_('error.' not in resp.data)
| syrusakbary/Flask-SuperAdmin | flask_superadmin/tests/test_mongoengine.py | Python | bsd-3-clause | 9,707 | 0.001751 |
#!/usr/bin/python
#
# Simple indication receiver using Twisted Python. HTTP POST requests
# are listened for on port 5998, and on port 5999 over SSL.
#
# Requires Twisted Python, pyOpenSSL and pywbem.
#
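# Example invocation (option values are illustrative):
#   python pycimlistener.py -n root/interop -d http://localhost:5998/ \
#       -q "SELECT * FROM CIM_AlertIndication"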
import sys
import optparse
import pywbem
from twisted.internet import reactor
from twisted.web import server, resource
global conn
conn=None
class WBEMConn:
_shared_state = {}
conn = None
def __init__(self, options=None):
# Borgness
self.__dict__ = WBEMConn._shared_state
self.conn = pywbem.SFCBUDSConnection()
'''
if options:
proto = 'http'
if options.secure:
proto = 'https'
url = '%s://%s' % (proto, options.host)
self.conn = pywbem.WBEMConnection(
url,
(options.user, options.password),
default_namespace = options.namespace)
'''
global conn
conn = self.conn
class CIMOM(resource.Resource):
isLeaf = 1
def render_POST(self, request):
for line in request.content.readlines():
print(line)
return ''
from OpenSSL import SSL
class ServerContextFactory:
def getContext(self):
"""Create an SSL context with a dodgy certificate."""
ctx = SSL.Context(SSL.SSLv23_METHOD)
ctx.use_certificate_file('server.pem')
ctx.use_privatekey_file('server.pem')
return ctx
from twisted.internet import ssl, reactor
from twisted.python import log
from socket import getfqdn
import time
def _createFilter(query,
ns,
querylang='WQL',
src_ns='root/cimv2',
in_name=None):
name = in_name or 'cimfilter%s'%time.time()
filterinst=pywbem.CIMInstance('CIM_IndicationFilter')
filterinst['CreationClassName']='CIM_IndicationFilter'
filterinst['SystemCreationClassName']='CIM_ComputerSystem'
filterinst['SystemName']=getfqdn()
filterinst['Name']=name
filterinst['Query']=query
filterinst['QueryLanguage']=querylang
filterinst['SourceNamespace']=src_ns
cop = pywbem.CIMInstanceName('CIM_IndicationFilter')
    cop.keybindings = { 'CreationClassName':'CIM_IndicationFilter',
                        'SystemCreationClassName':'CIM_ComputerSystem',
                        'SystemName':getfqdn(),
                        'Name':name }
cop.namespace=ns
filterinst.path = cop
filtercop = conn.CreateInstance(filterinst)
return filtercop
def _createDest(destination,
ns,
in_name=None):
name = in_name or 'cimlistener%s'%time.time()
destinst=pywbem.CIMInstance('CIM_ListenerDestinationCIMXML')
destinst['CreationClassName']='CIM_ListenerDestinationCIMXML'
destinst['SystemCreationClassName']='CIM_ComputerSystem'
destinst['SystemName']=getfqdn()
print("destname=%s" % name)
destinst['Name']=name
destinst['Destination']=destination
cop = pywbem.CIMInstanceName('CIM_ListenerDestinationCIMXML')
    cop.keybindings = { 'CreationClassName':'CIM_ListenerDestinationCIMXML',
                        'SystemCreationClassName':'CIM_ComputerSystem',
                        'SystemName':getfqdn(),
                        'Name':name }
cop.namespace=ns
destinst.path = cop
destcop = conn.CreateInstance(destinst)
return destcop
def _createSubscription(ns,
handler,
indfilter):
subinst=pywbem.CIMInstance('CIM_IndicationSubscription')
    subinst['Filter']=indfilter
    subinst['Handler']=handler
    cop = pywbem.CIMInstanceName('CIM_IndicationSubscription')
    cop.keybindings = { 'Filter':indfilter,
                        'Handler':handler }
cop.namespace=ns
subinst.path = cop
subcop = conn.CreateInstance(subinst)
return subcop
if __name__ == '__main__':
global conn
parser = optparse.OptionParser()
parser.add_option('--level',
'-l',
action='store',
type='int',
dest='dbglevel',
help='Indicate the level of debugging statements to display (default=2)',
default=2)
    parser.add_option('-s', '--UDS', action='store_true', default=False,
                      help="Use the SFCBUDSConnection to the cimom")
parser.add_option('-u', '--url', default='https://localhost',
help='Specify the url of the CIMOM (default=https://localhost)')
parser.add_option('-n', '--namespace', default='root/interop',
help='Specify the namespace the test runs against (default=root/interop)')
parser.add_option('', '--user', default='pegasus',
help='Specify the user name used when connection to the CIMOM (default=pegasus)')
parser.add_option('', '--password', default='',
help='Specify the password for the user (default=<empty>)')
    parser.add_option('--verbose', action='store_true', default=False,
                      help='Show verbose output')
parser.add_option('-q', '--query', help='Query string for Filter')
parser.add_option('-g', '--qlang', help='Query Language (default=WQL)', default="WQL")
parser.add_option('-d', '--dest', help='Destination for the CIM_ListenerDestination')
parser.add_option('-p', '--provider', help='Name of provider to setup listener for')
options, arguments = parser.parse_args()
conn = WBEMConn().conn
indhandler=None
indfilter=None
indsub=None
try:
indhandler = _createDest(options.dest, options.namespace)
indfilter = _createFilter(options.query, options.namespace, querylang=options.qlang)
indsub = _createSubscription(options.namespace, indhandler, indfilter)
log.startLogging(sys.stdout)
site = server.Site(CIMOM())
reactor.listenTCP(5998, site)
reactor.listenSSL(5999, site, ServerContextFactory())
reactor.run()
finally:
if indsub:
conn.DeleteInstance(indsub)
if indfilter:
conn.DeleteInstance(indfilter)
if indhandler:
conn.DeleteInstance(indhandler)
| zenoss/pywbem | attic/irecv/pycimlistener.py | Python | lgpl-2.1 | 6,068 | 0.012525 |
import pytest
import responses
from urlobject import URLObject
from flask import Flask
from flask_dance.contrib.github import make_github_blueprint, github
from flask_dance.consumer import OAuth2ConsumerBlueprint
from flask_dance.consumer.storage import MemoryStorage
@pytest.fixture
def make_app():
"A callable to create a Flask app with the GitHub provider"
def _make_app(*args, **kwargs):
app = Flask(__name__)
app.secret_key = "whatever"
blueprint = make_github_blueprint(*args, **kwargs)
app.register_blueprint(blueprint)
return app
return _make_app
def test_blueprint_factory():
github_bp = make_github_blueprint(
client_id="foo", client_secret="bar", scope="user:email", redirect_to="index"
)
assert isinstance(github_bp, OAuth2ConsumerBlueprint)
assert github_bp.session.scope == "user:email"
assert github_bp.session.base_url == "https://api.github.com/"
assert github_bp.session.client_id == "foo"
assert github_bp.client_secret == "bar"
assert github_bp.authorization_url == "https://github.com/login/oauth/authorize"
assert github_bp.token_url == "https://github.com/login/oauth/access_token"
def test_load_from_config(make_app):
app = make_app()
app.config["GITHUB_OAUTH_CLIENT_ID"] = "foo"
app.config["GITHUB_OAUTH_CLIENT_SECRET"] = "bar"
resp = app.test_client().get("/github")
url = resp.headers["Location"]
client_id = URLObject(url).query.dict.get("client_id")
assert client_id == "foo"
@responses.activate
def test_context_local(make_app):
responses.add(responses.GET, "https://google.com")
    # set up two apps with two different sets of auth tokens
app1 = make_app(
"foo1",
"bar1",
redirect_to="url1",
storage=MemoryStorage({"access_token": "app1"}),
)
app2 = make_app(
"foo2",
"bar2",
redirect_to="url2",
storage=MemoryStorage({"access_token": "app2"}),
)
# outside of a request context, referencing functions on the `github` object
# will raise an exception
with pytest.raises(RuntimeError):
github.get("https://google.com")
# inside of a request context, `github` should be a proxy to the correct
# blueprint session
with app1.test_request_context("/"):
app1.preprocess_request()
github.get("https://google.com")
request = responses.calls[0].request
assert request.headers["Authorization"] == "Bearer app1"
with app2.test_request_context("/"):
app2.preprocess_request()
github.get("https://google.com")
request = responses.calls[1].request
assert request.headers["Authorization"] == "Bearer app2"
| singingwolfboy/flask-dance | tests/contrib/test_github.py | Python | mit | 2,744 | 0.001093 |
"""
Created on 1 May 2019
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
https://www.sensirion.com/en/environmental-sensors/particulate-matter-sensors-pm25/
https://bytes.com/topic/python/answers/171354-struct-ieee-754-internal-representation
Firmware report:
89667EE8A8B34BC0
"""
import time
from scs_core.data.datetime import LocalizedDatetime
from scs_core.data.datum import Decode, Encode
from scs_core.particulate.sps_datum import SPSDatum, SPSDatumCounts
from scs_dfe.particulate.opc import OPC
from scs_host.bus.i2c import I2C
# --------------------------------------------------------------------------------------------------------------------
class SPS30(OPC):
"""
classdocs
"""
SOURCE = 'S30'
MIN_SAMPLE_PERIOD = 1.0 # seconds
MAX_SAMPLE_PERIOD = 10.0 # seconds
DEFAULT_SAMPLE_PERIOD = 10.0 # seconds
DEFAULT_ADDR = 0x69
# ----------------------------------------------------------------------------------------------------------------
__BOOT_TIME = 4.0 # seconds
__POWER_CYCLE_TIME = 2.0 # seconds
__FAN_START_TIME = 2.0 # seconds
__FAN_STOP_TIME = 2.0 # seconds
__CLEANING_TIME = 10.0 # seconds
__MAX_PERMITTED_ZERO_READINGS = 4
__CMD_START_MEASUREMENT = 0x0010
__CMD_STOP_MEASUREMENT = 0x0104
__CMD_READ_DATA_READY_FLAG = 0x0202
__CMD_READ_MEASURED_VALUES = 0x0300
__CMD_AUTO_CLEANING_INTERVAL = 0x8004
__CMD_START_FAN_CLEANING = 0x5607
__CMD_READ_ARTICLE_CODE = 0xd025
__CMD_READ_SERIAL_NUMBER = 0xd033
__CMD_RESET = 0xd304
__POST_WRITE_DELAY = 0.020 # seconds
__LOCK_TIMEOUT = 2.0
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def source(cls):
return cls.SOURCE
@classmethod
def uses_spi(cls):
return False
@classmethod
def datum_class(cls):
return SPSDatum
# ----------------------------------------------------------------------------------------------------------------
@classmethod
    def __decode(cls, chars):
        # SPS30 I2C frames carry payload in two-byte groups, each followed by
        # a CRC byte: verify each checksum and strip it from the output.
        decoded = []
for i in range(0, len(chars), 3):
group = chars[i:i + 2]
decoded.extend(group)
actual_crc = chars[i + 2]
required_crc = cls.__crc(group)
if actual_crc != required_crc:
raise ValueError("bad checksum: required: 0x%02x actual: 0x%02x" % (required_crc, actual_crc))
return decoded
@classmethod
def __encode(cls, chars):
encoded = []
for i in range(0, len(chars), 2):
group = chars[i:i + 2]
encoded.extend(group)
encoded.append(cls.__crc(group))
return encoded
@staticmethod
    def __crc(data):
        # CRC-8 as specified for the SPS30: polynomial 0x31, initialisation
        # 0xff, no final XOR.
        crc = 0xff
for datum in data:
crc ^= datum
for bit in range(8, 0, -1):
crc = ((crc << 1) ^ 0x31 if crc & 0x80 else (crc << 1)) & 0xff
return crc
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def lock_timeout(cls):
return cls.__LOCK_TIMEOUT
@classmethod
def boot_time(cls):
return cls.__BOOT_TIME
@classmethod
def power_cycle_time(cls):
return cls.__POWER_CYCLE_TIME
@classmethod
def max_permitted_zero_readings(cls):
return cls.__MAX_PERMITTED_ZERO_READINGS
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, interface, i2c_bus, i2c_addr):
"""
Constructor
"""
super().__init__(interface)
self.__i2c_bus = i2c_bus
self.__i2c_addr = i2c_addr
# ----------------------------------------------------------------------------------------------------------------
def operations_on(self):
self.__write(self.__CMD_START_MEASUREMENT, self.__FAN_START_TIME, 0x03, 0x00)
def operations_off(self):
self.__read(self.__CMD_STOP_MEASUREMENT, self.__FAN_STOP_TIME)
def reset(self):
self.__read(self.__CMD_RESET, self.__BOOT_TIME)
# ----------------------------------------------------------------------------------------------------------------
def clean(self):
self.__read(self.__CMD_START_FAN_CLEANING, self.__CLEANING_TIME)
@property
def cleaning_interval(self):
r = self.__read(self.__CMD_AUTO_CLEANING_INTERVAL, 0, 6)
interval = Decode.unsigned_long(r[0:4], '>')
return interval
@cleaning_interval.setter
def cleaning_interval(self, interval):
values = Encode.unsigned_long(interval, '>')
self.__write(self.__CMD_AUTO_CLEANING_INTERVAL, self.__POST_WRITE_DELAY, *values)
# ----------------------------------------------------------------------------------------------------------------
def data_ready(self):
chars = self.__read(self.__CMD_READ_DATA_READY_FLAG, 0, 3)
return chars[1] == 0x01
def sample(self):
r = self.__read(self.__CMD_READ_MEASURED_VALUES, 0, 60)
# density...
pm1 = Decode.float(r[0:4], '>')
pm2p5 = Decode.float(r[4:8], '>')
pm4 = Decode.float(r[8:12], '>')
pm10 = Decode.float(r[12:16], '>')
# count...
pm0p5_count = Decode.float(r[16:20], '>')
pm1_count = Decode.float(r[20:24], '>')
pm2p5_count = Decode.float(r[24:28], '>')
pm4_count = Decode.float(r[28:32], '>')
pm10_count = Decode.float(r[32:36], '>')
# typical size...
tps = Decode.float(r[36:40], '>')
# time...
rec = LocalizedDatetime.now().utc()
# report...
counts = SPSDatumCounts(pm0p5_count, pm1_count, pm2p5_count, pm4_count, pm10_count)
return SPSDatum(self.SOURCE, rec, pm1, pm2p5, pm4, pm10, counts, tps)
# ----------------------------------------------------------------------------------------------------------------
def version(self):
r = self.__read(self.__CMD_READ_ARTICLE_CODE, 0, 48)
version = ''.join(chr(byte) for byte in r)
return version
def serial_no(self):
r = self.__read(self.__CMD_READ_SERIAL_NUMBER, 0, 48)
serial_no = ''.join(chr(byte) for byte in r)
return serial_no
def firmware(self):
return self.serial_no()
# ----------------------------------------------------------------------------------------------------------------
def get_firmware_conf(self):
raise NotImplementedError
def set_firmware_conf(self, jdict):
raise NotImplementedError
def commit_firmware_conf(self):
raise NotImplementedError
# ----------------------------------------------------------------------------------------------------------------
@property
def bus(self):
return self.__i2c_bus
@property
def address(self):
return self.__i2c_addr
# ----------------------------------------------------------------------------------------------------------------
@property
def lock_name(self):
return self.__class__.__name__ + '-' + str(self.__i2c_bus) + '-' + ("0x%02x" % self.__i2c_addr)
# ----------------------------------------------------------------------------------------------------------------
def __read(self, command, wait, count=0):
try:
self.obtain_lock()
try:
I2C.Sensors.start_tx(self.__i2c_addr)
encoded = I2C.Sensors.read_cmd16(command, count)
values = self.__decode(encoded)
finally:
I2C.Sensors.end_tx()
time.sleep(wait)
return values
finally:
self.release_lock()
def __write(self, command, wait, *values):
try:
self.obtain_lock()
try:
I2C.Sensors.start_tx(self.__i2c_addr)
encoded = self.__encode(values)
I2C.Sensors.write_addr16(command, *encoded)
finally:
I2C.Sensors.end_tx()
time.sleep(wait)
finally:
self.release_lock()
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "SPS30:{interface:%s, i2c_bus:%d i2c_addr:0x%02x}" % \
(self.interface, self.__i2c_bus, self.__i2c_addr)
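# A minimal usage sketch (the interface object and bus number are
# host-specific):
#
#   opc = SPS30(interface, i2c_bus=1, i2c_addr=SPS30.DEFAULT_ADDR)
#   opc.operations_on()
#   if opc.data_ready():
#       print(opc.sample())
#   opc.operations_off()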
| south-coast-science/scs_dfe_eng | src/scs_dfe/particulate/sps_30/sps_30.py | Python | mit | 8,991 | 0.008008 |
#!/usr/bin/env python
"""
Display the planar concentration and velocity fields of a RMPCDMD simulation.
"""
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('file', help="H5MD file")
parser.add_argument('--species', type=int, default=0)
args = parser.parse_args()
import h5py
import matplotlib.pyplot as plt
import numpy as np
with h5py.File(args.file, 'r') as f:
c = f['fields/planar_concentration']
v = f['fields/planar_velocity']
x_min = c.attrs['x_min'][()]
dx = c.attrs['dx'][()]
y_min = c.attrs['y_min'][()]
dy = c.attrs['dy'][()]
thickness = c.attrs['thickness'][()]
c = c[:]
v = v[:]
N_x, N_y = c.shape[:2]
# x and y must overshoot c.shape by one for pcolormesh
x = x_min + np.arange(N_x+1)*dx
y = y_min + np.arange(N_y+1)*dy
c /= dx*dy*thickness
plt.subplot(121, aspect=1)
plt.pcolormesh(x, y, c[:,:,args.species].T, cmap=plt.cm.viridis)
plt.colorbar()
plt.subplot(122, aspect=1)
x, y = np.meshgrid(x[:-1], y[:-1])
plt.quiver(x, y, v[:,:,args.species,0].T, v[:,:,args.species,1].T)
plt.show()
| pdebuyl-lab/RMPCDMD | experiments/03-single-janus/plot_planar.py | Python | bsd-3-clause | 1,110 | 0.00991 |
#!/usr/bin/env python
"""
A basic demo of pandas
"""
from pandas import DataFrame
df = DataFrame(["a", "b", "c"], index=[("0", "1"), ("1", "2"), ("2", "3")])
print(df.get_values())
try:
    print(df.ix[("0", "1")])
except Exception:
    print("yes, accessing .ix with tuple does not work")
print(df.xs(("0", "1")))
| veltzer/demos-python | src/examples/short/pandas/tuples_as_indices.py | Python | gpl-3.0 | 308 | 0.003247 |
# -*- encoding: utf-8 -*-
#
# Copyright © 2017 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import logging
import time
import github
import requests
import six.moves
LOG = logging.getLogger(__name__)
TRAVIS_BASE_URL = 'https://api.travis-ci.org'
TRAVIS_V2_HEADERS = {"Accept": "application/vnd.travis-ci.2+json",
"User-Agent": "Pastamaker/1.0.0"}
UNUSABLE_STATES = ["unknown", None]
def ensure_mergable_state(pull):
if pull.is_merged() or pull.mergeable_state not in UNUSABLE_STATES:
return pull
# Github is currently processing this PR, we wait the completion
for i in range(0, 5):
LOG.info("%s, refreshing...", pull.pretty())
pull.update()
if pull.is_merged() or pull.mergeable_state not in UNUSABLE_STATES:
break
        time.sleep(0.42)  # you know, this one always works
return pull
def compute_travis_detail(pull, **extra):
if (not pull.pastamaker["travis_url"] or
pull.pastamaker["travis_url"] == "#"):
return None
build_id = pull.pastamaker["travis_url"].split("?")[0].split("/")[-1]
r = requests.get(TRAVIS_BASE_URL + "/builds/" + build_id,
headers=TRAVIS_V2_HEADERS)
if r.status_code != 200:
return None
build = r.json()["build"]
build["resume_state"] = pull.pastamaker["travis_state"]
build["jobs"] = []
for job_id in build["job_ids"]:
r = requests.get(TRAVIS_BASE_URL + "/jobs/%s" % job_id,
headers=TRAVIS_V2_HEADERS)
if r.status_code == 200:
job = r.json()["job"]
job["log_url"] = TRAVIS_BASE_URL + "/jobs/%s/log" % job_id
LOG.debug("%s: job %s %s -> %s" % (pull.pretty(), job_id,
job["state"],
job["log_url"]))
build["jobs"].append(job)
if (pull.pastamaker["travis_state"] == "pending" and
job["state"] == "started"):
build["resume_state"] = "working"
LOG.debug("%s: build %s %s/%s" % (pull.pretty(), build_id,
build["state"],
build["resume_state"]))
return build
def compute_approvals(pull, **extra):
users_info = {}
reviews_ok = set()
reviews_ko = set()
for review in pull.pastamaker["reviews"]:
if review.user.id not in extra["collaborators"]:
continue
users_info[review.user.login] = review.user.raw_data
if review.state == 'APPROVED':
reviews_ok.add(review.user.login)
if review.user.login in reviews_ko:
reviews_ko.remove(review.user.login)
elif review.state in ["DISMISSED", "CHANGES_REQUESTED"]:
if review.user.login in reviews_ok:
reviews_ok.remove(review.user.login)
if review.user.login in reviews_ko:
reviews_ko.remove(review.user.login)
if review.state == "CHANGES_REQUESTED":
reviews_ko.add(review.user.login)
elif review.state == 'COMMENTED':
pass
else:
LOG.error("%s FIXME review state unhandled: %s",
pull.pretty(), review.state)
try:
required = extra["branch_policy"][
"required_pull_request_reviews"]["required_approving_review_count"]
except KeyError:
return [], [], 1, 1
# FIXME(sileht): Compute the thing on JS side
remaining = list(six.moves.range(max(0, required - len(reviews_ok))))
return ([users_info[u] for u in reviews_ok],
[users_info[u] for u in reviews_ko],
required, remaining)
def compute_combined_status(pull, **extra):
commit = pull.base.repo.get_commit(pull.head.sha)
status = commit.get_combined_status()
return status.state
def compute_ci_statuses(pull, **extra):
    # We need only travis, so shortcut to it here
if "travis" in extra:
raw_statuses = [extra["travis"]]
else:
# NOTE(sileht): Statuses are returned in reverse chronological order.
# The first status in the list will be the latest one.
commit = pull.base.repo.get_commit(pull.head.sha)
raw_statuses = [s.raw_data
for s in reversed(list(commit.get_statuses()))]
statuses = {}
for s in raw_statuses:
statuses[s["context"]] = {"state": s["state"], "url": s["target_url"]}
return statuses
def compute_approved(pull, **extra):
approved = len(pull.pastamaker["approvals"][0])
requested_changes = len(pull.pastamaker['approvals'][1])
required = pull.pastamaker['approvals'][2]
if requested_changes != 0:
return False
else:
return approved >= required
def compute_travis_state(pull, **extra):
return pull.pastamaker["ci_statuses"].get(
"continuous-integration/travis-ci/pr", {"state": "unknown"}
)["state"]
def compute_travis_url(pull, **extra):
return pull.pastamaker["ci_statuses"].get(
"continuous-integration/travis-ci/pr", {"url": "#"}
)["url"]
def compute_weight(pull, **extra):
if not pull.pastamaker["approved"]:
weight = -1
elif (pull.mergeable_state == "clean"
and pull.pastamaker["combined_status"] == "success"):
# Best PR ever, up2date and CI OK
weight = 11
elif pull.mergeable_state in ["clean", "unstable"]:
weight = 10
elif (pull.mergeable_state == "blocked"
and pull.pastamaker["combined_status"] == "pending"):
        # Maybe clean soon, or maybe this is the PR selected by the
        # previous run that we just rebased
weight = 10
elif pull.mergeable_state == "behind":
        # Not up2date, but ready to merge if the branch is updatable
if not pull.maintainer_can_modify:
weight = -1
elif pull.pastamaker["combined_status"] == "success":
weight = 7
elif pull.pastamaker["combined_status"] == "pending":
weight = 5
else:
weight = -1
else:
weight = -1
if weight >= 0 and pull.milestone is not None:
weight += 1
# LOG.info("%s prio: %s, %s, %s, %s, %s", pull.pretty(), weight,
# pull.pastamaker["approved"], pull.mergeable_state,
# pull.pastamaker["combined_status"])
return weight
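# Weight scale, derived from the branches above (illustrative summary):
#   11 = approved, up to date, CI green;   10 = approved and mergeable
#    7 = behind, updatable, CI green;       5 = behind, updatable, CI pending
#   -1 = not approved / not updatable / CI failed
# A milestone adds +1 to any non-negative weight.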
# Order matters: some methods need the results of earlier ones
FULLIFIER = [
("commits", lambda p, **extra: list(p.get_commits())),
("reviews", lambda p, **extra: list(p.get_reviews())),
("combined_status", compute_combined_status),
("approvals", compute_approvals), # Need reviews
("approved", compute_approved), # Need approvals
("ci_statuses", compute_ci_statuses), # Need approvals
("travis_state", compute_travis_state), # Need ci_statuses
("travis_url", compute_travis_url), # Need ci_statuses
("travis_detail", compute_travis_detail), # Need travis_url
("weight", compute_weight), # Need approved, travis_state
]
CACHE_HOOK_LIST_CONVERT = {
"commits": github.Commit.Commit,
"reviews": github.PullRequestReview.PullRequestReview,
}
def jsonify(pull):
raw = copy.copy(pull.raw_data)
for key, method in FULLIFIER:
value = pull.pastamaker[key]
if key in CACHE_HOOK_LIST_CONVERT:
try:
value = [item.raw_data for item in value]
except AttributeError:
                LOG.exception("%s, failed to cache %s: %s",
pull.pretty(), key, value)
raw["pastamaker_%s" % key] = value
return raw
def fullify(pull, cache=None, **extra):
LOG.debug("%s, fullifing...", pull.pretty())
if not hasattr(pull, "pastamaker"):
pull.pastamaker = {}
pull = ensure_mergable_state(pull)
for key, method in FULLIFIER:
if key not in pull.pastamaker:
if cache and "pastamaker_%s" % key in cache:
value = cache["pastamaker_%s" % key]
klass = CACHE_HOOK_LIST_CONVERT.get(key)
if klass:
value = [klass(pull.base.repo._requester, {}, item,
completed=True) for item in value]
elif key == "raw_data":
value = method(pull, **extra)
else:
start = time.time()
LOG.info("%s, compute %s" % (pull.pretty(), key))
value = method(pull, **extra)
LOG.debug("%s, %s computed in %s sec" % (
pull.pretty(), key, time.time() - start))
pull.pastamaker[key] = value
LOG.debug("%s, fullified", pull.pretty())
return pull
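# Hypothetical call sketch (argument values are illustrative); the **extra
# kwargs must carry whatever the FULLIFIER methods consume, e.g.:
#
#     pull = fullify(pull,
#                    cache=cached_raw,               # a previous jsonify() result
#                    collaborators=[user_id, ...],   # used by compute_approvals()
#                    branch_policy=branch_protection)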
| sileht/pastamaker | pastamaker/gh_pr_fullifier.py | Python | apache-2.0 | 9,306 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
from django.conf import settings
import uuid
class Migration(migrations.Migration):
dependencies = [
('ISS', '0011_poster_timezone'),
]
operations = [
migrations.CreateModel(
name='PrivateMessage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('chain', models.UUIDField(default=uuid.uuid4, editable=False)),
('created', models.DateTimeField(default=django.utils.timezone.now)),
('subject', models.CharField(max_length=256)),
('content', models.TextField()),
('receiver', models.ForeignKey(related_name='pms_received', to=settings.AUTH_USER_MODEL)),
('sender', models.ForeignKey(related_name='pms_sent', to=settings.AUTH_USER_MODEL)),
],
),
]
| RyanJenkins/ISS | ISS/migrations/0012_privatemessage.py | Python | gpl-3.0 | 1,032 | 0.004845 |
"""fix stock again (17.03.16)
Revision ID: 3b3de4db8006
Revises: 1b434f6a7b5
Create Date: 2016-03-17 22:02:55.090285
"""
# revision identifiers, used by Alembic.
revision = '3b3de4db8006'
down_revision = '1b434f6a7b5'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.execute('DROP VIEW pol_stock_latest;')
op.execute('''
CREATE VIEW pol_stock_latest as
SELECT DISTINCT ON (pol_beer_id, shop_id) shop_id, pol_beer_id, stock, updated
FROM pol_stock
ORDER BY pol_beer_id, shop_id, updated DESC;
''')
def downgrade():
pass
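    # No downgrade implemented: the previous pol_stock_latest view
    # definition is not restored.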
| atlefren/beerdatabase | alembic/versions/3b3de4db8006_fix_stock_again_17_03_16.py | Python | mit | 642 | 0.004673 |
import xmltodict
def parseData(data):
try:
return xmltodict.parse(data)
    except Exception:
        if len(data.split()) == 0:
return None
else:
raise Exception('Invalid XML data', data)
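# Minimal usage sketch (the XML sample is illustrative):
#
#     doc = parseData('<status><code>200</code></status>')
#     if doc is not None:
#         print(doc['status']['code'])  # -> '200'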
| ColinKeigher/McAfeeWebGateway | mwg/parse.py | Python | gpl-2.0 | 236 | 0.012712 |
import os
import shutil
import subprocess
import sys
import tempfile
import threading
_g_failed = []
def this_location():
return os.path.abspath(os.path.dirname(__file__))
def checkenv(sd_license, release, ssh_key_path):
required_vars = ['AWS_ACCESS_KEY_ID',
'AWS_SECRET_ACCESS_KEY',
'GOPATH']
for k in required_vars:
v = os.getenv(k)
if v is None:
raise Exception("The environment variable %s must be set" % k)
p = subprocess.Popen("docker ps", shell=True)
rc = p.wait()
if rc != 0:
raise Exception("The docker environment is not configured")
file_list = [sd_license, release, ssh_key_path]
for f in file_list:
if not os.path.exists(f):
raise Exception("The file %s does not exist" % f)
    # os.unsetenv() does not update os.environ; pop() covers both
    os.environ.pop('STARDOG_ADMIN_PASSWORD', None)
def build_with_gox():
base_dir = os.path.dirname(this_location())
cmd = 'gox -osarch="linux/amd64" -osarch="darwin/amd64" ' \
'-output=release/{{.OS}}_{{.Arch}}/stardog-graviton '\
'github.com/stardog-union/stardog-graviton/cmd/stardog-graviton'
p = subprocess.Popen(cmd, shell=True, cwd=base_dir)
rc = p.wait()
if rc != 0:
raise Exception("Failed to cross compile graviton")
if not os.path.exists(os.path.join(this_location(), "linux_amd64", "stardog-graviton")):
raise Exception("The linux compile failed")
if not os.path.exists(os.path.join(this_location(), "darwin_amd64",
"stardog-graviton")):
raise Exception("The osx compile failed")
def prep_run(sd_license, release, grav_exe, ssh_key_path):
src_dir = this_location()
work_dir = tempfile.mkdtemp(prefix="graviton",
dir=os.path.abspath(os.path.dirname(__file__)))
try:
files_to_join_and_copy = ['rows.rdf', 'smoke_test_1.py']
for f in files_to_join_and_copy:
shutil.copy(os.path.join(src_dir, f),
os.path.join(work_dir, f))
shutil.copy(sd_license,
os.path.join(work_dir, "stardog-license-key.bin"))
shutil.copy(release,
os.path.join(work_dir, os.path.basename(release)))
shutil.copy(grav_exe,
os.path.join(work_dir, "stardog-graviton"))
shutil.copy(ssh_key_path,
os.path.join(work_dir, "ssh_key"))
return work_dir
finally:
pass
def run_local(work_dir, ssh_key_name, release):
print("Running in %s" % work_dir)
cmd = "python %s %s %s %s %s" % (
os.path.join(work_dir, "smoke_test_1.py"),
work_dir, release, ssh_key_name, os.path.dirname(this_location()))
print("Running %s" % cmd)
p = subprocess.Popen(cmd, shell=True, cwd=work_dir)
rc = p.wait()
if rc != 0:
raise Exception("Failed to run the smoke test")
print ("XXX Local run was successful")
def build_docker(image_name):
print("Building the docker container")
cmd = "docker build -t %s . --no-cache" % image_name
p = subprocess.Popen(cmd, shell=True, cwd=this_location())
rc = p.wait()
if rc != 0:
raise Exception("Failed build the container")
def compile_linux(image_name):
print("Compiling in a docker container")
top_dir = os.path.join(this_location(), "..")
try:
os.makedirs(os.path.join(this_location(), "release", "linux_amd64"))
except:
pass
internal_gopath = "/opt/go/src/"
docker_cmd = "/usr/lib/go-1.10/bin/go build -o %s/src/github.com/stardog-union/stardog-graviton/release/linux_amd64/stardog-graviton github.com/stardog-union/stardog-graviton/cmd/stardog-graviton" % internal_gopath
cmd = "docker run -e GOPATH=%s -v %s:%s/src/github.com/stardog-union/stardog-graviton -it %s %s" % (internal_gopath, top_dir, internal_gopath, image_name, docker_cmd)
print(cmd)
p = subprocess.Popen(cmd, shell=True, cwd=this_location())
rc = p.wait()
if rc != 0:
raise Exception("Failed build the container")
def run_docker(work_dir, ssh_key_name, release, image_name):
print("Running docker for testing...")
cmd = "docker run -v %s:/smoke " \
"-e AWS_SECRET_ACCESS_KEY=%s " \
"-e AWS_ACCESS_KEY_ID=%s " \
"-it %s " \
"python /smoke/smoke_test_1.py /smoke %s %s" %\
(work_dir,
os.environ['AWS_SECRET_ACCESS_KEY'],
os.environ['AWS_ACCESS_KEY_ID'],
image_name, release, ssh_key_name)
p = subprocess.Popen(cmd, shell=True, cwd=work_dir)
rc = p.wait()
if rc != 0:
raise Exception("Failed to run the smoke tests in the container")
def print_usage():
print("Invalid arguments:")
print("<path to stardog license> <path to stardog release file>"
" <path to ssh private key> <aws key name>")
def get_version():
cmd = "git describe --abbrev=0 --tags"
work_dir = os.path.dirname(this_location())
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, cwd=work_dir)
(o, e) = p.communicate()
rc = p.wait()
if rc != 0:
raise Exception("Failed to zip the file")
    # subprocess returns bytes under Python 3; decode before use
    return o.decode().strip()
def zip_one(arch):
ver = get_version()
work_dir = os.path.join(this_location(), arch)
cmd = "zip stardog-graviton_%s_%s.zip stardog-graviton" % (ver, arch)
p = subprocess.Popen(cmd, shell=True, cwd=work_dir)
rc = p.wait()
if rc != 0:
raise Exception("Failed to zip the file")
def darwin_test(sd_license, release, ssh_key_path, ssh_key_name):
try:
darwin_binary = os.path.join(this_location(),
"darwin_amd64", "stardog-graviton")
release_name = os.path.basename(release)
work_dir = prep_run(sd_license, release, darwin_binary, ssh_key_path)
run_local(work_dir, ssh_key_name, release_name)
print("Successfully smoke tested for darwin.")
print("Exe: darwin_amd64/stardog-graviton")
except Exception as ex:
global _g_failed
_g_failed.append("Darwin failed: %s" % str(ex))
print("TEST ERROR darwin %s" % str(ex))
zip_one("darwin_amd64")
def linux_test(sd_license, release, ssh_key_path, ssh_key_name):
try:
build_docker("graviton-release-tester")
compile_linux("graviton-release-tester")
linux_binary = os.path.join(this_location(),
"linux_amd64", "stardog-graviton")
release_name = os.path.basename(release)
work_dir = prep_run(sd_license, release, linux_binary, ssh_key_path)
run_docker(work_dir, ssh_key_name, release_name, "graviton-release-tester")
print("Successfully smoke tested for darwin.")
print("Exe: linux_amd64/stardog-graviton")
except Exception as ex:
global _g_failed
_g_failed.append("Linus failed: %s" % str(ex))
print("TEST ERROR linux %s" % str(ex))
zip_one("linux_amd64")
def main():
    if len(sys.argv) < 5:
print_usage()
return 1
sd_license = sys.argv[1]
release = sys.argv[2]
ssh_key_path = sys.argv[3]
ssh_key_name = sys.argv[4]
checkenv(sd_license, release, ssh_key_path)
build_with_gox()
threads = []
if sys.platform != "darwin":
print("XXXXXX We cannot test of OSX on this platform")
else:
t = threading.Thread(
target=darwin_test,
args=(sd_license, release, ssh_key_path, ssh_key_name))
threads.append(t)
t.start()
t = threading.Thread(
target=linux_test,
args=(sd_license, release, ssh_key_path, ssh_key_name))
threads.append(t)
t.start()
print("Started %d tests, waiting for completion..." % len(threads))
for t in threads:
t.join()
if len(_g_failed) != 0:
print("The tests failed %s" % _g_failed)
return 1
print("Success!")
return 0
if __name__ == "__main__":
rc = main()
sys.exit(rc)
| stardog-union/stardog-graviton | release/release.py | Python | apache-2.0 | 8,041 | 0.000871 |
#!/usr/bin/python
# Sends a GET request to a local server
# Author: schdub
import socket
import sys
def GET(host, path, port):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(0.30)
s.connect((host, port))
    # an HTTP request must end with a blank line, otherwise the server keeps waiting
    s.send("GET %s HTTP/1.0\r\n\r\n" % (path))
total_data = []
while True:
data = s.recv(8192)
if (len(data)>0):
total_data.append(data)
else:
break
print ''.join(total_data)
s.shutdown(1)
s.close()
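# Example invocation (host and path are illustrative):
#     python get.py 127.0.0.1 /index.html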
GET(sys.argv[1], sys.argv[2], 3000) | MrOnlineCoder/shockd | scripts/get.py | Python | mit | 523 | 0.00956 |
#! /usr/bin/env python3
#
# importing_modules.py
#
# Author: Billy Wilson Arante
# Created: 2/24/2016 PHT
#
import fibo
def test():
"""Test cases."""
print('Example 1:')
fibo.fib(1000)
print('Example 2:')
print(fibo.fib1(1000))
print('Example 3:')
print(fibo.__name__)
# Assigning function a local name
fib = fibo.fib
print('Example 4:')
fib(1000)
if __name__ == '__main__':
test()
| arantebillywilson/python-snippets | py3/py344-tutor/ch06-modules/importing_modules.py | Python | mit | 444 | 0.004505 |
#!/usr/bin/env python3
#
# This file is part of the MicroPython project, http://micropython.org/
#
# The MIT License (MIT)
#
# Copyright (c) 2020 Damien P. George
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
This script is used to compute metrics, like code size, of the various ports.
Typical usage is:
$ ./tools/metrics.py build | tee size0
<wait for build to complete>
$ git switch new-feature-branch
$ ./tools/metrics.py build | tee size1
<wait for build to complete>
$ ./tools/metrics.py diff size0 size1
Other commands:
$ ./tools/metrics.py sizes # print all firmware sizes
$ ./tools/metrics.py clean # clean all ports
"""
import collections, sys, re, subprocess
MAKE_FLAGS = ["-j3", "CFLAGS_EXTRA=-DNDEBUG"]
class PortData:
def __init__(self, name, dir, output, make_flags=None):
self.name = name
self.dir = dir
self.output = output
self.make_flags = make_flags
self.needs_mpy_cross = dir not in ("bare-arm", "minimal")
port_data = {
"b": PortData("bare-arm", "bare-arm", "build/firmware.elf"),
"m": PortData("minimal x86", "minimal", "build/firmware.elf"),
"u": PortData("unix x64", "unix", "micropython"),
"n": PortData("unix nanbox", "unix", "micropython-nanbox", "VARIANT=nanbox"),
"s": PortData("stm32", "stm32", "build-PYBV10/firmware.elf", "BOARD=PYBV10"),
"c": PortData("cc3200", "cc3200", "build/WIPY/release/application.axf", "BTARGET=application"),
"8": PortData("esp8266", "esp8266", "build-GENERIC/firmware.elf"),
"3": PortData("esp32", "esp32", "build-GENERIC/application.elf"),
"r": PortData("nrf", "nrf", "build-pca10040/firmware.elf"),
"d": PortData("samd", "samd", "build-ADAFRUIT_ITSYBITSY_M4_EXPRESS/firmware.elf"),
}
def syscmd(*args):
sys.stdout.flush()
a2 = []
for a in args:
if isinstance(a, str):
a2.append(a)
elif a:
a2.extend(a)
subprocess.check_call(a2)
def parse_port_list(args):
if not args:
return list(port_data.values())
else:
ports = []
for arg in args:
for port_char in arg:
try:
ports.append(port_data[port_char])
except KeyError:
print("unknown port:", port_char)
sys.exit(1)
return ports
def read_build_log(filename):
data = collections.OrderedDict()
lines = []
found_sizes = False
with open(filename) as f:
for line in f:
line = line.strip()
if line.strip() == "COMPUTING SIZES":
found_sizes = True
elif found_sizes:
lines.append(line)
is_size_line = False
for line in lines:
if is_size_line:
fields = line.split()
data[fields[-1]] = [int(f) for f in fields[:-2]]
is_size_line = False
else:
is_size_line = line.startswith("text\t ")
return data
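# read_build_log() expects output captured from "metrics.py build": the
# "COMPUTING SIZES" marker followed by `size` output per port, e.g.
# (numbers illustrative):
#
#    text    data     bss     dec     hex filename
#   123456    7890     123  131469   2018d ports/minimal/build/firmware.elf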
def do_diff(args):
"""Compute the difference between firmware sizes."""
# Parse arguments.
error_threshold = None
if len(args) >= 2 and args[0] == "--error-threshold":
args.pop(0)
error_threshold = int(args.pop(0))
if len(args) != 2:
print("usage: %s diff [--error-threshold <x>] <out1> <out2>" % sys.argv[0])
sys.exit(1)
data1 = read_build_log(args[0])
data2 = read_build_log(args[1])
max_delta = None
for key, value1 in data1.items():
value2 = data2[key]
for port in port_data.values():
if key == "ports/{}/{}".format(port.dir, port.output):
name = port.name
break
data = [v2 - v1 for v1, v2 in zip(value1, value2)]
warn = ""
board = re.search(r"/build-([A-Za-z0-9_]+)/", key)
if board:
board = board.group(1)
else:
board = ""
if name == "cc3200":
delta = data[0]
percent = 100 * delta / value1[0]
if data[1] != 0:
warn += " %+u(data)" % data[1]
else:
delta = data[3]
percent = 100 * delta / value1[3]
if data[1] != 0:
warn += " %+u(data)" % data[1]
if data[2] != 0:
warn += " %+u(bss)" % data[2]
if warn:
warn = "[incl%s]" % warn
print("%11s: %+5u %+.3f%% %s%s" % (name, delta, percent, board, warn))
max_delta = delta if max_delta is None else max(max_delta, delta)
if error_threshold is not None and max_delta is not None:
if max_delta > error_threshold:
sys.exit(1)
def do_clean(args):
"""Clean ports."""
ports = parse_port_list(args)
print("CLEANING")
for port in ports:
syscmd("make", "-C", "ports/{}".format(port.dir), port.make_flags, "clean")
def do_build(args):
"""Build ports and print firmware sizes."""
ports = parse_port_list(args)
if any(port.needs_mpy_cross for port in ports):
print("BUILDING MPY-CROSS")
syscmd("make", "-C", "mpy-cross", MAKE_FLAGS)
print("BUILDING PORTS")
for port in ports:
syscmd("make", "-C", "ports/{}".format(port.dir), MAKE_FLAGS, port.make_flags)
do_sizes(args)
def do_sizes(args):
"""Compute and print sizes of firmware."""
ports = parse_port_list(args)
print("COMPUTING SIZES")
for port in ports:
syscmd("size", "ports/{}/{}".format(port.dir, port.output))
def main():
# Get command to execute
if len(sys.argv) == 1:
print("Available commands:")
for cmd in globals():
if cmd.startswith("do_"):
print(" {:9} {}".format(cmd[3:], globals()[cmd].__doc__))
sys.exit(1)
cmd = sys.argv.pop(1)
# Dispatch to desired command
try:
cmd = globals()["do_{}".format(cmd)]
except KeyError:
print("{}: unknown command '{}'".format(sys.argv[0], cmd))
sys.exit(1)
cmd(sys.argv[1:])
if __name__ == "__main__":
main()
| kerneltask/micropython | tools/metrics.py | Python | mit | 7,086 | 0.001129 |
import asyncio
from aiohttp.web import Application
from Todos import handlers
def create_server(loop, handler, host, port):
srv = loop.create_server(handler, host, port)
return loop.run_until_complete(srv)
def create_app(loop):
app = Application(loop=loop)
handler = app.make_handler()
return app, handler
def run_server():
loop = asyncio.get_event_loop()
app, handler = create_app(loop=loop)
server = create_server(loop=loop, handler=handler,
host='0.0.0.0', port=9000)
app.router.add_route('GET', '/todos', handlers.get_all_todos)
app.router.add_route('POST', '/todos', handlers.create_todos)
app.router.add_route('PATCH', '/todos', handlers.update_todos)
app.router.add_route('DELETE', '/todos', handlers.remove_todos)
app.router.add_route('GET', '/todos/{id}', handlers.get_todo)
app.router.add_route('PATCH', '/todos/{id}', handlers.update_todo)
app.router.add_route('DELETE', '/todos/{id}', handlers.remove_todo)
try:
loop.run_forever()
except KeyboardInterrupt:
pass
finally:
server.close()
loop.run_until_complete(server.wait_closed())
loop.run_until_complete(handler.finish_connections(1.0))
loop.run_until_complete(app.finish())
loop.close()
if __name__ == '__main__':
run_server()
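# Once running, the endpoints can be exercised with, e.g. (illustrative):
#     curl http://localhost:9000/todos
#     curl -X POST http://localhost:9000/todos
#     curl http://localhost:9000/todos/1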
| alexeyraspopov/aiohttp-mongodb-example | server.py | Python | mit | 1,357 | 0 |
# This was pulled from one of the python/opencv sites appearing
# in a Google search. Need to find site and add attribution!
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# USAGE (original script): specify a filter and exactly one image source
#
# (python) range-detector --filter RGB --image /path/to/image.png
# or
# (python) range-detector --filter HSV --webcam
#
# NOTE: this trimmed variant hardcodes the HSV filter and webcam input.
import cv2
def callback(value):
pass
def setup_trackbars(range_filter):
cv2.namedWindow("Trackbars", 0)
for i in ["MIN", "MAX"]:
v = 0 if i == "MIN" else 255
for j in range_filter:
cv2.createTrackbar("%s_%s" % (j, i), "Trackbars", v, 255, callback)
def get_trackbar_values(range_filter):
values = []
for i in ["MIN", "MAX"]:
for j in range_filter:
v = cv2.getTrackbarPos("%s_%s" % (j, i), "Trackbars")
values.append(v)
return values
def main():
range_filter = 'HSV'
camera = cv2.VideoCapture(1)
setup_trackbars(range_filter)
while True:
ret, image = camera.read()
if not ret:
break
if range_filter == 'RGB':
frame_to_thresh = image.copy()
else:
frame_to_thresh = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
v1_min, v2_min, v3_min, v1_max, v2_max, v3_max = get_trackbar_values(range_filter)
thresh = cv2.inRange(frame_to_thresh, (v1_min, v2_min, v3_min), (v1_max, v2_max, v3_max))
cv2.imshow("Original", image)
cv2.imshow("Thresh", thresh)
        if cv2.waitKey(1) & 0xFF == ord('q'):
break
if __name__ == '__main__':
main()
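# Press 'q' in either window to quit; the six trackbars set the HSV
# min/max bounds applied by cv2.inRange() above.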
| jgerschler/ESL-Games | Camera Pistol/range-detector.py | Python | mit | 1,679 | 0.004169 |
#!/usr/bin/env python2.6
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
import cgi
import SocketServer
import ssl
import re
import setproctitle
import others.dict2xml as dict2xml
import sober.config
import sober.settings
import sober.rule
__version__ = 'Sober HTTP/1.0'
__service__ = 'sober'
class WSHandler(SimpleHTTPRequestHandler):
value = None
def load_blacklist(self):
return self.settings.get_blacklist()
def load_whitelist(self):
return {'item': self.value}
def load_settings(self):
return self.settings.get()
def return_error(self):
return 'error'
def do_POST(self):
self.do_GET()
def do_GET(self):
try:
path = self.path.strip().split('/')
if len(path) > 5 and self.command == 'GET':
self.value = path[5]
self.object_type = path[2]
resource = path[3]
resource = 'self.do_'+ resource.upper() + '()'
response = eval(resource)
self.send_ok_response(self.to_xml(response, resource))
elif self.command == 'POST':
self.action = path[3]
resource = path[2]
resource = 'self.do_'+ resource.upper() + '()'
response = eval(resource)
self.send_ok_response(self.to_xml(response, resource))
else:
self.send_ok_response(self.to_xml(self.error_data('missing_arguments'), 'webservices'))
except Exception, e:
self.send_ok_response(self.to_xml(self.error_data(str(e)), resource))
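    # URL shapes consumed above (segment names are illustrative):
    #   GET  /<prefix>/<object_type>/<resource>/<_>/<value>
    #        e.g. /ws/user/whitelist/get/jdoe -> do_WHITELIST() with value "jdoe"
    #   POST /<prefix>/<resource>/<action> with a form-encoded body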
def do_SETTINGS(self):
settings = sober.settings.Settings().get(self.object_type, self.value)
if type(settings).__name__ == 'instance':
response = {'settings': {
'type': self.object_type,
'name': settings.get_cn()[0],
'surename': settings.get_sn()[0],
'uid': settings.get_uid()[0],
'homeDirectory': settings.get_homeDirectory()[0],
'mail': settings.get_mail()[0],
'soberMailConditions': settings.get_soberMailConditions(),
'soberMailVirusCheck': settings.get_soberMailVirusCheck()[0],
'soberMailVirusAction': settings.get_soberMailVirusAction()[0],
'soberMailSpamCheck': settings.get_soberMailSpamCheck()[0],
'soberMailSpamKillLevel': settings.get_soberMailSpamKillLevel()[0],
'soberMailSpamTagLevel': settings.get_soberMailSpamTagLevel()[0],
'soberMailSpamTag2Level': settings.get_soberMailSpamTag2Level()[0],
}
}
return response
return self.error_data('not_found')
def do_BLACKLIST(self):
settings = sober.settings.Settings().get(self.object_type, self.value)
rules = settings.get_soberMailRule()
blacklist = {}
for rule in rules:
if re.search("blacklist[0-9]+", rule[1]['cn'][0]):
i = 0
for cond in rule[1]['soberMailRuleCondition']:
cond = eval(cond)
blacklist['item' + str(i)] = cond[0]['From']
i = i + 1
response = {'blacklist': {'from': blacklist } }
return response
return self.error_data('not_found')
def do_WHITELIST(self):
settings = sober.settings.Settings().get(self.object_type, self.value)
try:
rules = settings.get_soberMailRule()
except AttributeError:
return self.error_data('not_found')
whitelist = {}
for rule in rules:
if re.search("whitelist[0-9]+", rule[1]['cn'][0]):
i = 0
for cond in rule[1]['soberMailRuleCondition']:
cond = eval(cond)
for addr in cond[0]['From']:
whitelist['item' + str(i)] = addr
i = i + 1
response = {'whitelist': {'from': whitelist } }
return response
return self.error_data('not_found')
def do_RULE(self):
# POST
if self.command == 'POST':
postvars = None
ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
length = int(self.headers.getheader('content-length'))
data = self.rfile.read(length)
if ctype == 'multipart/form-data':
postvars = cgi.parse_multipart(data, pdict)
elif ctype == 'application/x-www-form-urlencoded':
postvars = cgi.parse_qs(data, keep_blank_values=1)
name = postvars['name'][0]
direction = tuple(postvars['direction'])
sentence = {}
items = {}
conditions = {}
for key, val in postvars.iteritems():
reg = re.search(r'(item|condition)\[(.*)\]', key)
if reg:
i = int(reg.group(2))
if reg.group(1).strip() == 'item':
items[i] = tuple(val)
elif reg.group(1) == 'condition':
try:
parts = val[0].split(':')
conditions[i] = {parts[0]: { parts[1]: None}}
except:
conditions[i] = {val[0]: None}
temp = {}
for key, val in conditions.iteritems():
for skey, sval in val.iteritems():
if type(sval).__name__ == 'dict':
temp[skey] = {sval.keys()[0]: ('in', items[key])}
else:
temp[skey] = ('in', items[key])
sobermailrulecondition = '(%s)' % str(temp)
return {'rule': { 'name': name, 'directions': direction, 'conditions': sobermailrulecondition } }
# GET
rule = sober.rule.Rule().get(self.value)
name = rule.get_cn()[0]
directions = eval(rule.get_soberMailRuleDirection()[0])
actions = {}
conditions = {}
i = 0
for action in eval(rule.get_soberMailRuleAction()[0]):
actions['action' + str(i)] = action
i = i + 1
i = 0
x = 0
for condition in rule.get_soberMailRuleCondition():
cond = eval(condition)[0]
rtype = cond.keys()[0]
if not rtype in conditions:
conditions[rtype] = {}
if type(cond[rtype]).__name__ == 'tuple':
items = {}
if len(cond[rtype]) > 2 :
x = 0
for item in cond[rtype]:
items['item'+ str(x)] = item
x = x + 1
conditions[rtype] = items
elif len(cond[rtype]) == 1:
x = 0
for item in cond[rtype]:
items['item'+ str(x)] = item
x = x + 1
conditions[rtype] = items
else:
op = cond[rtype][0]
items = {}
x = 0
for item in cond[rtype][1]:
items['word'+ str(x)] = item
x = x + 1
conditions[rtype][op] = items
else:
for item in cond[rtype].iteritems():
if item[0] not in conditions[rtype]:
x = 0
conditions[rtype][item[0]] = {}
for word in item[1][1]:
if item[1][0] not in conditions[rtype][item[0]]:
conditions[rtype][item[0]][item[1][0]] = {}
conditions[rtype][item[0]][item[1][0]]['word' + str(x)] = word
x = x + 1
# end main conditions loop
i = i + 1
drt = {}
x = 0
for direction in directions:
            drt['direction' + str(x)] = direction
            x = x + 1
response = {'rule': {'name': name, 'directions': drt, 'conditions': conditions, 'actions': actions } }
return response
def send_ok_response(self, data):
self.send_response(200)
self.send_header('Content-type','text/xml;')
self.end_headers()
self.wfile.write(data)
def to_xml(self, data, name):
pre = '<?xml version="1.0" encoding="UTF-8"?>\n'
xml = dict2xml.dict2Xml({'sober': data}, pre)
return xml
def error_data(self, error):
data = {'response': {'attribute': self.value, 'error': error.upper()} }
return data
class WSServer(SocketServer.ThreadingMixIn, HTTPServer): pass
if __name__ == 'webservice':
config = sober.config.Config()
cfg = config.get_config()
server_address = (cfg.get(__name__, 'listen_address'), int(cfg.get(__name__,'listen_port')))
httpd = WSServer(server_address, WSHandler)
httpd.socket = ssl.wrap_socket(httpd.socket, certfile=cfg.get(__name__, 'ssl_certificate'), server_side=True, ssl_version=ssl.PROTOCOL_SSLv23)
setproctitle.setproctitle('sober (%s: SSL listening)' % (__name__))
httpd.serve_forever()
| theflockers/sober-filter | programs/webservice/__init__.py | Python | gpl-2.0 | 9,399 | 0.005001 |
#!/usr/bin/env python
# -*- coding: utf-8; tab-width: 4; indent-tabs-mode: t -*-
#
# NetProfile: PDF-related tables, utility functions etc.
# © Copyright 2015 Alex 'Unik' Unigovsky
#
# This file is part of NetProfile.
# NetProfile is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# NetProfile is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General
# Public License along with NetProfile. If not, see
# <http://www.gnu.org/licenses/>.
from __future__ import (
unicode_literals,
print_function,
absolute_import,
division
)
__all__ = (
'PAGE_ORIENTATIONS',
'PAGE_SIZES',
'TABLE_STYLE_DEFAULT'
)
import logging
import os
from reportlab.lib import (
colors,
enums,
pagesizes,
styles
)
from reportlab.lib.units import (
cm,
inch
)
from reportlab.pdfbase import (
pdfmetrics,
ttfonts
)
from reportlab.platypus import (
doctemplate,
frames,
tables
)
from pyramid.i18n import TranslationStringFactory
from netprofile.common.util import (
as_dict,
make_config_dict
)
logger = logging.getLogger(__name__)
_ = TranslationStringFactory('netprofile')
_pdfss = None
PAGE_SIZES = {
'4a0' : ('4A0 (DIN 476)', (168.2 * cm, 237.8 * cm)),
'2a0' : ('2A0 (DIN 476)', (118.9 * cm, 168.2 * cm)),
'a0' : ('A0 (ISO 216)', pagesizes.A0),
'a1' : ('A1 (ISO 216)', pagesizes.A1),
'a2' : ('A2 (ISO 216)', pagesizes.A2),
'a3' : ('A3 (ISO 216)', pagesizes.A3),
'a4' : ('A4 (ISO 216)', pagesizes.A4),
'a5' : ('A5 (ISO 216)', pagesizes.A5),
'a6' : ('A6 (ISO 216)', pagesizes.A6),
'a7' : ('A7 (ISO 216)', (pagesizes.A6[1] * 0.5, pagesizes.A6[0])),
'a8' : ('A8 (ISO 216)', (pagesizes.A6[0] * 0.5, pagesizes.A6[1] * 0.5)),
'b0' : ('B0 (ISO 216)', pagesizes.B0),
'b1' : ('B1 (ISO 216)', pagesizes.B1),
'b2' : ('B2 (ISO 216)', pagesizes.B2),
'b3' : ('B3 (ISO 216)', pagesizes.B3),
'b4' : ('B4 (ISO 216)', pagesizes.B4),
'b5' : ('B5 (ISO 216)', pagesizes.B5),
'b6' : ('B6 (ISO 216)', pagesizes.B6),
'b7' : ('B7 (ISO 216)', (pagesizes.B6[1] * 0.5, pagesizes.B6[0])),
'b8' : ('B8 (ISO 216)', (pagesizes.B6[0] * 0.5, pagesizes.B6[1] * 0.5)),
'c0' : ('C0 (ISO 269)', (91.7 * cm, 129.7 * cm)),
'c1' : ('C1 (ISO 269)', (64.8 * cm, 91.7 * cm)),
'c2' : ('C2 (ISO 269)', (45.8 * cm, 64.8 * cm)),
'c3' : ('C3 (ISO 269)', (32.4 * cm, 45.8 * cm)),
'c4' : ('C4 (ISO 269)', (22.9 * cm, 32.4 * cm)),
'c5' : ('C5 (ISO 269)', (16.2 * cm, 22.9 * cm)),
'c6' : ('C6 (ISO 269)', (11.4 * cm, 16.2 * cm)),
'c7' : ('C7 (ISO 269)', (8.1 * cm, 11.4 * cm)),
'c8' : ('C8 (ISO 269)', (5.7 * cm, 8.1 * cm)),
'e5' : ('E5 (SS 014711)', (15.5 * cm, 22 * cm)),
'g5' : ('G5 (SS 014711)', (16.9 * cm, 23.9 * cm)),
'f4' : ('F4', (21 * cm, 33 * cm)),
'a3p' : ('A3+', (32.9 * cm, 48.3 * cm)),
'dl' : ('DL (ISO 269)', (9.9 * cm, 21 * cm)),
'dle' : ('DLE (ISO 269)', (11 * cm, 22 * cm)),
'e4' : ('E4 (ISO 269)', (28 * cm, 40 * cm)),
'c6c5' : ('C6/C5 (ISO 269)', (11.4 * cm, 22.9 * cm)),
'jb0' : ('JIS B0', (103 * cm, 145.6 * cm)),
'jb1' : ('JIS B1', (72.8 * cm, 103 * cm)),
'jb2' : ('JIS B2', (51.5 * cm, 72.8 * cm)),
'jb3' : ('JIS B3', (36.4 * cm, 51.5 * cm)),
'jb4' : ('JIS B4', (25.7 * cm, 36.4 * cm)),
'jb5' : ('JIS B5', (18.2 * cm, 25.7 * cm)),
'jb6' : ('JIS B6', (12.8 * cm, 18.2 * cm)),
'jb7' : ('JIS B7', (9.1 * cm, 12.8 * cm)),
'jb8' : ('JIS B8', (6.4 * cm, 9.1 * cm)),
'letter' : ('Letter (ANSI A)', pagesizes.LETTER),
'h_letter' : ('Half Letter', (pagesizes.LETTER[1] * 0.5, pagesizes.LETTER[0])),
'exec' : ('Executive', (7 * inch, 10 * inch)),
'g_letter' : ('Government-Letter', (8 * inch, 10.5 * inch)),
'legal' : ('Legal', pagesizes.LEGAL),
'j_legal' : ('Junior Legal', (5 * inch, 8 * inch)),
'11by17' : ('Tabloid (ANSI B)', pagesizes.ELEVENSEVENTEEN),
'ansi_c' : ('ANSI C', (17 * inch, 22 * inch)),
'ansi_d' : ('ANSI D', (22 * inch, 34 * inch)),
'ansi_e' : ('ANSI E', (34 * inch, 44 * inch)),
'p1' : ('P1 (CAN 2-9.60M)', (56 * cm, 86 * cm)),
'p2' : ('P2 (CAN 2-9.60M)', (43 * cm, 56 * cm)),
'p3' : ('P3 (CAN 2-9.60M)', (28 * cm, 43 * cm)),
'p4' : ('P4 (CAN 2-9.60M)', (21.5 * cm, 28 * cm)),
'p5' : ('P5 (CAN 2-9.60M)', (14 * cm, 21.5 * cm)),
'p6' : ('P6 (CAN 2-9.60M)', (10.7 * cm, 14 * cm)),
'pli1' : ('Pliego', (70 * cm, 100 * cm)),
'pli2' : ('½ pliego', (50 * cm, 70 * cm)),
'pli4' : ('¼ pliego', (35 * cm, 50 * cm)),
'pli8' : ('⅛ pliego', (25 * cm, 35 * cm)),
'carta' : ('Carta', (21.6 * cm, 27.9 * cm)),
'oficio' : ('Oficio', (21.6 * cm, 33 * cm)),
'exttab' : ('Extra Tabloide', (30.48 * cm, 45.72 * cm))
}
PAGE_ORIENTATIONS = {
'portrait' : (_('Portrait'), pagesizes.portrait),
'landscape' : (_('Landscape'), pagesizes.landscape)
}
TABLE_STYLE_DEFAULT = tables.TableStyle((
('GRID', (0, 0), (-1, -1), 0.2, colors.dimgrey),
('TEXTCOLOR', (0, 0), (-1, 0), colors.black),
('BACKGROUND', (0, 0), (-1, 0), colors.HexColor(0xe6e6e6)),
('ROWBACKGROUNDS', (0, 1), (-1, -1), (colors.white, colors.HexColor(0xf5f5f5)))
))
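# Minimal usage sketch (row data illustrative):
#
#     tbl = tables.Table([['Name', 'Value'], ['foo', '42']], repeatRows=1)
#     tbl.setStyle(TABLE_STYLE_DEFAULT)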
class DefaultDocTemplate(doctemplate.BaseDocTemplate):
def __init__(self, filename, **kwargs):
pgsz = kwargs.pop('pagesize', 'a4')
if pgsz in PAGE_SIZES:
pgsz = PAGE_SIZES[pgsz][1]
else:
pgsz = pagesizes.A4
orient = kwargs.pop('orientation', 'portrait')
if orient in PAGE_ORIENTATIONS:
pgsz = PAGE_ORIENTATIONS[orient][1](pgsz)
kwargs['pagesize'] = pgsz
kwargs['creator'] = 'NetProfile'
req = kwargs.pop('request', None)
if req:
u = req.user
if u:
kwargs['author'] = (u.name_full + ' (' + u.login + ')').strip()
super(DefaultDocTemplate, self).__init__(filename, **kwargs)
fr_body = frames.Frame(
self.leftMargin,
self.bottomMargin,
self.width,
self.height,
id='body'
)
fr_left = frames.Frame(
self.leftMargin,
self.bottomMargin,
self.width / 2,
self.height,
rightPadding=12,
id='left'
)
fr_right = frames.Frame(
self.leftMargin + self.width / 2,
self.bottomMargin,
self.width / 2,
self.height,
leftPadding=12,
id='right'
)
self.addPageTemplates((
doctemplate.PageTemplate(id='default', pagesize=pgsz, frames=(fr_body,)), # onPage=callback
doctemplate.PageTemplate(id='2columns', pagesize=pgsz, frames=(fr_left, fr_right))
))
def _register_fonts(settings):
default_fontdir = settings.get('netprofile.fonts.directory', '')
default_family = settings.get('netprofile.fonts.default_family', 'tinos')
fontcfg = make_config_dict(settings, 'netprofile.fonts.family.')
fontcfg = as_dict(fontcfg)
for fname, cfg in fontcfg.items():
if 'normal' not in cfg:
continue
fname = cfg.get('name', fname)
fontdir = cfg.get('directory', default_fontdir)
pdfmetrics.registerFont(ttfonts.TTFont(
fname,
os.path.join(fontdir, cfg['normal'])
))
reg = { 'normal' : fname }
if 'bold' in cfg:
reg['bold'] = fname + '_b'
pdfmetrics.registerFont(ttfonts.TTFont(
reg['bold'],
os.path.join(fontdir, cfg['bold'])
))
else:
reg['bold'] = fname
if 'italic' in cfg:
reg['italic'] = fname + '_i'
pdfmetrics.registerFont(ttfonts.TTFont(
reg['italic'],
os.path.join(fontdir, cfg['italic'])
))
else:
reg['italic'] = fname
if 'bold_italic' in cfg:
reg['boldItalic'] = fname + '_bi'
pdfmetrics.registerFont(ttfonts.TTFont(
reg['boldItalic'],
os.path.join(fontdir, cfg['bold_italic'])
))
else:
reg['boldItalic'] = fname
pdfmetrics.registerFontFamily(fname, **reg)
if default_family in fontcfg:
return default_family
return 'Times-Roman'
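# The settings consumed above correspond to INI entries such as
# (paths and file names illustrative):
#
#     netprofile.fonts.directory = /usr/share/fonts/truetype
#     netprofile.fonts.default_family = tinos
#     netprofile.fonts.family.tinos.normal = Tinos-Regular.ttf
#     netprofile.fonts.family.tinos.bold = Tinos-Bold.ttf
#     netprofile.fonts.family.tinos.italic = Tinos-Italic.ttf
#     netprofile.fonts.family.tinos.bold_italic = Tinos-BoldItalic.ttf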
def _add_custom_ss(ss, custom_cfg, name):
pass
def _pdf_style_sheet(cfg):
settings = cfg.registry.settings
try:
ffamily = _register_fonts(settings)
except ttfonts.TTFError:
logger.error('Can\'t find or register configured fonts. PDF generation will be disabled.')
return None
if ffamily == 'Times-Roman':
fonts = ('Times-Roman', 'Times-Bold', 'Times-Italic', 'Times-BoldItalic')
else:
fonts = (ffamily, ffamily + '_b', ffamily + '_i', ffamily + '_bi')
ss = styles.StyleSheet1()
ss.add(styles.ParagraphStyle(
name='default',
fontName=fonts[0],
fontSize=10,
leading=12
))
ss.add(styles.ParagraphStyle(
name='body',
parent=ss['default'],
spaceBefore=6
))
ss.add(styles.ParagraphStyle(
name='bold',
parent=ss['body'],
fontName=fonts[1],
alias='strong'
))
ss.add(styles.ParagraphStyle(
name='italic',
parent=ss['body'],
fontName=fonts[2],
alias='em'
))
ss.add(styles.ParagraphStyle(
name='title',
parent=ss['body'],
fontName=fonts[1],
fontSize=14
))
ss.add(styles.ParagraphStyle(
name='table_header',
parent=ss['body'],
fontName=fonts[1],
alias='th'
))
custom_ss = make_config_dict(settings, 'netprofile.pdf_styles.')
if len(custom_ss) > 0:
custom_ss = as_dict(custom_ss)
for name in custom_ss:
pass # FIXME: write this
logger.debug('Loaded preconfigured PDF fonts and styles.')
return ss
def _get_pdfss(req):
global _pdfss
return _pdfss
def includeme(config):
"""
For inclusion by Pyramid.
"""
global _pdfss
_pdfss = _pdf_style_sheet(config)
config.add_request_method(_get_pdfss, 'pdf_styles', reify=True)
| nikitos/npui | netprofile/netprofile/pdf/__init__.py | Python | agpl-3.0 | 10,229 | 0.03257 |
# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Common testing utilities between samples
"""
import __builtin__
import contextlib
import json
import os
import StringIO
import sys
import unittest
from google.appengine.datastore import datastore_stub_util
from google.appengine.ext import testbed
BUCKET_NAME_ENV = 'TEST_BUCKET_NAME'
PROJECT_ID_ENV = 'TEST_PROJECT_ID'
RESOURCE_PATH = os.path.join(
os.path.abspath(os.path.dirname(__file__)), 'resources')
class mock_raw_input(object):
def __init__(self, list_):
self.i = 0
self.list_ = list_
def get_next_value(self, question):
ret = self.list_[self.i]
self.i += 1
return ret
def __enter__(self):
self.raw_input_cache = __builtin__.raw_input
__builtin__.raw_input = self.get_next_value
def __exit__(self, exc_type, exc_value, traceback):
__builtin__.raw_input = self.raw_input_cache
class CloudBaseTest(unittest.TestCase):
def setUp(self):
self.resource_path = RESOURCE_PATH
# A hack to prevent get_application_default from going GAE route.
self._server_software_org = os.environ.get('SERVER_SOFTWARE')
os.environ['SERVER_SOFTWARE'] = ''
# Constants from environment
test_bucket_name = os.environ.get(BUCKET_NAME_ENV, '')
test_project_id = os.environ.get(PROJECT_ID_ENV, '')
if not test_project_id or not test_bucket_name:
raise Exception('You need to define an env var "%s" and "%s" to '
'run the test.'
% (PROJECT_ID_ENV, BUCKET_NAME_ENV))
# Constants from resources/constants.json
with open(
os.path.join(RESOURCE_PATH, 'constants.json'),
'r') as constants_file:
self.constants = json.load(constants_file)
self.constants['projectId'] = test_project_id
self.constants['bucketName'] = test_bucket_name
self.constants['cloudStorageInputURI'] = (
self.constants['cloudStorageInputURI'] % test_bucket_name)
self.constants['cloudStorageOutputURI'] = (
self.constants['cloudStorageOutputURI'] % test_bucket_name)
def tearDown(self):
os.environ['SERVER_SOFTWARE'] = self._server_software_org
class DatastoreTestbedCase(unittest.TestCase):
"""A base test case for common setup/teardown tasks for test."""
def setUp(self):
"""Setup the datastore and memcache stub."""
# First, create an instance of the Testbed class.
self.testbed = testbed.Testbed()
# Then activate the testbed, which prepares the service stubs for
# use.
self.testbed.activate()
# Create a consistency policy that will simulate the High
# Replication consistency model.
self.policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
probability=0)
# Initialize the datastore stub with this policy.
self.testbed.init_datastore_v3_stub(consistency_policy=self.policy)
self.testbed.init_memcache_stub()
def tearDown(self):
self.testbed.deactivate()
@contextlib.contextmanager
def capture_stdout():
"""Capture stdout."""
fake_stdout = StringIO.StringIO()
old_stdout = sys.stdout
try:
sys.stdout = fake_stdout
yield fake_stdout
finally:
sys.stdout = old_stdout
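# Usage sketch for capture_stdout (illustrative):
#
#     with capture_stdout() as fake:
#         print 'hello'
#     assert fake.getvalue() == 'hello\n'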
| kocicjelena/python-docs-samples | tests/__init__.py | Python | apache-2.0 | 3,946 | 0 |
"""The tests for the Sonarr platform."""
import unittest
import unittest.mock
import time
from datetime import datetime
import pytest
from homeassistant.components.sensor import sonarr
from tests.common import get_test_home_assistant
def mocked_exception(*args, **kwargs):
"""Mock exception thrown by requests.get."""
raise OSError
def mocked_requests_get(*args, **kwargs):
"""Mock requests.get invocations."""
class MockResponse:
"""Class to represent a mocked response."""
def __init__(self, json_data, status_code):
"""Initialize the mock response class."""
self.json_data = json_data
self.status_code = status_code
def json(self):
"""Return the json of the response."""
return self.json_data
today = datetime.date(datetime.fromtimestamp(time.time()))
url = str(args[0])
if 'api/calendar' in url:
return MockResponse([
{
"seriesId": 3,
"episodeFileId": 0,
"seasonNumber": 4,
"episodeNumber": 11,
"title": "Easy Com-mercial, Easy Go-mercial",
"airDate": str(today),
"airDateUtc": "2014-01-27T01:30:00Z",
"overview": "To compete with fellow “restaurateur,” Ji...",
"hasFile": "false",
"monitored": "true",
"sceneEpisodeNumber": 0,
"sceneSeasonNumber": 0,
"tvDbEpisodeId": 0,
"series": {
"tvdbId": 194031,
"tvRageId": 24607,
"imdbId": "tt1561755",
"title": "Bob's Burgers",
"cleanTitle": "bobsburgers",
"status": "continuing",
"overview": "Bob's Burgers follows a third-generation ...",
"airTime": "5:30pm",
"monitored": "true",
"qualityProfileId": 1,
"seasonFolder": "true",
"lastInfoSync": "2014-01-26T19:25:55.4555946Z",
"runtime": 30,
"images": [
{
"coverType": "banner",
"url": "http://slurm.trakt.us/images/bann.jpg"
},
{
"coverType": "poster",
"url": "http://slurm.trakt.us/images/poster00.jpg"
},
{
"coverType": "fanart",
"url": "http://slurm.trakt.us/images/fan6.jpg"
}
],
"seriesType": "standard",
"network": "FOX",
"useSceneNumbering": "false",
"titleSlug": "bobs-burgers",
"path": "T:\\Bob's Burgers",
"year": 0,
"firstAired": "2011-01-10T01:30:00Z",
"qualityProfile": {
"value": {
"name": "SD",
"allowed": [
{
"id": 1,
"name": "SDTV",
"weight": 1
},
{
"id": 8,
"name": "WEBDL-480p",
"weight": 2
},
{
"id": 2,
"name": "DVD",
"weight": 3
}
],
"cutoff": {
"id": 1,
"name": "SDTV",
"weight": 1
},
"id": 1
},
"isLoaded": "true"
},
"seasons": [
{
"seasonNumber": 4,
"monitored": "true"
},
{
"seasonNumber": 3,
"monitored": "true"
},
{
"seasonNumber": 2,
"monitored": "true"
},
{
"seasonNumber": 1,
"monitored": "true"
},
{
"seasonNumber": 0,
"monitored": "false"
}
],
"id": 66
},
"downloading": "false",
"id": 14402
}
], 200)
elif 'api/command' in url:
return MockResponse([
{
"name": "RescanSeries",
"startedOn": "0001-01-01T00:00:00Z",
"stateChangeTime": "2014-02-05T05:09:09.2366139Z",
"sendUpdatesToClient": "true",
"state": "pending",
"id": 24
}
], 200)
elif 'api/wanted/missing' in url or 'totalRecords' in url:
return MockResponse(
{
"page": 1,
"pageSize": 15,
"sortKey": "airDateUtc",
"sortDirection": "descending",
"totalRecords": 1,
"records": [
{
"seriesId": 1,
"episodeFileId": 0,
"seasonNumber": 5,
"episodeNumber": 4,
"title": "Archer Vice: House Call",
"airDate": "2014-02-03",
"airDateUtc": "2014-02-04T03:00:00Z",
"overview": "Archer has to stage an that ... ",
"hasFile": "false",
"monitored": "true",
"sceneEpisodeNumber": 0,
"sceneSeasonNumber": 0,
"tvDbEpisodeId": 0,
"absoluteEpisodeNumber": 50,
"series": {
"tvdbId": 110381,
"tvRageId": 23354,
"imdbId": "tt1486217",
"title": "Archer (2009)",
"cleanTitle": "archer2009",
"status": "continuing",
"overview": "At ISIS, an international spy ...",
"airTime": "7:00pm",
"monitored": "true",
"qualityProfileId": 1,
"seasonFolder": "true",
"lastInfoSync": "2014-02-05T04:39:28.550495Z",
"runtime": 30,
"images": [
{
"coverType": "banner",
"url": "http://slurm.trakt.us//57.12.jpg"
},
{
"coverType": "poster",
"url": "http://slurm.trakt.u/57.12-300.jpg"
},
{
"coverType": "fanart",
"url": "http://slurm.trakt.us/image.12.jpg"
}
],
"seriesType": "standard",
"network": "FX",
"useSceneNumbering": "false",
"titleSlug": "archer-2009",
"path": "E:\\Test\\TV\\Archer (2009)",
"year": 2009,
"firstAired": "2009-09-18T02:00:00Z",
"qualityProfile": {
"value": {
"name": "SD",
"cutoff": {
"id": 1,
"name": "SDTV"
},
"items": [
{
"quality": {
"id": 1,
"name": "SDTV"
},
"allowed": "true"
},
{
"quality": {
"id": 8,
"name": "WEBDL-480p"
},
"allowed": "true"
},
{
"quality": {
"id": 2,
"name": "DVD"
},
"allowed": "true"
},
{
"quality": {
"id": 4,
"name": "HDTV-720p"
},
"allowed": "false"
},
{
"quality": {
"id": 9,
"name": "HDTV-1080p"
},
"allowed": "false"
},
{
"quality": {
"id": 10,
"name": "Raw-HD"
},
"allowed": "false"
},
{
"quality": {
"id": 5,
"name": "WEBDL-720p"
},
"allowed": "false"
},
{
"quality": {
"id": 6,
"name": "Bluray-720p"
},
"allowed": "false"
},
{
"quality": {
"id": 3,
"name": "WEBDL-1080p"
},
"allowed": "false"
},
{
"quality": {
"id": 7,
"name": "Bluray-1080p"
},
"allowed": "false"
}
],
"id": 1
},
"isLoaded": "true"
},
"seasons": [
{
"seasonNumber": 5,
"monitored": "true"
},
{
"seasonNumber": 4,
"monitored": "true"
},
{
"seasonNumber": 3,
"monitored": "true"
},
{
"seasonNumber": 2,
"monitored": "true"
},
{
"seasonNumber": 1,
"monitored": "true"
},
{
"seasonNumber": 0,
"monitored": "false"
}
],
"id": 1
},
"downloading": "false",
"id": 55
}
]
}, 200)
elif 'api/queue' in url:
return MockResponse([
{
"series": {
"title": "Game of Thrones",
"sortTitle": "game thrones",
"seasonCount": 6,
"status": "continuing",
"overview": "Seven noble families fight for land ...",
"network": "HBO",
"airTime": "21:00",
"images": [
{
"coverType": "fanart",
"url": "http://thetvdb.com/banners/fanart/-83.jpg"
},
{
"coverType": "banner",
"url": "http://thetvdb.com/banners/-g19.jpg"
},
{
"coverType": "poster",
"url": "http://thetvdb.com/banners/posters-34.jpg"
}
],
"seasons": [
{
"seasonNumber": 0,
"monitored": "false"
},
{
"seasonNumber": 1,
"monitored": "false"
},
{
"seasonNumber": 2,
"monitored": "true"
},
{
"seasonNumber": 3,
"monitored": "false"
},
{
"seasonNumber": 4,
"monitored": "false"
},
{
"seasonNumber": 5,
"monitored": "true"
},
{
"seasonNumber": 6,
"monitored": "true"
}
],
"year": 2011,
"path": "/Volumes/Media/Shows/Game of Thrones",
"profileId": 5,
"seasonFolder": "true",
"monitored": "true",
"useSceneNumbering": "false",
"runtime": 60,
"tvdbId": 121361,
"tvRageId": 24493,
"tvMazeId": 82,
"firstAired": "2011-04-16T23:00:00Z",
"lastInfoSync": "2016-02-05T16:40:11.614176Z",
"seriesType": "standard",
"cleanTitle": "gamethrones",
"imdbId": "tt0944947",
"titleSlug": "game-of-thrones",
"certification": "TV-MA",
"genres": [
"Adventure",
"Drama",
"Fantasy"
],
"tags": [],
"added": "2015-12-28T13:44:24.204583Z",
"ratings": {
"votes": 1128,
"value": 9.4
},
"qualityProfileId": 5,
"id": 17
},
"episode": {
"seriesId": 17,
"episodeFileId": 0,
"seasonNumber": 3,
"episodeNumber": 8,
"title": "Second Sons",
"airDate": "2013-05-19",
"airDateUtc": "2013-05-20T01:00:00Z",
"overview": "King’s Landing hosts a wedding, and ...",
"hasFile": "false",
"monitored": "false",
"absoluteEpisodeNumber": 28,
"unverifiedSceneNumbering": "false",
"id": 889
},
"quality": {
"quality": {
"id": 7,
"name": "Bluray-1080p"
},
"revision": {
"version": 1,
"real": 0
}
},
"size": 4472186820,
"title": "Game.of.Thrones.S03E08.Second.Sons.2013.1080p.",
"sizeleft": 0,
"timeleft": "00:00:00",
"estimatedCompletionTime": "2016-02-05T22:46:52.440104Z",
"status": "Downloading",
"trackedDownloadStatus": "Ok",
"statusMessages": [],
"downloadId": "SABnzbd_nzo_Mq2f_b",
"protocol": "usenet",
"id": 1503378561
}
], 200)
elif 'api/series' in url:
return MockResponse([
{
"title": "Marvel's Daredevil",
"alternateTitles": [{
"title": "Daredevil",
"seasonNumber": -1
}],
"sortTitle": "marvels daredevil",
"seasonCount": 2,
"totalEpisodeCount": 26,
"episodeCount": 26,
"episodeFileCount": 26,
"sizeOnDisk": 79282273693,
"status": "continuing",
"overview": "Matt Murdock was blinded in a tragic accident...",
"previousAiring": "2016-03-18T04:01:00Z",
"network": "Netflix",
"airTime": "00:01",
"images": [
{
"coverType": "fanart",
"url": "/sonarr/MediaCover/7/fanart.jpg?lastWrite="
},
{
"coverType": "banner",
"url": "/sonarr/MediaCover/7/banner.jpg?lastWrite="
},
{
"coverType": "poster",
"url": "/sonarr/MediaCover/7/poster.jpg?lastWrite="
}
],
"seasons": [
{
"seasonNumber": 1,
"monitored": "false",
"statistics": {
"previousAiring": "2015-04-10T04:01:00Z",
"episodeFileCount": 13,
"episodeCount": 13,
"totalEpisodeCount": 13,
"sizeOnDisk": 22738179333,
"percentOfEpisodes": 100
}
},
{
"seasonNumber": 2,
"monitored": "false",
"statistics": {
"previousAiring": "2016-03-18T04:01:00Z",
"episodeFileCount": 13,
"episodeCount": 13,
"totalEpisodeCount": 13,
"sizeOnDisk": 56544094360,
"percentOfEpisodes": 100
}
}
],
"year": 2015,
"path": "F:\\TV_Shows\\Marvels Daredevil",
"profileId": 6,
"seasonFolder": "true",
"monitored": "true",
"useSceneNumbering": "false",
"runtime": 55,
"tvdbId": 281662,
"tvRageId": 38796,
"tvMazeId": 1369,
"firstAired": "2015-04-10T04:00:00Z",
"lastInfoSync": "2016-09-09T09:02:49.4402575Z",
"seriesType": "standard",
"cleanTitle": "marvelsdaredevil",
"imdbId": "tt3322312",
"titleSlug": "marvels-daredevil",
"certification": "TV-MA",
"genres": [
"Action",
"Crime",
"Drama"
],
"tags": [],
"added": "2015-05-15T00:20:32.7892744Z",
"ratings": {
"votes": 461,
"value": 8.9
},
"qualityProfileId": 6,
"id": 7
}
], 200)
elif 'api/diskspace' in url:
return MockResponse([
{
"path": "/data",
"label": "",
"freeSpace": 282500067328,
"totalSpace": 499738734592
}
], 200)
elif 'api/system/status' in url:
return MockResponse({
"version": "2.0.0.1121",
"buildTime": "2014-02-08T20:49:36.5560392Z",
"isDebug": "false",
"isProduction": "true",
"isAdmin": "true",
"isUserInteractive": "false",
"startupPath": "C:\\ProgramData\\NzbDrone\\bin",
"appData": "C:\\ProgramData\\NzbDrone",
"osVersion": "6.2.9200.0",
"isMono": "false",
"isLinux": "false",
"isWindows": "true",
"branch": "develop",
"authentication": "false",
"startOfWeek": 0,
"urlBase": ""
}, 200)
else:
return MockResponse({
"error": "Unauthorized"
}, 401)
class TestSonarrSetup(unittest.TestCase):
"""Test the Sonarr platform."""
# pylint: disable=invalid-name
DEVICES = []
def add_devices(self, devices, update):
"""Mock add devices."""
for device in devices:
self.DEVICES.append(device)
def setUp(self):
"""Initialize values for this testcase class."""
self.DEVICES = []
self.hass = get_test_home_assistant()
self.hass.config.time_zone = 'America/Los_Angeles'
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
@unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
def test_diskspace_no_paths(self, req_mock):
"""Test getting all disk space."""
config = {
'platform': 'sonarr',
'api_key': 'foo',
'days': '2',
'unit': 'GB',
"include_paths": [],
'monitored_conditions': [
'diskspace'
]
}
sonarr.setup_platform(self.hass, config, self.add_devices, None)
for device in self.DEVICES:
device.update()
self.assertEqual('263.10', device.state)
self.assertEqual('mdi:harddisk', device.icon)
self.assertEqual('GB', device.unit_of_measurement)
self.assertEqual('Sonarr Disk Space', device.name)
self.assertEqual(
'263.10/465.42GB (56.53%)',
device.device_state_attributes["/data"]
)
@unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
def test_diskspace_paths(self, req_mock):
"""Test getting diskspace for included paths."""
config = {
'platform': 'sonarr',
'api_key': 'foo',
'days': '2',
'unit': 'GB',
"include_paths": [
'/data'
],
'monitored_conditions': [
'diskspace'
]
}
sonarr.setup_platform(self.hass, config, self.add_devices, None)
for device in self.DEVICES:
device.update()
self.assertEqual('263.10', device.state)
self.assertEqual('mdi:harddisk', device.icon)
self.assertEqual('GB', device.unit_of_measurement)
self.assertEqual('Sonarr Disk Space', device.name)
self.assertEqual(
'263.10/465.42GB (56.53%)',
device.device_state_attributes["/data"]
)
@unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
def test_commands(self, req_mock):
"""Test getting running commands."""
config = {
'platform': 'sonarr',
'api_key': 'foo',
'days': '2',
'unit': 'GB',
"include_paths": [
'/data'
],
'monitored_conditions': [
'commands'
]
}
sonarr.setup_platform(self.hass, config, self.add_devices, None)
for device in self.DEVICES:
device.update()
self.assertEqual(1, device.state)
self.assertEqual('mdi:code-braces', device.icon)
self.assertEqual('Commands', device.unit_of_measurement)
self.assertEqual('Sonarr Commands', device.name)
self.assertEqual(
'pending',
device.device_state_attributes["RescanSeries"]
)
@unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
def test_queue(self, req_mock):
"""Test getting downloads in the queue."""
config = {
'platform': 'sonarr',
'api_key': 'foo',
'days': '2',
'unit': 'GB',
"include_paths": [
'/data'
],
'monitored_conditions': [
'queue'
]
}
sonarr.setup_platform(self.hass, config, self.add_devices, None)
for device in self.DEVICES:
device.update()
self.assertEqual(1, device.state)
self.assertEqual('mdi:download', device.icon)
self.assertEqual('Episodes', device.unit_of_measurement)
self.assertEqual('Sonarr Queue', device.name)
self.assertEqual(
'100.00%',
device.device_state_attributes["Game of Thrones S03E08"]
)
@unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
def test_series(self, req_mock):
"""Test getting the number of series."""
config = {
'platform': 'sonarr',
'api_key': 'foo',
'days': '2',
'unit': 'GB',
"include_paths": [
'/data'
],
'monitored_conditions': [
'series'
]
}
sonarr.setup_platform(self.hass, config, self.add_devices, None)
for device in self.DEVICES:
device.update()
self.assertEqual(1, device.state)
self.assertEqual('mdi:television', device.icon)
self.assertEqual('Shows', device.unit_of_measurement)
self.assertEqual('Sonarr Series', device.name)
self.assertEqual(
'26/26 Episodes',
device.device_state_attributes["Marvel's Daredevil"]
)
@unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
def test_wanted(self, req_mock):
"""Test getting wanted episodes."""
config = {
'platform': 'sonarr',
'api_key': 'foo',
'days': '2',
'unit': 'GB',
"include_paths": [
'/data'
],
'monitored_conditions': [
'wanted'
]
}
sonarr.setup_platform(self.hass, config, self.add_devices, None)
for device in self.DEVICES:
device.update()
self.assertEqual(1, device.state)
self.assertEqual('mdi:television', device.icon)
self.assertEqual('Episodes', device.unit_of_measurement)
self.assertEqual('Sonarr Wanted', device.name)
self.assertEqual(
'2014-02-03',
device.device_state_attributes["Archer (2009) S05E04"]
)
@unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
def test_upcoming_multiple_days(self, req_mock):
"""Test the upcoming episodes for multiple days."""
config = {
'platform': 'sonarr',
'api_key': 'foo',
'days': '2',
'unit': 'GB',
"include_paths": [
'/data'
],
'monitored_conditions': [
'upcoming'
]
}
sonarr.setup_platform(self.hass, config, self.add_devices, None)
for device in self.DEVICES:
device.update()
self.assertEqual(1, device.state)
self.assertEqual('mdi:television', device.icon)
self.assertEqual('Episodes', device.unit_of_measurement)
self.assertEqual('Sonarr Upcoming', device.name)
self.assertEqual(
'S04E11',
device.device_state_attributes["Bob's Burgers"]
)
@pytest.mark.skip
@unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
def test_upcoming_today(self, req_mock):
"""Test filtering for a single day.
Sonarr needs to respond with at least 2 days
"""
config = {
'platform': 'sonarr',
'api_key': 'foo',
'days': '1',
'unit': 'GB',
"include_paths": [
'/data'
],
'monitored_conditions': [
'upcoming'
]
}
sonarr.setup_platform(self.hass, config, self.add_devices, None)
for device in self.DEVICES:
device.update()
self.assertEqual(1, device.state)
self.assertEqual('mdi:television', device.icon)
self.assertEqual('Episodes', device.unit_of_measurement)
self.assertEqual('Sonarr Upcoming', device.name)
self.assertEqual(
'S04E11',
device.device_state_attributes["Bob's Burgers"]
)
@unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
def test_system_status(self, req_mock):
"""Test getting system status."""
config = {
'platform': 'sonarr',
'api_key': 'foo',
'days': '2',
'unit': 'GB',
"include_paths": [
'/data'
],
'monitored_conditions': [
'status'
]
}
sonarr.setup_platform(self.hass, config, self.add_devices, None)
for device in self.DEVICES:
device.update()
self.assertEqual('2.0.0.1121', device.state)
self.assertEqual('mdi:information', device.icon)
self.assertEqual('Sonarr Status', device.name)
self.assertEqual(
'6.2.9200.0',
device.device_state_attributes['osVersion'])
@pytest.mark.skip
@unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
def test_ssl(self, req_mock):
"""Test SSL being enabled."""
config = {
'platform': 'sonarr',
'api_key': 'foo',
'days': '1',
'unit': 'GB',
"include_paths": [
'/data'
],
'monitored_conditions': [
'upcoming'
],
"ssl": "true"
}
sonarr.setup_platform(self.hass, config, self.add_devices, None)
for device in self.DEVICES:
device.update()
self.assertEqual(1, device.state)
self.assertEqual('s', device.ssl)
self.assertEqual('mdi:television', device.icon)
self.assertEqual('Episodes', device.unit_of_measurement)
self.assertEqual('Sonarr Upcoming', device.name)
self.assertEqual(
'S04E11',
device.device_state_attributes["Bob's Burgers"]
)
@unittest.mock.patch('requests.get', side_effect=mocked_exception)
def test_exception_handling(self, req_mock):
"""Test exception being handled."""
config = {
'platform': 'sonarr',
'api_key': 'foo',
'days': '1',
'unit': 'GB',
"include_paths": [
'/data'
],
'monitored_conditions': [
'upcoming'
]
}
sonarr.setup_platform(self.hass, config, self.add_devices, None)
for device in self.DEVICES:
device.update()
            self.assertIsNone(device.state)
| LinuxChristian/home-assistant | tests/components/sensor/test_sonarr.py | Python | apache-2.0 | 34,600 | 0 |
#!/usr/bin/env python3
# encoding: utf-8
# === This file is part of Calamares - <http://github.com/calamares> ===
#
# Copyright 2014, Philip Müller <philm@manjaro.org>
# Copyright 2014, Teo Mrnjavac <teo@kde.org>
#
# Calamares is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Calamares is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Calamares. If not, see <http://www.gnu.org/licenses/>.
import os
import libcalamares
def set_autologin(username, displaymanagers, root_mount_point):
""" Enables automatic login for the installed desktop managers """
if "mdm" in displaymanagers:
# Systems with MDM as Desktop Manager
mdm_conf_path = os.path.join(root_mount_point, "etc/mdm/custom.conf")
if os.path.exists(mdm_conf_path):
with open(mdm_conf_path, 'r') as mdm_conf:
text = mdm_conf.readlines()
with open(mdm_conf_path, 'w') as mdm_conf:
for line in text:
if '[daemon]' in line:
line = '[daemon]\nAutomaticLogin=%s\nAutomaticLoginEnable=True\n' % username
mdm_conf.write(line)
else:
with open(mdm_conf_path, 'w') as mdm_conf:
mdm_conf.write(
'# Calamares - Enable automatic login for user\n')
mdm_conf.write('[daemon]\n')
mdm_conf.write('AutomaticLogin=%s\n' % username)
mdm_conf.write('AutomaticLoginEnable=True\n')
if "gdm" in displaymanagers:
# Systems with GDM as Desktop Manager
gdm_conf_path = os.path.join(root_mount_point, "etc/gdm/custom.conf")
if os.path.exists(gdm_conf_path):
with open(gdm_conf_path, 'r') as gdm_conf:
text = gdm_conf.readlines()
with open(gdm_conf_path, 'w') as gdm_conf:
for line in text:
if '[daemon]' in line:
line = '[daemon]\nAutomaticLogin=%s\nAutomaticLoginEnable=True\n' % username
gdm_conf.write(line)
else:
with open(gdm_conf_path, 'w') as gdm_conf:
gdm_conf.write(
'# Calamares - Enable automatic login for user\n')
gdm_conf.write('[daemon]\n')
gdm_conf.write('AutomaticLogin=%s\n' % username)
gdm_conf.write('AutomaticLoginEnable=True\n')
if "kdm" in displaymanagers:
# Systems with KDM as Desktop Manager
kdm_conf_path = os.path.join(
root_mount_point, "usr/share/config/kdm/kdmrc")
text = []
if os.path.exists(kdm_conf_path):
with open(kdm_conf_path, 'r') as kdm_conf:
text = kdm_conf.readlines()
with open(kdm_conf_path, 'w') as kdm_conf:
for line in text:
if '#AutoLoginEnable=true' in line:
line = 'AutoLoginEnable=true\n'
if 'AutoLoginUser=' in line:
line = 'AutoLoginUser=%s\n' % username
kdm_conf.write(line)
else:
return "Cannot write KDM configuration file", "KDM config file %s does not exist" % kdm_conf_path
if "lxdm" in displaymanagers:
# Systems with LXDM as Desktop Manager
lxdm_conf_path = os.path.join(root_mount_point, "etc/lxdm/lxdm.conf")
text = []
if os.path.exists(lxdm_conf_path):
with open(lxdm_conf_path, 'r') as lxdm_conf:
text = lxdm_conf.readlines()
with open(lxdm_conf_path, 'w') as lxdm_conf:
for line in text:
if '# autologin=dgod' in line:
line = 'autologin=%s\n' % username
lxdm_conf.write(line)
else:
return "Cannot write LXDM configuration file", "LXDM config file %s does not exist" % lxdm_conf_path
if "lightdm" in displaymanagers:
# Systems with LightDM as Desktop Manager
# Ideally, we should use configparser for the ini conf file,
# but we just do a simple text replacement for now, as it
# worksforme(tm)
lightdm_conf_path = os.path.join(
root_mount_point, "etc/lightdm/lightdm.conf")
text = []
if os.path.exists(lightdm_conf_path):
with open(lightdm_conf_path, 'r') as lightdm_conf:
text = lightdm_conf.readlines()
with open(lightdm_conf_path, 'w') as lightdm_conf:
for line in text:
if '#autologin-user=' in line:
line = 'autologin-user=%s\n' % username
lightdm_conf.write(line)
else:
return "Cannot write LightDM configuration file", "LightDM config file %s does not exist" % lightdm_conf_path
if "slim" in displaymanagers:
# Systems with Slim as Desktop Manager
slim_conf_path = os.path.join(root_mount_point, "etc/slim.conf")
text = []
if os.path.exists(slim_conf_path):
with open(slim_conf_path, 'r') as slim_conf:
text = slim_conf.readlines()
with open(slim_conf_path, 'w') as slim_conf:
for line in text:
if 'auto_login' in line:
line = 'auto_login yes\n'
if 'default_user' in line:
line = 'default_user %s\n' % username
slim_conf.write(line)
else:
return "Cannot write SLIM configuration file", "SLIM config file %s does not exist" % slim_conf_path
if "sddm" in displaymanagers:
# Systems with Sddm as Desktop Manager
sddm_conf_path = os.path.join(root_mount_point, "etc/sddm.conf")
text = []
if os.path.exists(sddm_conf_path):
with open(sddm_conf_path, 'r') as sddm_conf:
text = sddm_conf.readlines()
with open(sddm_conf_path, 'w') as sddm_conf:
for line in text:
if 'User=' in line:
line = 'User={}\n'.format(username)
sddm_conf.write(line)
else:
return "Cannot write SDDM configuration file", "SDDM config file %s does not exist" % sddm_conf_path
return None
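# Illustrative call of set_autologin (hypothetical values, not part of the
# original module):
#
#     err = set_autologin("alice", ["lightdm"], "/tmp/root")
#     # rewrites /tmp/root/etc/lightdm/lightdm.conf in place;
#     # err is None on success, or an (error, description) tuple on failure.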
def run():
""" Configure display managers """
# We acquire a list of displaymanagers, either from config or (overridden) from globalstorage.
# This module will try to set up (including autologin) all the displaymanagers in the list, in that specific order.
# Most distros will probably only ship one displaymanager.
# If a displaymanager is in the list but not installed, this module quits with error.
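    # For illustration only (hypothetical contents), a minimal
    # displaymanager.conf could look like:
    #
    #   displaymanagers:
    #     - sddm
    #     - lightdm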
if "displaymanagers" in libcalamares.job.configuration:
displaymanagers = libcalamares.job.configuration["displaymanagers"]
if libcalamares.globalstorage.contains("displaymanagers"):
displaymanagers = libcalamares.globalstorage.value("displaymanagers")
if displaymanagers is None:
return "No display managers selected for the displaymanager module.",\
"The displaymanagers list is empty or undefined in both globalstorage and displaymanager.conf."
username = libcalamares.globalstorage.value("autologinUser")
root_mount_point = libcalamares.globalstorage.value("rootMountPoint")
# Setup slim
if "slim" in displaymanagers:
if not os.path.exists("%s/usr/bin/slim" % root_mount_point):
return "slim selected but not installed", ""
# Setup sddm
if "sddm" in displaymanagers:
if not os.path.exists("%s/usr/bin/sddm" % root_mount_point):
return "sddm selected but not installed", ""
# setup lightdm
if "lightdm" in displaymanagers:
if os.path.exists("%s/usr/bin/lightdm" % root_mount_point):
libcalamares.utils.chroot_call(['mkdir', '-p', '/run/lightdm'])
libcalamares.utils.chroot_call(['getent', 'group', 'lightdm'])
libcalamares.utils.chroot_call(
['groupadd', '-g', '620', 'lightdm'])
libcalamares.utils.chroot_call(['getent', 'passwd', 'lightdm'])
libcalamares.utils.chroot_call(['useradd', '-c', '"LightDM Display Manager"',
'-u', '620', '-g', 'lightdm', '-d', '/var/run/lightdm',
'-s', '/usr/bin/nologin', 'lightdm'])
libcalamares.utils.chroot_call(['passwd', '-l', 'lightdm'])
libcalamares.utils.chroot_call(
['chown', '-R', 'lightdm:lightdm', '/run/lightdm'])
if os.path.exists("%s/usr/bin/startxfce4" % root_mount_point):
os.system(
"sed -i -e 's/^.*user-session=.*/user-session=xfce/' %s/etc/lightdm/lightdm.conf" % root_mount_point)
libcalamares.utils.chroot_call(['ln', '-s',
'/usr/lib/lightdm/lightdm/gdmflexiserver',
'/usr/bin/gdmflexiserver'])
libcalamares.utils.chroot_call(
                ['chmod', '+r', '/etc/lightdm/lightdm.conf'])
else:
return "lightdm selected but not installed", ""
# Setup gdm
if "gdm" in displaymanagers:
if os.path.exists("%s/usr/bin/gdm" % root_mount_point):
libcalamares.utils.chroot_call(['getent', 'group', 'gdm'])
libcalamares.utils.chroot_call(['groupadd', '-g', '120', 'gdm'])
libcalamares.utils.chroot_call(['getent', 'passwd', 'gdm'])
libcalamares.utils.chroot_call(['useradd', '-c', '"Gnome Display Manager"',
'-u', '120', '-g', 'gdm', '-d', '/var/lib/gdm',
'-s', '/usr/bin/nologin', 'gdm'])
libcalamares.utils.chroot_call(['passwd', '-l', 'gdm'])
libcalamares.utils.chroot_call(
['chown', '-R', 'gdm:gdm', '/var/lib/gdm'])
if os.path.exists("%s/var/lib/AccountsService/users" % root_mount_point):
os.system(
"echo \"[User]\" > %s/var/lib/AccountsService/users/gdm" % root_mount_point)
if os.path.exists("%s/usr/bin/startxfce4" % root_mount_point):
os.system(
"echo \"XSession=xfce\" >> %s/var/lib/AccountsService/users/gdm" % root_mount_point)
if os.path.exists("%s/usr/bin/cinnamon-session" % root_mount_point):
os.system(
"echo \"XSession=cinnamon-session\" >> %s/var/lib/AccountsService/users/gdm" % root_mount_point)
if os.path.exists("%s/usr/bin/mate-session" % root_mount_point):
os.system(
"echo \"XSession=mate\" >> %s/var/lib/AccountsService/users/gdm" % root_mount_point)
if os.path.exists("%s/usr/bin/enlightenment_start" % root_mount_point):
os.system(
"echo \"XSession=enlightenment\" >> %s/var/lib/AccountsService/users/gdm" % root_mount_point)
if os.path.exists("%s/usr/bin/openbox-session" % root_mount_point):
os.system(
"echo \"XSession=openbox\" >> %s/var/lib/AccountsService/users/gdm" % root_mount_point)
if os.path.exists("%s/usr/bin/lxsession" % root_mount_point):
os.system(
"echo \"XSession=LXDE\" >> %s/var/lib/AccountsService/users/gdm" % root_mount_point)
os.system(
"echo \"Icon=\" >> %s/var/lib/AccountsService/users/gdm" % root_mount_point)
else:
return "gdm selected but not installed", ""
# Setup mdm
if "mdm" in displaymanagers:
if os.path.exists("%s/usr/bin/mdm" % root_mount_point):
libcalamares.utils.chroot_call(['getent', 'group', 'mdm'])
libcalamares.utils.chroot_call(['groupadd', '-g', '128', 'mdm'])
libcalamares.utils.chroot_call(['getent', 'passwd', 'mdm'])
libcalamares.utils.chroot_call(['useradd', '-c', '"Linux Mint Display Manager"',
'-u', '128', '-g', 'mdm', '-d', '/var/lib/mdm',
'-s', '/usr/bin/nologin', 'mdm'])
libcalamares.utils.chroot_call(['passwd', '-l', 'mdm'])
libcalamares.utils.chroot_call(
['chown', 'root:mdm', '/var/lib/mdm'])
libcalamares.utils.chroot_call(['chmod', '1770', '/var/lib/mdm'])
if os.path.exists("%s/usr/bin/startxfce4" % root_mount_point):
os.system(
"sed -i 's|default.desktop|xfce.desktop|g' %s/etc/mdm/custom.conf" % root_mount_point)
if os.path.exists("%s/usr/bin/cinnamon-session" % root_mount_point):
os.system(
"sed -i 's|default.desktop|cinnamon.desktop|g' %s/etc/mdm/custom.conf" % root_mount_point)
if os.path.exists("%s/usr/bin/openbox-session" % root_mount_point):
os.system(
"sed -i 's|default.desktop|openbox.desktop|g' %s/etc/mdm/custom.conf" % root_mount_point)
if os.path.exists("%s/usr/bin/mate-session" % root_mount_point):
os.system(
"sed -i 's|default.desktop|mate.desktop|g' %s/etc/mdm/custom.conf" % root_mount_point)
if os.path.exists("%s/usr/bin/lxsession" % root_mount_point):
os.system(
"sed -i 's|default.desktop|LXDE.desktop|g' %s/etc/mdm/custom.conf" % root_mount_point)
if os.path.exists("%s/usr/bin/enlightenment_start" % root_mount_point):
os.system(
"sed -i 's|default.desktop|enlightenment.desktop|g' %s/etc/mdm/custom.conf" % root_mount_point)
else:
return "mdm selected but not installed", ""
# Setup lxdm
if "lxdm" in displaymanagers:
if os.path.exists("%s/usr/bin/lxdm" % root_mount_point):
libcalamares.utils.chroot_call(['groupadd', '--system', 'lxdm'])
if os.path.exists("%s/usr/bin/startxfce4" % root_mount_point):
os.system(
"sed -i -e 's|^.*session=.*|session=/usr/bin/startxfce4|' %s/etc/lxdm/lxdm.conf" % root_mount_point)
if os.path.exists("%s/usr/bin/cinnamon-session" % root_mount_point):
os.system(
"sed -i -e 's|^.*session=.*|session=/usr/bin/cinnamon-session|' %s/etc/lxdm/lxdm.conf" % root_mount_point)
if os.path.exists("%s/usr/bin/mate-session" % root_mount_point):
os.system(
"sed -i -e 's|^.*session=.*|session=/usr/bin/mate-session|' %s/etc/lxdm/lxdm.conf" % root_mount_point)
if os.path.exists("%s/usr/bin/enlightenment_start" % root_mount_point):
os.system(
"sed -i -e 's|^.*session=.*|session=/usr/bin/enlightenment_start|' %s/etc/lxdm/lxdm.conf" % root_mount_point)
if os.path.exists("%s/usr/bin/openbox-session" % root_mount_point):
os.system(
"sed -i -e 's|^.*session=.*|session=/usr/bin/openbox-session|' %s/etc/lxdm/lxdm.conf" % root_mount_point)
if os.path.exists("%s/usr/bin/lxsession" % root_mount_point):
os.system(
"sed -i -e 's|^.*session=.*|session=/usr/bin/lxsession|' %s/etc/lxdm/lxdm.conf" % root_mount_point)
libcalamares.utils.chroot_call(
['chgrp', '-R', 'lxdm', '/var/lib/lxdm'])
libcalamares.utils.chroot_call(
['chgrp', 'lxdm', '/etc/lxdm/lxdm.conf'])
libcalamares.utils.chroot_call(
['chmod', '+r', '/etc/lxdm/lxdm.conf'])
else:
return "lxdm selected but not installed", ""
# Setup kdm
if "kdm" in displaymanagers:
if os.path.exists("%s/usr/bin/kdm" % root_mount_point):
libcalamares.utils.chroot_call(['getent', 'group', 'kdm'])
libcalamares.utils.chroot_call(['groupadd', '-g', '135', 'kdm'])
libcalamares.utils.chroot_call(['getent', 'passwd', 'kdm'])
libcalamares.utils.chroot_call(['useradd', '-u', '135', '-g', 'kdm', '-d',
'/var/lib/kdm', '-s', '/bin/false', '-r', '-M', 'kdm'])
libcalamares.utils.chroot_call(
                ['chown', '-R', '135:135', '/var/lib/kdm'])
libcalamares.utils.chroot_call(
['xdg-icon-resource', 'forceupdate', '--theme', 'hicolor'])
libcalamares.utils.chroot_call(['update-desktop-database', '-q'])
else:
return "kdm selected but not installed", ""
    if username is not None:
libcalamares.utils.debug(
"Setting up autologin for user %s." % username)
return set_autologin(username, displaymanagers, root_mount_point)
return None
| maui-packages/calamares | src/modules/displaymanager/main.py | Python | gpl-3.0 | 17,415 | 0.002871 |
# Copyright 2014 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import os
import errno
import shutil
import platform
import stat
def mkDirP(path):
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
def rmF(path):
try:
os.remove(path)
except OSError as exception:
if exception.errno != errno.ENOENT:
raise
def rmRf(path):
    # we may have to make files writable before we can successfully delete
    # them; this onerror handler makes the path writable and retries
def fixPermissions(fn, path, excinfo):
if os.access(path, os.W_OK):
raise
else:
os.chmod(path, stat.S_IWUSR)
fn(path)
try:
shutil.rmtree(path, onerror=fixPermissions)
except OSError as exception:
if 'cannot call rmtree on a symbolic link' in str(exception).lower():
os.unlink(path)
elif exception.errno == errno.ENOTDIR:
rmF(path)
elif exception.errno != errno.ENOENT:
raise
def fullySplitPath(path):
components = []
while True:
path, component = os.path.split(path)
if component != '':
components.append(component)
else:
if path != '':
components.append(path)
break
components.reverse()
return components
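# Illustrative behaviour of fullySplitPath (hypothetical calls, not part of
# the original module):
#
#     fullySplitPath('/a/b/c')  # -> ['/', 'a', 'b', 'c']
#     fullySplitPath('a/b')     # -> ['a', 'b']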
# The link-related functions are platform-dependent
links = __import__("fsutils_win" if os.name == 'nt' else "fsutils_posix", globals(), locals(), ['*'])
isLink = links.isLink
tryReadLink = links.tryReadLink
_symlink = links._symlink
realpath = links.realpath
# !!! FIXME: the logic in the "except" block below probably doesn't work in Windows
def symlink(source, link_name):
try:
# os.symlink doesn't update existing links, so need to rm first
rmF(link_name)
_symlink(source, link_name)
except OSError as exception:
if exception.errno != errno.EEXIST and (tryReadLink(link_name) != source):
raise
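# Minimal usage sketch of this module (hypothetical paths):
#
#     mkDirP('/tmp/demo/a/b')               # create nested dirs, tolerate EEXIST
#     symlink('/tmp/demo/a', '/tmp/demo/l') # removes an existing link first
#     rmRf('/tmp/demo')                     # recursive delete, fixing permissions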
| bridadan/yotta | yotta/lib/fsutils.py | Python | apache-2.0 | 2,102 | 0.004757 |
# -*- coding: utf-8 -*-
""" This is the pytest configuration file """
import colorama
import pytest
import sys
from seleniumbase import config as sb_config
from seleniumbase.core import log_helper
from seleniumbase.core import proxy_helper
from seleniumbase.fixtures import constants
def pytest_addoption(parser):
"""
This plugin adds the following command-line options to pytest:
--browser=BROWSER (The web browser to use. Default: "chrome".)
--settings-file=FILE (Override default SeleniumBase settings.)
--env=ENV (Set the test env. Access with "self.env" in tests.)
--data=DATA (Extra test data. Access with "self.data" in tests.)
--var1=DATA (Extra test data. Access with "self.var1" in tests.)
--var2=DATA (Extra test data. Access with "self.var2" in tests.)
--var3=DATA (Extra test data. Access with "self.var3" in tests.)
--user-data-dir=DIR (Set the Chrome user data directory to use.)
--server=SERVER (The Selenium Grid server/IP used for tests.)
--port=PORT (The Selenium Grid port used by the test server.)
--cap-file=FILE (The web browser's desired capabilities to use.)
--cap-string=STRING (The web browser's desired capabilities to use.)
--proxy=SERVER:PORT (Connect to a proxy server:port for tests.)
--proxy=USERNAME:PASSWORD@SERVER:PORT (Use authenticated proxy server.)
--agent=STRING (Modify the web browser's User-Agent string.)
--mobile (Use the mobile device emulator while running tests.)
--metrics=STRING (Set mobile "CSSWidth,CSSHeight,PixelRatio".)
--extension-zip=ZIP (Load a Chrome Extension .zip|.crx, comma-separated.)
--extension-dir=DIR (Load a Chrome Extension directory, comma-separated.)
--headless (Run tests headlessly. Default mode on Linux OS.)
--headed (Run tests with a GUI on Linux OS.)
--locale=LOCALE_CODE (Set the Language Locale Code for the web browser.)
--start-page=URL (The starting URL for the web browser when tests begin.)
--archive-logs (Archive old log files instead of deleting them.)
    --time-limit=SECONDS (Safely fail any test that exceeds the time limit.)
--slow (Slow down the automation. Faster than using Demo Mode.)
--demo (Slow down and visually see test actions as they occur.)
--demo-sleep=SECONDS (Set the wait time after Demo Mode actions.)
--highlights=NUM (Number of highlight animations for Demo Mode actions.)
--message-duration=SECONDS (The time length for Messenger alerts.)
--check-js (Check for JavaScript errors after page loads.)
--ad-block (Block some types of display ads after page loads.)
--block-images (Block images from loading during tests.)
--verify-delay=SECONDS (The delay before MasterQA verification checks.)
--disable-csp (Disable the Content Security Policy of websites.)
--enable-ws (Enable Web Security on Chrome.)
--enable-sync (Enable "Chrome Sync".)
--use-auto-ext (Use Chrome's automation extension.)
--swiftshader (Use Chrome's "--use-gl=swiftshader" feature.)
--incognito (Enable Chrome's Incognito mode.)
--guest (Enable Chrome's Guest mode.)
--devtools (Open Chrome's DevTools when the browser opens.)
--reuse-session / --rs (Reuse the browser session between tests.)
--crumbs (Delete all cookies between tests reusing a session.)
--maximize (Start tests with the web browser window maximized.)
--save-screenshot (Save a screenshot at the end of each test.)
--visual-baseline (Set the visual baseline for Visual/Layout tests.)
--timeout-multiplier=MULTIPLIER (Multiplies the default timeout values.)
"""
colorama.init(autoreset=True)
c1 = colorama.Fore.BLUE + colorama.Back.LIGHTCYAN_EX
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.MAGENTA + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
s_str = "SeleniumBase"
s_str = s_str.replace("SeleniumBase", c1 + "Selenium" + c2 + "Base" + cr)
s_str = s_str + cr + " " + c3 + "command-line options for pytest" + cr
parser = parser.getgroup('SeleniumBase', s_str)
parser.addoption('--browser',
action="store",
dest='browser',
type=str.lower,
choices=constants.ValidBrowsers.valid_browsers,
default=constants.Browser.GOOGLE_CHROME,
help="""Specifies the web browser to use. Default: Chrome.
If you want to use Firefox, explicitly indicate that.
Example: (--browser=firefox)""")
parser.addoption('--with-selenium',
action="store_true",
dest='with_selenium',
default=True,
help="""(DEPRECATED) Start tests with an open web browser.
(This is ALWAYS True now when importing BaseCase)""")
parser.addoption('--env',
action='store',
dest='environment',
type=str.lower,
choices=(
constants.Environment.QA,
constants.Environment.STAGING,
constants.Environment.DEVELOP,
constants.Environment.PRODUCTION,
constants.Environment.MASTER,
constants.Environment.LOCAL,
constants.Environment.TEST
),
default=constants.Environment.TEST,
help="The environment to run the tests in.")
parser.addoption('--data',
dest='data',
default=None,
help='Extra data to pass to tests from the command line.')
parser.addoption('--var1',
dest='var1',
default=None,
help='Extra data to pass to tests from the command line.')
parser.addoption('--var2',
dest='var2',
default=None,
help='Extra data to pass to tests from the command line.')
parser.addoption('--var3',
dest='var3',
default=None,
help='Extra data to pass to tests from the command line.')
parser.addoption('--cap_file', '--cap-file',
dest='cap_file',
default=None,
help="""The file that stores browser desired capabilities
for BrowserStack, Sauce Labs, and other
remote web drivers to use.""")
parser.addoption('--cap_string', '--cap-string',
dest='cap_string',
default=None,
help="""The string that stores browser desired
capabilities for BrowserStack, Sauce Labs,
and other remote web drivers to use.
Enclose cap-string in single quotes.
Enclose parameter keys in double quotes.
Example: --cap-string='{"name":"test1","v":"42"}'""")
parser.addoption('--settings_file', '--settings-file', '--settings',
action='store',
dest='settings_file',
default=None,
help="""The file that stores key/value pairs for
overriding values in the
seleniumbase/config/settings.py file.""")
parser.addoption('--user_data_dir', '--user-data-dir',
dest='user_data_dir',
default=None,
help="""The Chrome User Data Directory to use. (Profile)
If the directory doesn't exist, it'll be created.""")
parser.addoption('--with-testing_base', '--with-testing-base',
action="store_true",
dest='with_testing_base',
default=True,
help="""Use to save logs and screenshots when tests fail.
The following options are now active by default
with --with-testing_base (which is on by default):
--with-screen_shots ,
--with-basic_test_info ,
--with-page_source
""")
parser.addoption('--log_path', '--log-path',
dest='log_path',
default='latest_logs/',
help="""Log files are saved to the "latest_logs/" folder.
(This field is NOT EDITABLE anymore!)""")
parser.addoption('--archive_logs', '--archive-logs',
action="store_true",
dest='archive_logs',
default=False,
help="Archive old log files instead of deleting them.")
parser.addoption('--with-db_reporting', '--with-db-reporting',
action="store_true",
dest='with_db_reporting',
default=False,
help="Use to record test data in the MySQL database.")
parser.addoption('--database_env', '--database-env',
action='store',
dest='database_env',
choices=(
constants.Environment.QA,
constants.Environment.STAGING,
constants.Environment.DEVELOP,
constants.Environment.PRODUCTION,
constants.Environment.MASTER,
constants.Environment.LOCAL,
constants.Environment.TEST
),
default=constants.Environment.TEST,
help="The database environment to run the tests in.")
parser.addoption('--with-s3_logging', '--with-s3-logging',
action="store_true",
dest='with_s3_logging',
default=False,
help="Use to save test log files in Amazon S3.")
parser.addoption('--with-screen_shots', '--with-screen-shots',
action="store_true",
dest='with_screen_shots',
default=False,
help="""Use to save screenshots on test failure.
(Automatically on when using --with-testing_base)""")
parser.addoption('--with-basic_test_info', '--with-basic-test-info',
action="store_true",
dest='with_basic_test_info',
default=False,
help="""Use to save basic test info on test failure.
(Automatically on when using --with-testing_base)""")
parser.addoption('--with-page_source', '--with-page-source',
action="store_true",
dest='with_page_source',
default=False,
help="""Use to save page source on test failure.
(Automatically on when using --with-testing_base)""")
parser.addoption('--server',
action='store',
dest='servername',
default='localhost',
help="""Designates the Selenium Grid server to use.
Use "127.0.0.1" to connect to a localhost Grid.
If unset or set to "localhost", Grid isn't used.
Default: "localhost".""")
parser.addoption('--port',
action='store',
dest='port',
default='4444',
help="""Designates the Selenium Grid port to use.
Default: 4444.""")
parser.addoption('--proxy',
action='store',
dest='proxy_string',
default=None,
help="""Designates the proxy server:port to use.
Format: servername:port. OR
username:password@servername:port OR
A dict key from proxy_list.PROXY_LIST
Default: None.""")
parser.addoption('--agent', '--user-agent', '--user_agent',
action='store',
dest='user_agent',
default=None,
help="""Designates the User-Agent for the browser to use.
Format: A string.
Default: None.""")
parser.addoption('--mobile', '--mobile-emulator', '--mobile_emulator',
action="store_true",
dest='mobile_emulator',
default=False,
help="""If this option is enabled, the mobile emulator
will be used while running tests.""")
parser.addoption('--metrics', '--device-metrics', '--device_metrics',
action='store',
dest='device_metrics',
default=None,
help="""Designates the three device metrics of the mobile
emulator: CSS Width, CSS Height, and Pixel-Ratio.
Format: A comma-separated string with the 3 values.
Example: "375,734,3"
Default: None. (Will use default values if None)""")
parser.addoption('--extension_zip', '--extension-zip', '--crx',
action='store',
dest='extension_zip',
default=None,
help="""Designates the Chrome Extension ZIP file to load.
Format: A comma-separated list of .zip or .crx files
containing the Chrome extensions to load.
Default: None.""")
parser.addoption('--extension_dir', '--extension-dir',
action='store',
dest='extension_dir',
default=None,
help="""Designates the Chrome Extension folder to load.
Format: A directory containing the Chrome extension.
(Can also be a comma-separated list of directories.)
Default: None.""")
parser.addoption('--headless',
action="store_true",
dest='headless',
default=False,
help="""Using this makes Webdriver run web browsers
headlessly, which is required on headless machines.
Default: False on Mac/Windows. True on Linux.""")
parser.addoption('--headed', '--gui',
action="store_true",
dest='headed',
default=False,
help="""Using this makes Webdriver run web browsers with
a GUI when running tests on Linux machines.
(The default setting on Linux is headless.)
(The default setting on Mac or Windows is headed.)
""")
parser.addoption('--locale_code', '--locale-code', '--locale',
action='store',
dest='locale_code',
default=None,
help="""Designates the Locale Code for the web browser.
A Locale is a specific version of a spoken Language.
The Locale alters visible text on supported websites.
See: https://seleniumbase.io/help_docs/locale_codes/
Default: None. (The web browser's default mode.)""")
parser.addoption('--start_page', '--start-page', '--url',
action='store',
dest='start_page',
default=None,
help="""Designates the starting URL for the web browser
when each test begins.
Default: None.""")
parser.addoption('--is_pytest', '--is-pytest',
action="store_true",
dest='is_pytest',
default=True,
help="""This is used by the BaseCase class to tell apart
pytest runs from nosetest runs. (Automatic)""")
parser.addoption('--time_limit', '--time-limit', '--timelimit',
action='store',
dest='time_limit',
default=None,
help="""Use this to set a time limit per test, in seconds.
If a test runs beyond the limit, it fails.""")
parser.addoption('--slow_mode', '--slow-mode', '--slow',
action="store_true",
dest='slow_mode',
default=False,
help="""Using this slows down the automation.""")
parser.addoption('--demo_mode', '--demo-mode', '--demo',
action="store_true",
dest='demo_mode',
default=False,
help="""Using this slows down the automation and lets you
visually see what the tests are actually doing.""")
parser.addoption('--demo_sleep', '--demo-sleep',
action='store',
dest='demo_sleep',
default=None,
help="""Setting this overrides the Demo Mode sleep
time that happens after browser actions.""")
parser.addoption('--highlights',
action='store',
dest='highlights',
default=None,
help="""Setting this overrides the default number of
highlight animation loops to have per call.""")
parser.addoption('--message_duration', '--message-duration',
action="store",
dest='message_duration',
default=None,
help="""Setting this overrides the default time that
messenger notifications remain visible when reaching
assert statements during Demo Mode.""")
parser.addoption('--check_js', '--check-js',
action="store_true",
dest='js_checking_on',
default=False,
help="""The option to check for JavaScript errors after
every page load.""")
parser.addoption('--ad_block', '--ad-block', '--block_ads', '--block-ads',
action="store_true",
dest='ad_block_on',
default=False,
help="""Using this makes WebDriver block display ads
that are defined in ad_block_list.AD_BLOCK_LIST.""")
parser.addoption('--block_images', '--block-images',
action="store_true",
dest='block_images',
default=False,
help="""Using this makes WebDriver block images from
loading on web pages during tests.""")
parser.addoption('--verify_delay', '--verify-delay',
action='store',
dest='verify_delay',
default=None,
help="""Setting this overrides the default wait time
before each MasterQA verification pop-up.""")
parser.addoption('--disable_csp', '--disable-csp', '--no_csp', '--no-csp',
action="store_true",
dest='disable_csp',
default=False,
help="""Using this disables the Content Security Policy of
websites, which may interfere with some features of
SeleniumBase, such as loading custom JavaScript
libraries for various testing actions.
Setting this to True (--disable-csp) overrides the
value set in seleniumbase/config/settings.py""")
parser.addoption('--enable_ws', '--enable-ws', '--enable-web-security',
action="store_true",
dest='enable_ws',
default=False,
help="""Using this enables the "Web Security" feature of
Chrome and Chromium-based browsers such as Edge.""")
parser.addoption('--enable_sync', '--enable-sync',
action="store_true",
dest='enable_sync',
default=False,
help="""Using this enables the "Chrome Sync" feature.""")
parser.addoption('--use_auto_ext', '--use-auto-ext', '--auto-ext',
action="store_true",
dest='use_auto_ext',
default=False,
help="""Using this enables Chrome's Automation Extension.
It's not required, but some commands & advanced
features may need it.""")
parser.addoption('--no_sandbox', '--no-sandbox',
action="store_true",
dest='no_sandbox',
default=False,
help="""Using this enables the "No Sandbox" feature.
(This setting is now always enabled by default.)""")
parser.addoption('--disable_gpu', '--disable-gpu',
action="store_true",
dest='disable_gpu',
default=False,
help="""Using this enables the "Disable GPU" feature.
(This setting is now always enabled by default.)""")
parser.addoption('--swiftshader',
action="store_true",
dest='swiftshader',
default=False,
help="""Using this enables the "--use-gl=swiftshader"
feature when running tests on Chrome.""")
parser.addoption('--incognito', '--incognito_mode', '--incognito-mode',
action="store_true",
dest='incognito',
default=False,
help="""Using this enables Chrome's Incognito mode.""")
parser.addoption('--guest', '--guest_mode', '--guest-mode',
action="store_true",
dest='guest_mode',
default=False,
help="""Using this enables Chrome's Guest mode.""")
parser.addoption('--devtools', '--open_devtools', '--open-devtools',
action="store_true",
dest='devtools',
default=False,
help="""Using this opens Chrome's DevTools.""")
parser.addoption('--rs', '--reuse_session', '--reuse-session',
action="store_true",
dest='reuse_session',
default=False,
help="""The option to reuse the selenium browser window
session between tests.""")
parser.addoption('--crumbs',
action="store_true",
dest='crumbs',
default=False,
help="""The option to delete all cookies between tests
that reuse the same browser session. This option
is only needed when using "--reuse-session".""")
parser.addoption('--maximize_window', '--maximize-window', '--maximize',
'--fullscreen',
action="store_true",
dest='maximize_option',
default=False,
help="""The option to start with the browser window
maximized.""")
parser.addoption('--save_screenshot', '--save-screenshot',
action='store_true',
dest='save_screenshot',
default=False,
help="""Take a screenshot on last page after the last step
of the test. (Added to the "latest_logs" folder.)""")
parser.addoption('--visual_baseline', '--visual-baseline',
action='store_true',
dest='visual_baseline',
default=False,
help="""Setting this resets the visual baseline for
Automated Visual Testing with SeleniumBase.
When a test calls self.check_window(), it will
rebuild its files in the visual_baseline folder.""")
parser.addoption('--timeout_multiplier', '--timeout-multiplier',
action='store',
dest='timeout_multiplier',
default=None,
help="""Setting this overrides the default timeout
by the multiplier when waiting for page elements.
                     Unused when tests override the default value.""")
for arg in sys.argv:
if "--timeout=" in arg:
raise Exception(
"\n\n Don't use --timeout=s from pytest-timeout! "
"\n It's not thread-safe for WebDriver processes! "
"\n Use --time-limit=s from SeleniumBase instead!\n")
def pytest_configure(config):
""" This runs after command line options have been parsed """
sb_config.is_pytest = True
sb_config.browser = config.getoption('browser')
sb_config.data = config.getoption('data')
sb_config.var1 = config.getoption('var1')
sb_config.var2 = config.getoption('var2')
sb_config.var3 = config.getoption('var3')
sb_config.environment = config.getoption('environment')
sb_config.with_selenium = config.getoption('with_selenium')
sb_config.user_agent = config.getoption('user_agent')
sb_config.mobile_emulator = config.getoption('mobile_emulator')
sb_config.device_metrics = config.getoption('device_metrics')
sb_config.headless = config.getoption('headless')
sb_config.headed = config.getoption('headed')
sb_config.locale_code = config.getoption('locale_code')
sb_config.start_page = config.getoption('start_page')
sb_config.extension_zip = config.getoption('extension_zip')
sb_config.extension_dir = config.getoption('extension_dir')
sb_config.with_testing_base = config.getoption('with_testing_base')
sb_config.with_db_reporting = config.getoption('with_db_reporting')
sb_config.with_s3_logging = config.getoption('with_s3_logging')
sb_config.with_screen_shots = config.getoption('with_screen_shots')
sb_config.with_basic_test_info = config.getoption('with_basic_test_info')
sb_config.with_page_source = config.getoption('with_page_source')
sb_config.servername = config.getoption('servername')
sb_config.port = config.getoption('port')
sb_config.proxy_string = config.getoption('proxy_string')
sb_config.cap_file = config.getoption('cap_file')
sb_config.cap_string = config.getoption('cap_string')
sb_config.settings_file = config.getoption('settings_file')
sb_config.user_data_dir = config.getoption('user_data_dir')
sb_config.database_env = config.getoption('database_env')
sb_config.log_path = 'latest_logs/' # (No longer editable!)
sb_config.archive_logs = config.getoption('archive_logs')
sb_config._time_limit = config.getoption('time_limit')
sb_config.time_limit = config.getoption('time_limit')
sb_config.slow_mode = config.getoption('slow_mode')
sb_config.demo_mode = config.getoption('demo_mode')
sb_config.demo_sleep = config.getoption('demo_sleep')
sb_config.highlights = config.getoption('highlights')
sb_config.message_duration = config.getoption('message_duration')
sb_config.js_checking_on = config.getoption('js_checking_on')
sb_config.ad_block_on = config.getoption('ad_block_on')
sb_config.block_images = config.getoption('block_images')
sb_config.verify_delay = config.getoption('verify_delay')
sb_config.disable_csp = config.getoption('disable_csp')
sb_config.enable_ws = config.getoption('enable_ws')
sb_config.enable_sync = config.getoption('enable_sync')
sb_config.use_auto_ext = config.getoption('use_auto_ext')
sb_config.no_sandbox = config.getoption('no_sandbox')
sb_config.disable_gpu = config.getoption('disable_gpu')
sb_config.swiftshader = config.getoption('swiftshader')
sb_config.incognito = config.getoption('incognito')
sb_config.guest_mode = config.getoption('guest_mode')
sb_config.devtools = config.getoption('devtools')
sb_config.reuse_session = config.getoption('reuse_session')
sb_config.crumbs = config.getoption('crumbs')
sb_config.shared_driver = None # The default driver for session reuse
sb_config.maximize_option = config.getoption('maximize_option')
sb_config.save_screenshot = config.getoption('save_screenshot')
sb_config.visual_baseline = config.getoption('visual_baseline')
sb_config.timeout_multiplier = config.getoption('timeout_multiplier')
sb_config.pytest_html_report = config.getoption('htmlpath') # --html=FILE
sb_config._sb_node = {} # sb node dictionary (Used with the sb fixture)
if sb_config.reuse_session:
arg_join = " ".join(sys.argv)
if ("-n" in sys.argv) or ("-n=" in arg_join) or (arg_join == "-c"):
# sb_config.reuse_session = False
pass # Allow multithreaded browser sessions to be reused now
if "linux" in sys.platform and (
not sb_config.headed and not sb_config.headless):
print(
"(Running with --headless on Linux. "
"Use --headed or --gui to override.)")
sb_config.headless = True
if not sb_config.headless:
sb_config.headed = True
if sb_config.with_testing_base:
log_helper.log_folder_setup(sb_config.log_path, sb_config.archive_logs)
proxy_helper.remove_proxy_zip_if_present()
def pytest_unconfigure():
""" This runs after all tests have completed with pytest. """
proxy_helper.remove_proxy_zip_if_present()
if sb_config.reuse_session:
# Close the shared browser session
if sb_config.shared_driver:
try:
sb_config.shared_driver.quit()
except AttributeError:
pass
except Exception:
pass
sb_config.shared_driver = None
log_helper.archive_logs_if_set(sb_config.log_path, sb_config.archive_logs)
def pytest_runtest_setup():
""" This runs before every test with pytest """
pass
def pytest_runtest_teardown(item):
""" This runs after every test with pytest """
    # Make sure webdriver has exited properly and any headless display has stopped
try:
self = item._testcase
try:
if hasattr(self, 'driver') and self.driver:
self.driver.quit()
except Exception:
pass
try:
if hasattr(self, 'headless') and self.headless:
if self.headless_active:
if hasattr(self, 'display') and self.display:
self.display.stop()
except Exception:
pass
except Exception:
pass
@pytest.fixture()
def sb(request):
""" SeleniumBase as a pytest fixture.
Usage example: "def test_one(sb):"
You'll need to use this for tests that use other pytest fixtures. """
from seleniumbase import BaseCase
class BaseClass(BaseCase):
def setUp(self):
super(BaseClass, self).setUp()
def tearDown(self):
self.save_teardown_screenshot()
super(BaseClass, self).tearDown()
def base_method(self):
pass
if request.cls:
request.cls.sb = BaseClass("base_method")
request.cls.sb.setUp()
request.cls.sb._needs_tearDown = True
sb_config._sb_node[request.node.nodeid] = request.cls.sb
yield request.cls.sb
if request.cls.sb._needs_tearDown:
request.cls.sb.tearDown()
request.cls.sb._needs_tearDown = False
else:
sb = BaseClass("base_method")
sb.setUp()
sb._needs_tearDown = True
sb_config._sb_node[request.node.nodeid] = sb
yield sb
if sb._needs_tearDown:
sb.tearDown()
sb._needs_tearDown = False
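# Illustrative use of the "sb" fixture in a test module (hypothetical test,
# not part of this plugin); open() and assert_element() are standard
# BaseCase methods:
#
#     def test_example(sb):
#         sb.open("https://example.com")
#         sb.assert_element("h1")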
@pytest.mark.hookwrapper
def pytest_runtest_makereport(item, call):
pytest_html = item.config.pluginmanager.getplugin('html')
outcome = yield
report = outcome.get_result()
if pytest_html and report.when == 'call':
try:
extra_report = None
if hasattr(item, "_testcase"):
extra_report = item._testcase._html_report_extra
elif hasattr(item.instance, "sb") or (
item.nodeid in sb_config._sb_node):
if not hasattr(item.instance, "sb"):
sb_node = sb_config._sb_node[item.nodeid]
else:
sb_node = item.instance.sb
test_id = item.nodeid
if not test_id:
test_id = "unidentified_TestCase"
test_id = test_id.replace(' ', '_')
if '[' in test_id:
import re
test_id_intro = test_id.split('[')[0]
parameter = test_id.split('[')[1]
parameter = re.sub(re.compile(r'\W'), '', parameter)
test_id = test_id_intro + "__" + parameter
test_id = test_id.replace('/', '.').replace('\\', '.')
test_id = test_id.replace('::', '.').replace('.py', '')
sb_node._sb_test_identifier = test_id
if sb_node._needs_tearDown:
sb_node.tearDown()
sb_node._needs_tearDown = False
extra_report = sb_node._html_report_extra
else:
return
extra = getattr(report, 'extra', [])
if len(extra_report) > 1 and extra_report[1]["content"]:
report.extra = extra + extra_report
except Exception:
pass
| mdmintz/SeleniumBase | seleniumbase/plugins/pytest_plugin.py | Python | mit | 34,757 | 0 |
"""Get ride details and liveboard details for NMBS (Belgian railway)."""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_NAME,
CONF_SHOW_ON_MAP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "NMBS"
DEFAULT_ICON = "mdi:train"
DEFAULT_ICON_ALERT = "mdi:alert-octagon"
CONF_STATION_FROM = "station_from"
CONF_STATION_TO = "station_to"
CONF_STATION_LIVE = "station_live"
CONF_EXCLUDE_VIAS = "exclude_vias"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STATION_FROM): cv.string,
vol.Required(CONF_STATION_TO): cv.string,
vol.Optional(CONF_STATION_LIVE): cv.string,
vol.Optional(CONF_EXCLUDE_VIAS, default=False): cv.boolean,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean,
}
)
def get_time_until(departure_time=None):
"""Calculate the time between now and a train's departure time."""
if departure_time is None:
return 0
delta = dt_util.utc_from_timestamp(int(departure_time)) - dt_util.now()
return round((delta.total_seconds() / 60))
def get_delay_in_minutes(delay=0):
"""Get the delay in minutes from a delay in seconds."""
return round((int(delay) / 60))
def get_ride_duration(departure_time, arrival_time, delay=0):
"""Calculate the total travel time in minutes."""
duration = dt_util.utc_from_timestamp(
int(arrival_time)
) - dt_util.utc_from_timestamp(int(departure_time))
duration_time = int(round((duration.total_seconds() / 60)))
return duration_time + get_delay_in_minutes(delay)
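# Worked example of the helpers above (hypothetical epoch timestamps):
# with departure_time=1600000000, arrival_time=1600003600 and delay=120,
# get_ride_duration returns 60 + 2 = 62 minutes.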
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NMBS sensor with iRail API."""
from pyrail import iRail
api_client = iRail()
name = config[CONF_NAME]
show_on_map = config[CONF_SHOW_ON_MAP]
station_from = config[CONF_STATION_FROM]
station_to = config[CONF_STATION_TO]
station_live = config.get(CONF_STATION_LIVE)
excl_vias = config[CONF_EXCLUDE_VIAS]
sensors = [
NMBSSensor(api_client, name, show_on_map, station_from, station_to, excl_vias)
]
if station_live is not None:
sensors.append(NMBSLiveBoard(api_client, station_live))
add_entities(sensors, True)
class NMBSLiveBoard(Entity):
"""Get the next train from a station's liveboard."""
def __init__(self, api_client, live_station):
"""Initialize the sensor for getting liveboard data."""
self._station = live_station
self._api_client = api_client
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the sensor default name."""
return "NMBS Live"
@property
def icon(self):
"""Return the default icon or an alert icon if delays."""
if self._attrs and int(self._attrs["delay"]) > 0:
return DEFAULT_ICON_ALERT
return DEFAULT_ICON
@property
def state(self):
"""Return sensor state."""
return self._state
@property
def device_state_attributes(self):
"""Return the sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["delay"])
departure = get_time_until(self._attrs["time"])
attrs = {
"departure": f"In {departure} minutes",
"extra_train": int(self._attrs["isExtra"]) > 0,
"vehicle_id": self._attrs["vehicle"],
"monitored_station": self._station,
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
def update(self):
"""Set the state equal to the next departure."""
liveboard = self._api_client.get_liveboard(self._station)
next_departure = liveboard["departures"]["departure"][0]
self._attrs = next_departure
self._state = "Track {} - {}".format(
next_departure["platform"], next_departure["station"]
)
class NMBSSensor(Entity):
"""Get the the total travel time for a given connection."""
def __init__(
self, api_client, name, show_on_map, station_from, station_to, excl_vias
):
"""Initialize the NMBS connection sensor."""
self._name = name
self._show_on_map = show_on_map
self._api_client = api_client
self._station_from = station_from
self._station_to = station_to
self._excl_vias = excl_vias
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return "min"
@property
def icon(self):
"""Return the sensor default icon or an alert icon if any delay."""
if self._attrs:
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
if delay > 0:
return "mdi:alert-octagon"
return "mdi:train"
@property
def device_state_attributes(self):
"""Return sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
departure = get_time_until(self._attrs["departure"]["time"])
attrs = {
"departure": f"In {departure} minutes",
"destination": self._station_to,
"direction": self._attrs["departure"]["direction"]["name"],
"platform_arriving": self._attrs["arrival"]["platform"],
"platform_departing": self._attrs["departure"]["platform"],
"vehicle_id": self._attrs["departure"]["vehicle"],
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if self._show_on_map and self.station_coordinates:
attrs[ATTR_LATITUDE] = self.station_coordinates[0]
attrs[ATTR_LONGITUDE] = self.station_coordinates[1]
if self.is_via_connection and not self._excl_vias:
via = self._attrs["vias"]["via"][0]
attrs["via"] = via["station"]
attrs["via_arrival_platform"] = via["arrival"]["platform"]
attrs["via_transfer_platform"] = via["departure"]["platform"]
attrs["via_transfer_time"] = get_delay_in_minutes(
via["timeBetween"]
) + get_delay_in_minutes(via["departure"]["delay"])
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def station_coordinates(self):
"""Get the lat, long coordinates for station."""
if self._state is None or not self._attrs:
return []
latitude = float(self._attrs["departure"]["stationinfo"]["locationY"])
longitude = float(self._attrs["departure"]["stationinfo"]["locationX"])
return [latitude, longitude]
@property
def is_via_connection(self):
"""Return whether the connection goes through another station."""
if not self._attrs:
return False
return "vias" in self._attrs and int(self._attrs["vias"]["number"]) > 0
def update(self):
"""Set the state to the duration of a connection."""
connections = self._api_client.get_connections(
self._station_from, self._station_to
)
if int(connections["connection"][0]["departure"]["left"]) > 0:
next_connection = connections["connection"][1]
else:
next_connection = connections["connection"][0]
self._attrs = next_connection
if self._excl_vias and self.is_via_connection:
_LOGGER.debug(
"Skipping update of NMBSSensor \
because this connection is a via"
)
return
duration = get_ride_duration(
next_connection["departure"]["time"],
next_connection["arrival"]["time"],
next_connection["departure"]["delay"],
)
self._state = duration
| joopert/home-assistant | homeassistant/components/nmbs/sensor.py | Python | apache-2.0 | 8,620 | 0.000232 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('ordenes', '0008_tecnico_orden'),
]
operations = [
migrations.AlterField(
model_name='concepto',
name='nombre',
field=models.CharField(max_length=100, null=True, verbose_name=b'Concepto', blank=True),
),
]
| zaresdelweb/tecnoservicio | tecnoservicio/ordenes/migrations/0009_auto_20150513_1841.py | Python | bsd-3-clause | 449 | 0.002227 |
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Keith Dart
# Contact: email-sig@python.org
"""Class representing application/* type MIME documents."""
__all__ = ["MIMEApplication"]
from email import encoders
from email.mime.nonmultipart import MIMENonMultipart
class MIMEApplication(MIMENonMultipart):
"""Class for generating application/* MIME documents."""
def __init__(self, _data, _subtype='octet-stream',
_encoder=encoders.encode_base64, **_params):
"""Create an application/* type MIME document.
_data is a string containing the raw application data.
_subtype is the MIME content type subtype, defaulting to
'octet-stream'.
_encoder is a function which will perform the actual encoding for
transport of the application data, defaulting to base64 encoding.
Any additional keyword arguments are passed to the base class
constructor, which turns them into parameters on the Content-Type
header.
"""
if _subtype is None:
raise TypeError('Invalid application MIME subtype')
MIMENonMultipart.__init__(self, 'application', _subtype, **_params)
self.set_payload(_data)
_encoder(self)
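# Illustrative usage (hypothetical payload and filename):
#
#     from email.mime.application import MIMEApplication
#     part = MIMEApplication('%PDF-1.4 ...', _subtype='pdf',
#                            Name='report.pdf')
#     part['Content-Disposition'] = 'attachment; filename="report.pdf"'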
| huran2014/huran.github.io | wot_gateway/usr/lib/python2.7/email/mime/application.py | Python | gpl-2.0 | 1,256 | 0 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# repolist.py file is part of slpkg.
# Copyright 2014-2021 Dimitris Zlatanidis <d.zlatanidis@gmail.com>
# All rights reserved.
# Slpkg is a user-friendly package manager for Slackware installations
# https://gitlab.com/dslackw/slpkg
# Slpkg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from slpkg.messages import Msg
from slpkg.repositories import Repo
from slpkg.__metadata__ import MetaData as _meta_
class RepoList:
"""List of repositories
"""
def __init__(self):
self.meta = _meta_
self.green = self.meta.color["GREEN"]
self.red = self.meta.color["RED"]
self.grey = self.meta.color["GREY"]
self.endc = self.meta.color["ENDC"]
self.msg = Msg()
self.all_repos = Repo().default_repository()
self.all_repos["slack"] = Repo().slack()
self.all_repos.update(Repo().custom_repository())
def repos(self):
"""View or enabled or disabled repositories
"""
def_cnt, cus_cnt = 0, 0
self.msg.template(78)
print("{0}{1}{2}{3}{4}{5}{6}".format(
"| Repo id", " " * 2,
"Repo URL", " " * 44,
"Default", " " * 3,
"Status"))
self.msg.template(78)
for repo_id, repo_URL in sorted(self.all_repos.items()):
status, COLOR = "disabled", self.red
default = "yes"
if len(repo_URL) > 49:
repo_URL = repo_URL[:48] + "~"
if repo_id in self.meta.repositories:
def_cnt += 1
status, COLOR = "enabled", self.green
if repo_id not in self.meta.default_repositories:
cus_cnt += 1
default = "no"
print(" {0}{1}{2}{3}{4}{5}{6}{7:>8}{8}".format(
repo_id, " " * (9 - len(repo_id)),
repo_URL, " " * (52 - len(repo_URL)),
default, " " * (8 - len(default)),
COLOR, status, self.endc))
print("\nRepositories summary")
print("=" * 79)
print(f"{self.grey}{def_cnt}/{len(self.all_repos)} enabled default "
f"repositories and {cus_cnt} custom.")
print("Edit the file '/etc/slpkg/repositories.conf' for enable "
"and disable default\nrepositories or run 'slpkg "
f"repo-enable' command.{self.endc}")
| dslackw/slpkg | slpkg/repolist.py | Python | gpl-3.0 | 2,957 | 0 |
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robot.utils import OrderedDict, compress_text
class StringIndex(int):
pass
class StringCache(object):
_compress_threshold = 80
_use_compressed_threshold = 1.1
_zero_index = StringIndex(0)
def __init__(self):
self._cache = OrderedDict({'*': self._zero_index})
def add(self, text):
if not text:
return self._zero_index
text = self._encode(text)
if text not in self._cache:
self._cache[text] = StringIndex(len(self._cache))
return self._cache[text]
def _encode(self, text):
raw = self._raw(text)
if raw in self._cache or len(raw) < self._compress_threshold:
return raw
compressed = compress_text(text)
if len(compressed) * self._use_compressed_threshold < len(raw):
return compressed
return raw
def _raw(self, text):
return '*'+text
def dump(self):
return tuple(self._cache)
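# Minimal usage sketch (hypothetical strings):
#
#     cache = StringCache()
#     i = cache.add('hello')   # StringIndex(1); index 0 is reserved for ''
#     j = cache.add('hello')   # same index again: texts are deduplicated
#     cache.dump()             # ('*', '*hello') - raw texts get a '*' prefix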
| snyderr/robotframework | src/robot/reporting/stringcache.py | Python | apache-2.0 | 1,574 | 0 |
# -*- coding: utf-8 -*-
__author__ = 'LIWEI240'
"""
Constants definition
"""
class Const(object):
class RetCode(object):
OK = 0
InvalidParam = -1
NotExist = -2
        ParseError = -3
| lwldcr/keyboardman | common/const.py | Python | gpl-3.0 | 214 | 0.009346 |
from mist.mist_job import *
class SimpleStreaming(MistJob, WithStreamingContext, WithPublisher):
def execute(self, parameters):
import time
def takeAndPublish(time, rdd):
taken = rdd.take(11)
self.publisher.publish("-------------------------------------------")
self.publisher.publish("Time: %s" % time)
self.publisher.publish("-------------------------------------------")
self.publisher.publish(str(taken))
ssc = self.streaming_context
rddQueue = []
for i in range(500):
rddQueue += [ssc.sparkContext.parallelize([j for j in range(1, 1001)], 10)]
# Create the QueueInputDStream and use it do some processing
inputStream = ssc.queueStream(rddQueue)
mappedStream = inputStream.map(lambda x: (x % 10, 1))
reducedStream = mappedStream.reduceByKey(lambda a, b: a + b)
#reducedStream.pprint()
reducedStream.foreachRDD(takeAndPublish)
ssc.start()
time.sleep(15)
ssc.stop(stopSparkContext=False, stopGraceFully=False)
result = "success"
return {"result": result}
| KineticCookie/mist | examples-python/simple_streaming.py | Python | apache-2.0 | 1,188 | 0.004209 |
"""
Rotate an array of n elements to the right by k steps.
For example, with n = 7 and k = 3, the array [1,2,3,4,5,6,7] is rotated to
[5,6,7,1,2,3,4].
Note:
Try to come up as many solutions as you can, there are at least 3 different
ways to solve this problem.
Hint:
Could you do it in-place with O(1) extra space?
Related problem: Reverse Words in a String II
Credits:
Special thanks to @Freezen for adding this problem and creating all test
cases.
Show Company Tags
Show Tags
Show Similar Problems
"""
class Solution(object):
def rotate(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: void Do not return anything, modify nums in-place instead.
"""
n=len(nums)
k%=n
nums[:]=nums[n-k:]+nums[:n-k]
"""
Note:
def rotate(self, nums, k):
n = len(nums)
k %= n
self.reverse(nums, 0, n - k)
self.reverse(nums, n - k, n)
self.reverse(nums, 0, n)
def reverse(self, nums, start, end):
for x in range(start, (start + end) / 2):
nums[x] ^= nums[start + end - x - 1]
nums[start + end - x - 1] ^= nums[x]
nums[x] ^= nums[start + end - x - 1]
"""
| ufjfeng/leetcode-jf-soln | python/189_rotate_array.py | Python | mit | 1,252 | 0.003994 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import cstr
import mimetypes, json
from werkzeug.wrappers import Response
from frappe.website.context import get_context
from frappe.website.utils import scrub_relative_urls, get_home_page, can_cache, delete_page_cache
from frappe.website.permissions import clear_permissions
from frappe.website.router import clear_sitemap
class PageNotFoundError(Exception): pass
def render(path, http_status_code=None):
"""render html page"""
path = resolve_path(path.strip("/"))
try:
data = render_page(path)
except frappe.DoesNotExistError, e:
doctype, name = get_doctype_from_path(path)
if doctype and name:
path = "print"
frappe.local.form_dict.doctype = doctype
frappe.local.form_dict.name = name
elif doctype:
path = "list"
frappe.local.form_dict.type = doctype
else:
path = "404"
http_status_code = e.http_status_code
try:
data = render_page(path)
except frappe.PermissionError, e:
data, http_status_code = render_403(e, path)
except frappe.PermissionError, e:
data, http_status_code = render_403(e, path)
except Exception:
path = "error"
data = render_page(path)
http_status_code = 500
return build_response(path, data, http_status_code or 200)
def render_403(e, pathname):
path = "message"
frappe.local.message = """<p><strong>{error}</strong></p>
<p>
<a href="/login?redirect-to=/{pathname}" class="btn btn-primary>{login}</a>
</p>""".format(error=cstr(e), login=_("Login"), pathname=pathname)
frappe.local.message_title = _("Not Permitted")
return render_page(path), e.http_status_code
def get_doctype_from_path(path):
doctypes = frappe.db.sql_list("select name from tabDocType")
parts = path.split("/")
doctype = parts[0]
name = parts[1] if len(parts) > 1 else None
if doctype in doctypes:
return doctype, name
# try scrubbed
doctype = doctype.replace("_", " ").title()
if doctype in doctypes:
return doctype, name
return None, None
def build_response(path, data, http_status_code):
# build response
response = Response()
response.data = set_content_type(response, data, path)
response.status_code = http_status_code
response.headers[b"X-Page-Name"] = path.encode("utf-8")
response.headers[b"X-From-Cache"] = frappe.local.response.from_cache or False
return response
def render_page(path):
"""get page html"""
cache_key = ("page_context:{}" if is_ajax() else "page:{}").format(path)
out = None
# try memcache
if can_cache():
out = frappe.cache().get_value(cache_key)
if out and is_ajax():
out = out.get("data")
if out:
frappe.local.response.from_cache = True
return out
return build(path)
def build(path):
if not frappe.db:
frappe.connect()
build_method = (build_json if is_ajax() else build_page)
try:
return build_method(path)
except frappe.DoesNotExistError:
hooks = frappe.get_hooks()
if hooks.website_catch_all:
path = hooks.website_catch_all[0]
return build_method(path)
else:
raise
def build_json(path):
return get_context(path).data
def build_page(path):
context = get_context(path)
html = frappe.get_template(context.base_template_path).render(context)
html = scrub_relative_urls(html)
if can_cache(context.no_cache):
frappe.cache().set_value("page:" + path, html)
return html
def is_ajax():
return getattr(frappe.local, "is_ajax", False)
def resolve_path(path):
if not path:
path = "index"
if path.endswith('.html'):
path = path[:-5]
if path == "index":
path = get_home_page()
return path
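# Illustrative behaviour of resolve_path (not part of the original frappe
# source; the home page itself comes from get_home_page()):
#
#     resolve_path("about.html")  -> "about"
#     resolve_path("")            -> the site home page
#     resolve_path("index")       -> the site home page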
def set_content_type(response, data, path):
if isinstance(data, dict):
response.headers[b"Content-Type"] = b"application/json; charset: utf-8"
data = json.dumps(data)
return data
response.headers[b"Content-Type"] = b"text/html; charset: utf-8"
if "." in path:
content_type, encoding = mimetypes.guess_type(path)
if not content_type:
raise frappe.UnsupportedMediaType("Cannot determine content type of {}".format(path))
response.headers[b"Content-Type"] = content_type.encode("utf-8")
return data
def clear_cache(path=None):
if path:
delete_page_cache(path)
else:
clear_sitemap()
frappe.clear_cache("Guest")
clear_permissions()
for method in frappe.get_hooks("website_clear_cache"):
frappe.get_attr(method)(path)
| gangadharkadam/johnfrappe | frappe/website/render.py | Python | mit | 4,424 | 0.028707 |
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import unittest
import azure.mgmt.servicebus.models
from azure.mgmt.servicebus.models import SBNamespace
from azure.common.credentials import ServicePrincipalCredentials
from devtools_testutils import AzureMgmtTestCase, ResourceGroupPreparer
class MgmtServiceBusTest(AzureMgmtTestCase):
def setUp(self):
super(MgmtServiceBusTest, self).setUp()
self.servicebus_client = self.create_mgmt_client(
azure.mgmt.servicebus.ServiceBusManagementClient
)
def process(self, result):
pass
@ResourceGroupPreparer()
def test_sb_namespace_available(self, resource_group, location):
# Check the namespace availability
availabilityresult = self.servicebus_client.namespaces.check_name_availability_method("Testingthenamespacenameforpython")
self.assertEqual(availabilityresult.name_available, True)
#------------------------------------------------------------------------------
if __name__ == '__main__':
    unittest.main()
| lmazuel/azure-sdk-for-python | azure-mgmt-servicebus/tests/test_azure_mgmt_servicebus_check_name_availability.py | Python | mit | 1,330 | 0.003765 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from urbansim.abstract_variables.abstract_travel_time_variable import abstract_travel_time_variable
class travel_time_hbw_am_drive_alone_from_home_to_work_alt(abstract_travel_time_variable):
"""travel_time_hbw_am_drive_alone_from_home_to_work"""
agent_zone_id = "psrc.household.home_zone_id_from_grid_id"
location_zone_id = "urbansim.zone.zone_id"
travel_data_attribute = "urbansim.travel_data.am_single_vehicle_to_work_travel_time"
from numpy import ma, array
from opus_core.tests import opus_unittest
from urbansim.variable_test_toolbox import VariableTestToolbox
from psrc.opus_package_info import package
from urbansim.datasets.zone_dataset import ZoneDataset
from urbansim.datasets.household_dataset import HouseholdDataset
from psrc.datasets.person_x_zone_dataset import PersonXZoneDataset
from psrc.datasets.person_dataset import PersonDataset
class Tests(opus_unittest.OpusTestCase):
variable_name = "psrc.household_x_zone.travel_time_hbw_am_drive_alone_from_home_to_work_alt"
def test_my_inputs(self):
values = VariableTestToolbox().compute_variable(self.variable_name, \
{
"household":{
"household_id":array([1,2,3,4,5]),
"home_zone_id_from_grid_id":array([3, 1, 1, 1, 2]),
},
"zone":{
"zone_id":array([1, 2, 3]),
},
"travel_data":{
"from_zone_id": array([3, 3, 1, 1, 1, 2, 2, 3, 2]),
"to_zone_id": array([1, 3, 1, 3, 2, 1, 3, 2, 2]),
"am_single_vehicle_to_work_travel_time":array([1.1, 2.2, 3.3, 4.4, 0.5, 0.7, 8.7, 7.8, 1.0])}},
dataset = "household_x_zone")
default_value = travel_time_hbw_am_drive_alone_from_home_to_work_alt.default_value
should_be = array([[1.1, 7.8, 2.2],
[3.3, 0.5, 4.4], [3.3, 0.5, 4.4],
[3.3, 0.5, 4.4], [0.7, 1.0, 8.7]])
self.assertEqual(ma.allclose(values, should_be, rtol=1e-3), \
True, msg = "Error in " + self.variable_name)
if __name__=='__main__':
    opus_unittest.main()
| christianurich/VIBe2UrbanSim | 3rdparty/opus/src/psrc/household_x_zone/travel_time_hbw_am_drive_alone_from_home_to_work_alt.py | Python | gpl-2.0 | 2,397 | 0.01627 |
from pymongo import MongoClient
import json
import requests
import time
from datetime import datetime
def subredditInfo(sr, limit=100, sorting="top", period="day",
user_agent="ChicagoSchool's scraper", **kwargs):
"""retrieves X (max 100) amount of stories in a subreddit
'sorting' is whether or not the sorting of the reddit should be customized or not,
if it is: Allowed passing params/queries such as t=hour, week, month, year or all"""
#query to send
parameters = {"limit": limit,}
parameters.update(kwargs)
url = "http://www.reddit.com/r/%s/%s.json?limit=%d&t=%s" % (sr, sorting, limit, period)
r = requests.get(url, headers={"user-agent": user_agent})
j = json.loads(r.text)
#return list of stories
stories = []
for story in j["data"]["children"]:
stories.append(story)
return stories
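# Illustrative call (not part of the original scraper; requires network
# access and is subject to reddit's rate limits):
#
#     stories = subredditInfo("python", limit=5, period="week")
#     for story in stories:
#         print story["data"]["title"]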
def extractContent(link, sub, limit=100, sorting="top",
user_agent="ChicagoSchool's scraper", **kwargs):
url = "http://www.reddit.com/%s.json?sort=%s&limit=%d" % (link, sorting, limit)
r = requests.get(url, headers={"user-agent": user_agent})
j = json.loads(r.text)
date = datetime.fromtimestamp(j[0]["data"]["children"][0]["data"]["created"])
db_data = {"date": date, "link": link, "subreddit": sub, "content": j}
return db_data
def dbScraper(db_n, col_n, sub_l):
"""scrapes all the threads for a subreddit and stores them in a
mongodb db"""
m_ind = 0
t_f = datetime.now()
sub_ln = len(sub_l)
client = MongoClient()
db = client[db_n]
col = db[col_n]
while True:
t_1 = datetime.now()
for i, s in enumerate(sub_l):
try:
sub_info = subredditInfo(s)
except Exception as e:
print e
time.sleep(300)
sub_info = subredditInfo(s)
time.sleep(2)
sub_info_ln = len(sub_info)
for j, l in enumerate(sub_info):
link = l["data"]["permalink"]
try:
content = extractContent(link, s)
col.insert(content)
except Exception as e:
print e
time.sleep(60)
try:
content = extractContent(link, s)
col.insert(content)
except Exception as e:
print e
time.sleep(300)
print i * 100. / sub_ln, j * 100. / sub_info_ln, m_ind, i, j, datetime.now() - t_1, datetime.now() - t_f
time.sleep(2)
# now we wait until a full day has passed since we started our search
t_diff = datetime.now() - t_1
while t_diff.days < 1:
time.sleep(60)
t_diff = datetime.now() - t_1
| lbybee/reddit_spelling_index | reddit_db_scraper.py | Python | gpl-2.0 | 2,876 | 0.004868 |
#
# ElementTree
# $Id: ElementInclude.py 3225 2007-08-27 21:32:08Z fredrik $
#
# limited xinclude support for element trees
#
# history:
# 2003-08-15 fl created
# 2003-11-14 fl fixed default loader
#
# Copyright (c) 2003-2004 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2007 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
##
# Limited XInclude support for the ElementTree package.
##
import ElementTree
def copy(elem):
e = ElementTree.Element(elem.tag, elem.attrib)
e.text = elem.text
e.tail = elem.tail
e[:] = elem
return e
XINCLUDE = "{http://www.w3.org/2001/XInclude}"
XINCLUDE_INCLUDE = XINCLUDE + "include"
XINCLUDE_FALLBACK = XINCLUDE + "fallback"
##
# Fatal include error.
class FatalIncludeError(SyntaxError):
pass
##
# Default loader. This loader reads an included resource from disk.
#
# @param href Resource reference.
# @param parse Parse mode. Either "xml" or "text".
# @param encoding Optional text encoding.
# @return The expanded resource. If the parse mode is "xml", this
# is an ElementTree instance. If the parse mode is "text", this
# is a Unicode string. If the loader fails, it can return None
# or raise an IOError exception.
# @throws IOError If the loader fails to load the resource.
def default_loader(href, parse, encoding=None):
file = open(href)
if parse == "xml":
data = ElementTree.parse(file).getroot()
else:
data = file.read()
if encoding:
data = data.decode(encoding)
file.close()
return data
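# Illustrative custom loader (not part of the original module): resolves
# hrefs relative to a base directory before delegating to default_loader,
# matching the loader interface documented above.
#
#     import os
#     def relative_loader(href, parse, encoding=None):
#         return default_loader(os.path.join("includes", href), parse, encoding)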
##
# Expand XInclude directives.
#
# @param elem Root element.
# @param loader Optional resource loader. If omitted, it defaults
# to {@link default_loader}. If given, it should be a callable
# that implements the same interface as <b>default_loader</b>.
# @throws FatalIncludeError If the function fails to include a given
# resource, or if the tree contains malformed XInclude elements.
# @throws IOError If the function fails to load a given resource.
def include(elem, loader=None):
if loader is None:
loader = default_loader
# look for xinclude elements
i = 0
while i < len(elem):
e = elem[i]
if e.tag == XINCLUDE_INCLUDE:
# process xinclude directive
href = e.get("href")
parse = e.get("parse", "xml")
if parse == "xml":
node = loader(href, parse)
if node is None:
raise FatalIncludeError(
"cannot load %r as %r" % (href, parse)
)
node = copy(node)
if e.tail:
node.tail = (node.tail or "") + e.tail
elem[i] = node
elif parse == "text":
text = loader(href, parse, e.get("encoding"))
if text is None:
raise FatalIncludeError(
"cannot load %r as %r" % (href, parse)
)
if i:
node = elem[i-1]
node.tail = (node.tail or "") + text
else:
elem.text = (elem.text or "") + text + (e.tail or "")
del elem[i]
continue
else:
raise FatalIncludeError(
"unknown parse type in xi:include tag (%r)" % parse
)
elif e.tag == XINCLUDE_FALLBACK:
raise FatalIncludeError(
"xi:fallback tag must be child of xi:include (%r)" % e.tag
)
else:
include(e, loader)
i = i + 1
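# Illustrative usage (not part of the original module):
#
#     tree = ElementTree.parse("document.xml")
#     include(tree.getroot())    # expands xi:include elements in place
#     tree.write("expanded.xml")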
| SMALLplayer/smallplayer-image-creator | storage/.xbmc/addons/script.module.elementtree/lib/elementtree/ElementInclude.py | Python | gpl-2.0 | 5,051 | 0.00099 |
# =============================================================================
# 2013+ Copyright (c) Alexey Ivanov <rbtz@ph34r.me>
# All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# =============================================================================
from __future__ import absolute_import
| reverbrain/elliptics | recovery/elliptics_recovery/types/__init__.py | Python | lgpl-3.0 | 769 | 0 |
import click
from arrow.commands.remote.add_organism import cli as add_organism
from arrow.commands.remote.add_track import cli as add_track
from arrow.commands.remote.delete_organism import cli as delete_organism
from arrow.commands.remote.delete_track import cli as delete_track
from arrow.commands.remote.update_organism import cli as update_organism
from arrow.commands.remote.update_track import cli as update_track
@click.group()
def cli():
pass
cli.add_command(add_organism)
cli.add_command(add_track)
cli.add_command(delete_organism)
cli.add_command(delete_track)
cli.add_command(update_organism)
cli.add_command(update_track)
| galaxy-genome-annotation/python-apollo | arrow/commands/cmd_remote.py | Python | mit | 643 | 0 |
import numpy as np
from ase import Atoms
from gpaw import GPAW, FermiDirac
from gpaw.response.df import DielectricFunction
from gpaw.test import equal, findpeak
GS = 1
ABS = 1
if GS:
cluster = Atoms('Au2', [(0, 0, 0), (0, 0, 2.564)])
cluster.set_cell((6, 6, 6), scale_atoms=False)
cluster.center()
calc = GPAW(mode='pw',
dtype=complex,
xc='RPBE',
nbands=16,
eigensolver='rmm-diis',
occupations=FermiDirac(0.01))
cluster.set_calculator(calc)
cluster.get_potential_energy()
calc.diagonalize_full_hamiltonian(nbands=24, scalapack=True)
calc.write('Au2.gpw', 'all')
if ABS:
df = DielectricFunction('Au2.gpw',
frequencies=np.linspace(0, 14, 141),
hilbert=not True,
eta=0.1,
ecut=10)
b0, b = df.get_dielectric_function(filename=None,
direction='z')
a0, a = df.get_polarizability(filename=None,
direction='z')
a0_ws, a_ws = df.get_polarizability(filename=None,
wigner_seitz_truncation=True,
direction='z')
w0_ = 5.60491055
I0_ = 244.693028
w_ = 5.696528390
I_ = 207.8
w, I = findpeak(np.linspace(0, 14., 141), b0.imag)
equal(w, w0_, 0.05)
equal(6**3 * I / (4 * np.pi), I0_, 0.5)
w, I = findpeak(np.linspace(0, 14., 141), a0.imag)
equal(w, w0_, 0.05)
equal(I, I0_, 0.5)
w, I = findpeak(np.linspace(0, 14., 141), a0_ws.imag)
equal(w, w0_, 0.05)
equal(I, I0_, 0.5)
w, I = findpeak(np.linspace(0, 14., 141), b.imag)
equal(w, w_, 0.05)
equal(6**3 * I / (4 * np.pi), I_, 0.5)
w, I = findpeak(np.linspace(0, 14., 141), a.imag)
equal(w, w_, 0.05)
equal(I, I_, 0.5)
# The Wigner-Seitz truncation does not give exactly the same result for small cells
w, I = findpeak(np.linspace(0, 14., 141), a_ws.imag)
equal(w, w_, 0.2)
equal(I, I_, 8.0)
| robwarm/gpaw-symm | gpaw/test/au02_absorption.py | Python | gpl-3.0 | 2,114 | 0.003784 |
from pyrs import schema
import six
from . import lib
from . import response
class Error(Exception):
"""
This is the base exception of this framework.
    The response built from this exception will be JSON data
"""
#: HTTP status code (default=500)
status = 500
#: HTTP Response headers, (default None processed as empty)
headers = None
#: Error code should be a string. If it's not specified the class fully
#: qualified name will be used
error = None
#: Description of error. Should give details about the error
#: In the message it will appearing as error_description
description = None
#: Reference for this error. You can pointing out a documentation which
#: gives more information about how could this error happen and how could
#: be possible to avoid
uri = None
#: None used as empty dict. Gives extra information about this error which
#: could be parsed by the consumer of API.
details = None
#: You can specify your schema class for validating your message
#: By default the application default error schema the `ErrorSchema` will
#: be used
schema = None
def __init__(self, *args, **details):
super(Error, self).__init__(*args)
if six.PY3:
self.traceback = lib.parse_traceback(self.__traceback__)
cause = self.__cause__ or self.__context__
else:
self.traceback = lib.get_traceback()
cause = None
self.cause = details.pop('cause', cause)
self.details = details
def get_headers(self):
"""
        Return the `headers` property, or an empty dict if it is not set.
        Override this method to provide context-dependent headers.
"""
return self.headers or {}
def get_status(self):
"""
        Return the `status` property, which is treated as the HTTP status
        code. Override this method to provide a context-dependent status
        code.
"""
return self.status
def get_message(self, debug=False):
"""
        Return a dictionary which will be treated as the response body.
        The message should conform to the `ErrorSchema`.
"""
res = {
'error': self.error or lib.get_fqname(self)
}
if self.args:
res['message'] = self.args[0]
if self.description:
res['error_description'] = self.description
if self.uri:
res['error_uri'] = self.uri
details = self.get_details(debug)
if details:
res['details'] = details
return res
def get_details(self, debug=False):
"""
        Return detailed information about the error and its context; by
        default an empty dictionary. When `debug` is true, the details also
        include the traceback and the positional arguments of the
        exception.
        As part of the message, this should conform to the `ErrorSchema`.
"""
details = {}
if self.details:
details = self.details.copy()
if debug:
details['traceback'] = self.traceback
details['args'] = self.args[1:]
return details
@classmethod
def wrap(cls, original):
"""
        Wrap the exception and give back an `Error` instance. The created
        `Error` instance's `error` property is set to the fully qualified
        name of the `original` exception.
        It can be used on `Error` instances as well, though this is not
        recommended.
"""
ex = cls(*original.args, cause=original)
ex.error = lib.get_fqname(original)
return ex
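# Illustrative usage (not part of the original module): wrapping a plain
# exception records its fully qualified class name as the error code.
#
#     try:
#         raise KeyError('missing')
#     except KeyError as ex:
#         error = Error.wrap(ex)    # error.error is KeyError's qualified name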
class ClientError(Error):
"""
Generic Client Error. Normally the client errors have 4xx status codes.
"""
status = 400
class ValidationError(Error):
status = 500
error = 'validation_error'
class InputValidationError(Error):
status = 400
error = 'invalid_request_format'
class DetailsSchema(schema.Object):
"""
Details part of the error schema. Additional properties possible.
"""
traceback = schema.Array()
args = schema.Array()
class Attrs:
additional = True
class ErrorSchema(schema.Object):
"""
    Describes what the error response should look like. The goal of this
    schema is a minimalistic but usable error response.
"""
error = schema.String(required=True)
error_description = schema.String()
error_uri = schema.String()
message = schema.String()
details = DetailsSchema()
def dump(self, ex):
msg = ex.get_message(self['debug'])
return super(ErrorSchema, self).dump(msg)
class ErrorResponse(response.Response):
def setup(self):
if not isinstance(self.content, Error):
self.content = Error.wrap(self.content)
self.status = self.content.get_status()
self.headers = self.content.get_headers()
if self.content.schema:
self.processor = self.content.schema(debug=self.app['debug'])
else:
self.processor = ErrorSchema(debug=self.app['debug'])
| palankai/pyrs-resource | pyrs/resource/errors.py | Python | lgpl-3.0 | 5,253 | 0 |
#
# network.py - network configuration install data
#
# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007 Red Hat, Inc.
# 2008, 2009
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author(s): Matt Wilson <ewt@redhat.com>
# Erik Troan <ewt@redhat.com>
# Mike Fulbright <msf@redhat.com>
# Brent Fox <bfox@redhat.com>
# David Cantrell <dcantrell@redhat.com>
# Radek Vykydal <rvykydal@redhat.com>
import string
import shutil
from pyanaconda import iutil
import socket
import os
import time
import threading
import re
import dbus
import IPy
from uuid import uuid4
import itertools
from pyanaconda.simpleconfig import SimpleConfigFile
from blivet.devices import FcoeDiskDevice, iScsiDiskDevice
import blivet.arch
from pyanaconda import nm
from pyanaconda import constants
from pyanaconda.flags import flags, can_touch_runtime_system
from pyanaconda.i18n import _
from gi.repository import NetworkManager
import logging
log = logging.getLogger("anaconda")
sysconfigDir = "/etc/sysconfig"
netscriptsDir = "%s/network-scripts" % (sysconfigDir)
networkConfFile = "%s/network" % (sysconfigDir)
hostnameFile = "/etc/hostname"
ipv6ConfFile = "/etc/sysctl.d/anaconda.conf"
ifcfgLogFile = "/tmp/ifcfg.log"
DEFAULT_HOSTNAME = "localhost.localdomain"
# part of a valid hostname between two periods (cannot start nor end with '-')
# for more info about '(?!-)' and '(?<!-)' see 're' module documentation
HOSTNAME_PART_RE = re.compile(r"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
ifcfglog = None
network_connected = None
network_connected_condition = threading.Condition()
def setup_ifcfg_log():
# Setup special logging for ifcfg NM interface
from pyanaconda import anaconda_log
global ifcfglog
logger = logging.getLogger("ifcfg")
logger.setLevel(logging.DEBUG)
anaconda_log.logger.addFileHandler(ifcfgLogFile, logger, logging.DEBUG)
if os.access("/dev/tty3", os.W_OK):
anaconda_log.logger.addFileHandler("/dev/tty3", logger,
anaconda_log.DEFAULT_TTY_LEVEL,
anaconda_log.TTY_FORMAT,
autoLevel=True)
anaconda_log.logger.forwardToSyslog(logger)
ifcfglog = logging.getLogger("ifcfg")
def check_ip_address(address, version=None):
try:
_ip, ver = IPy.parseAddress(address)
except ValueError:
return False
    if version:
        return version == ver
    # a valid address matches when no particular version was requested
    return True
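# Illustrative behaviour (not part of the original source):
#
#     check_ip_address("192.168.1.1")     -> True
#     check_ip_address("192.168.1.1", 6)  -> False (valid address, wrong version)
#     check_ip_address("not-an-ip")       -> False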
def sanityCheckHostname(hostname):
"""
Check if the given string is (syntactically) a valid hostname.
:param hostname: a string to check
:returns: a pair containing boolean value (valid or invalid) and
an error message (if applicable)
:rtype: (bool, str)
"""
if not hostname:
return (False, _("Hostname cannot be None or an empty string."))
if len(hostname) > 255:
return (False, _("Hostname must be 255 or fewer characters in length."))
validStart = string.ascii_letters + string.digits
if hostname[0] not in validStart:
return (False, _("Hostname must start with a valid character in the "
"ranges 'a-z', 'A-Z', or '0-9'"))
if hostname.endswith("."):
# hostname can end with '.', but the regexp used below would not match
hostname = hostname[:-1]
if not all(HOSTNAME_PART_RE.match(part) for part in hostname.split(".")):
return (False, _("Hostnames can only contain the characters 'a-z', "
"'A-Z', '0-9', '-', or '.', parts between periods "
"must contain something and cannot start or end with "
"'-'."))
return (True, "")
# Return a list of IP addresses for all active devices.
def getIPs():
ipv4_addresses = []
ipv6_addresses = []
for devname in nm.nm_activated_devices():
try:
ipv4_addresses += nm.nm_device_ip_addresses(devname, version=4)
ipv6_addresses += nm.nm_device_ip_addresses(devname, version=6)
except (dbus.DBusException, ValueError) as e:
log.warning("Got an exception trying to get the ip addr "
"of %s: %s", devname, e)
# prefer IPv4 addresses to IPv6 addresses
return ipv4_addresses + ipv6_addresses
# Return the first real non-local IP we find
def getFirstRealIP():
for ip in getIPs():
if ip not in ("127.0.0.1", "::1"):
return ip
return None
def netmask2prefix(netmask):
prefix = 0
while prefix < 33:
if (prefix2netmask(prefix) == netmask):
return prefix
prefix += 1
return prefix
def prefix2netmask(prefix):
""" Convert prefix (CIDR bits) to netmask """
_bytes = []
for _i in range(4):
if prefix >= 8:
_bytes.append(255)
prefix -= 8
else:
_bytes.append(256 - 2**(8-prefix))
prefix = 0
netmask = ".".join(str(byte) for byte in _bytes)
return netmask
# Try to determine what the hostname should be for this system
def getHostname():
hn = None
# First address (we prefer ipv4) of last device (as it used to be) wins
for dev in nm.nm_activated_devices():
addrs = (nm.nm_device_ip_addresses(dev, version=4) +
nm.nm_device_ip_addresses(dev, version=6))
for ipaddr in addrs:
try:
hinfo = socket.gethostbyaddr(ipaddr)
except socket.herror as e:
log.debug("Exception caught trying to get host name of %s: %s", ipaddr, e)
else:
if len(hinfo) == 3:
hn = hinfo[0]
break
if not hn or hn in ('(none)', 'localhost', 'localhost.localdomain'):
hn = socket.gethostname()
if not hn or hn in ('(none)', 'localhost', 'localhost.localdomain'):
hn = DEFAULT_HOSTNAME
return hn
def logIfcfgFile(path, message=""):
content = ""
if os.access(path, os.R_OK):
f = open(path, 'r')
content = f.read()
f.close()
else:
content = "file not found"
ifcfglog.debug("%s%s:\n%s", message, path, content)
def _ifcfg_files(directory):
rv = []
for name in os.listdir(directory):
if name.startswith("ifcfg-"):
if name == "ifcfg-lo":
continue
rv.append(os.path.join(directory,name))
return rv
def logIfcfgFiles(message=""):
ifcfglog.debug("content of files (%s):", message)
for path in _ifcfg_files(netscriptsDir):
ifcfglog.debug("%s:", path)
with open(path, "r") as f:
for line in f:
ifcfglog.debug(" %s", line.strip())
ifcfglog.debug("all settings: %s", nm.nm_get_all_settings())
class IfcfgFile(SimpleConfigFile):
def __init__(self, filename):
SimpleConfigFile.__init__(self, always_quote=True, filename=filename)
self._dirty = False
def read(self, filename=None):
self.reset()
ifcfglog.debug("IfcfFile.read %s", self.filename)
SimpleConfigFile.read(self)
self._dirty = False
def write(self, filename=None, use_tmp=False):
if self._dirty or filename:
# ifcfg-rh is using inotify IN_CLOSE_WRITE event so we don't use
# temporary file for new configuration
ifcfglog.debug("IfcfgFile.write %s:\n%s", self.filename, self.__str__())
SimpleConfigFile.write(self, filename, use_tmp=use_tmp)
self._dirty = False
def set(self, *args):
for (key, data) in args:
if self.get(key) != data:
break
else:
return
ifcfglog.debug("IfcfgFile.set %s: %s", self.filename, args)
SimpleConfigFile.set(self, *args)
self._dirty = True
def unset(self, *args):
for key in args:
if self.get(key):
self._dirty = True
break
else:
return
ifcfglog.debug("IfcfgFile.unset %s: %s", self.filename, args)
SimpleConfigFile.unset(self, *args)
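# Illustrative usage (not part of the original source): IfcfgFile tracks a
# dirty flag, so write() only touches the file when a value actually changed.
#
#     ifcfg = IfcfgFile("/etc/sysconfig/network-scripts/ifcfg-ens3")
#     ifcfg.read()
#     ifcfg.set(('ONBOOT', 'yes'))   # no-op (and no write) if already "yes"
#     ifcfg.write()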
def dumpMissingDefaultIfcfgs():
"""
    Dump missing default ifcfg files for wired devices.
For default auto connections created by NM upon start - which happens
in case of missing ifcfg file - rename the connection using device name
and dump its ifcfg file. (For server, default auto connections will
be turned off in NetworkManager.conf.)
The connection id (and consequently ifcfg file) is set to device name.
Returns list of devices for which ifcfg file was dumped.
"""
rv = []
for devname in nm.nm_devices():
# for each ethernet device
# FIXME add more types (infiniband, bond...?)
if not nm.nm_device_type_is_ethernet(devname):
continue
# check that device has connection without ifcfg file
try:
nm.nm_device_setting_value(devname, "connection", "uuid")
except nm.SettingsNotFoundError:
continue
if find_ifcfg_file_of_device(devname):
continue
try:
nm.nm_update_settings_of_device(devname, [['connection', 'id', devname, None]])
log.debug("network: dumping ifcfg file for default autoconnection on %s", devname)
nm.nm_update_settings_of_device(devname, [['connection', 'autoconnect', False, None]])
log.debug("network: setting autoconnect of %s to False" , devname)
except nm.SettingsNotFoundError:
log.debug("network: no ifcfg file for %s", devname)
rv.append(devname)
return rv
# get a kernel cmdline string for dracut needed for access to storage host
def dracutSetupArgs(networkStorageDevice):
if networkStorageDevice.nic == "default" or ":" in networkStorageDevice.nic:
nic = ifaceForHostIP(networkStorageDevice.host_address)
if not nic:
return ""
else:
nic = networkStorageDevice.nic
if nic not in nm.nm_devices():
log.error('Unknown network interface: %s', nic)
return ""
ifcfg_path = find_ifcfg_file_of_device(nic)
if not ifcfg_path:
log.error("dracutSetupArgs: can't find ifcfg file for %s", nic)
return ""
ifcfg = IfcfgFile(ifcfg_path)
ifcfg.read()
return dracutBootArguments(nic,
ifcfg,
networkStorageDevice.host_address,
getHostname())
def dracutBootArguments(devname, ifcfg, storage_ipaddr, hostname=None):
netargs = set()
if ifcfg.get('BOOTPROTO') == 'ibft':
netargs.add("ip=ibft")
elif storage_ipaddr:
if hostname is None:
hostname = ""
# if using ipv6
if ':' in storage_ipaddr:
if ifcfg.get('DHCPV6C') == "yes":
# XXX combination with autoconf not yet clear,
# support for dhcpv6 is not yet implemented in NM/ifcfg-rh
netargs.add("ip=%s:dhcp6" % devname)
elif ifcfg.get('IPV6_AUTOCONF') == "yes":
netargs.add("ip=%s:auto6" % devname)
elif ifcfg.get('IPV6ADDR'):
ipaddr = "[%s]" % ifcfg.get('IPV6ADDR')
if ifcfg.get('IPV6_DEFAULTGW'):
gateway = "[%s]" % ifcfg.get('IPV6_DEFAULTGW')
else:
gateway = ""
netargs.add("ip=%s::%s:%s:%s:%s:none" % (ipaddr, gateway,
ifcfg.get('PREFIX'), hostname, devname))
else:
if iutil.lowerASCII(ifcfg.get('bootproto')) == 'dhcp':
netargs.add("ip=%s:dhcp" % devname)
else:
if ifcfg.get('GATEWAY'):
gateway = ifcfg.get('GATEWAY')
else:
gateway = ""
netmask = ifcfg.get('netmask')
prefix = ifcfg.get('prefix')
if not netmask and prefix:
netmask = prefix2netmask(int(prefix))
netargs.add("ip=%s::%s:%s:%s:%s:none" % (ifcfg.get('ipaddr'),
gateway, netmask, hostname, devname))
hwaddr = ifcfg.get("HWADDR")
if hwaddr:
netargs.add("ifname=%s:%s" % (devname, hwaddr.lower()))
nettype = ifcfg.get("NETTYPE")
subchannels = ifcfg.get("SUBCHANNELS")
if blivet.arch.isS390() and nettype and subchannels:
znet = "rd.znet=%s,%s" % (nettype, subchannels)
options = ifcfg.get("OPTIONS").strip("'\"")
if options:
options = filter(lambda x: x != '', options.split(' '))
znet += ",%s" % (','.join(options))
netargs.add(znet)
return netargs
def _get_ip_setting_values_from_ksdata(networkdata):
values = []
# ipv4 settings
method4 = "auto"
if networkdata.bootProto == "static":
method4 = "manual"
values.append(["ipv4", "method", method4, "s"])
if method4 == "manual":
addr4 = nm.nm_ipv4_to_dbus_int(networkdata.ip)
gateway4 = nm.nm_ipv4_to_dbus_int(networkdata.gateway)
prefix4 = netmask2prefix(networkdata.netmask)
values.append(["ipv4", "addresses", [[addr4, prefix4, gateway4]], "aau"])
# ipv6 settings
if networkdata.noipv6:
method6 = "ignore"
else:
if not networkdata.ipv6:
method6 = "auto"
elif networkdata.ipv6 == "auto":
method6 = "auto"
elif networkdata.ipv6 == "dhcp":
method6 = "dhcp"
else:
method6 = "manual"
values.append(["ipv6", "method", method6, "s"])
if method6 == "manual":
addr6, _slash, prefix6 = networkdata.ipv6.partition("/")
if prefix6:
prefix6 = int(prefix6)
else:
prefix6 = 64
addr6 = nm.nm_ipv6_to_dbus_ay(addr6)
if networkdata.ipv6gateway:
gateway6 = nm.nm_ipv6_to_dbus_ay(networkdata.ipv6gateway)
else:
gateway6 = [0] * 16
values.append(["ipv6", "addresses", [(addr6, prefix6, gateway6)], "a(ayuay)"])
# nameservers
nss4 = []
nss6 = []
if networkdata.nameserver:
for ns in networkdata.nameserver.split(","):
if ":" in ns:
nss6.append(nm.nm_ipv6_to_dbus_ay(ns))
else:
nss4.append(nm.nm_ipv4_to_dbus_int(ns))
values.append(["ipv4", "dns", nss4, "au"])
values.append(["ipv6", "dns", nss6, "aay"])
return values
def update_settings_with_ksdata(devname, networkdata):
new_values = _get_ip_setting_values_from_ksdata(networkdata)
new_values.append(['connection', 'autoconnect', networkdata.onboot, None])
uuid = nm.nm_device_setting_value(devname, "connection", "uuid")
nm.nm_update_settings_of_device(devname, new_values)
return uuid
def bond_options_ksdata_to_dbus(opts_str):
retval = {}
for option in opts_str.split(";" if ';' in opts_str else ","):
key, _sep, value = option.partition("=")
retval[key] = value
return retval
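# Illustrative conversion (not part of the original source):
#
#     bond_options_ksdata_to_dbus("mode=active-backup;miimon=100")
#     -> {'mode': 'active-backup', 'miimon': '100'}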
def add_connection_for_ksdata(networkdata, devname):
added_connections = []
con_uuid = str(uuid4())
values = _get_ip_setting_values_from_ksdata(networkdata)
# HACK preventing NM to autoactivate the connection
#values.append(['connection', 'autoconnect', networkdata.onboot, 'b'])
values.append(['connection', 'autoconnect', False, 'b'])
values.append(['connection', 'uuid', con_uuid, 's'])
# type "bond"
if networkdata.bondslaves:
# bond connection is autoactivated
values.append(['connection', 'type', 'bond', 's'])
values.append(['connection', 'id', devname, 's'])
values.append(['bond', 'interface-name', devname, 's'])
options = bond_options_ksdata_to_dbus(networkdata.bondopts)
values.append(['bond', 'options', options, 'a{ss}'])
for _i, slave in enumerate(networkdata.bondslaves.split(","), 1):
#slave_name = "%s slave %d" % (devname, i)
slave_name = slave
svalues = []
suuid = str(uuid4())
svalues.append(['connection', 'uuid', suuid, 's'])
svalues.append(['connection', 'id', slave_name, 's'])
svalues.append(['connection', 'slave-type', 'bond', 's'])
svalues.append(['connection', 'master', devname, 's'])
svalues.append(['connection', 'type', '802-3-ethernet', 's'])
mac = nm.nm_device_perm_hwaddress(slave)
mac = [int(b, 16) for b in mac.split(":")]
svalues.append(['802-3-ethernet', 'mac-address', mac, 'ay'])
# disconnect slaves
if networkdata.activate:
nm.nm_disconnect_device(slave)
# remove ifcfg file
ifcfg_path = find_ifcfg_file_of_device(slave)
if ifcfg_path and os.access(ifcfg_path, os.R_OK):
os.unlink(ifcfg_path)
nm.nm_add_connection(svalues)
added_connections.append((suuid, slave))
dev_spec = None
# type "team"
elif networkdata.teamslaves:
values.append(['connection', 'type', 'team', 's'])
values.append(['connection', 'id', devname, 's'])
values.append(['team', 'interface-name', devname, 's'])
values.append(['team', 'config', networkdata.teamconfig, 's'])
for _i, (slave, cfg) in enumerate(networkdata.teamslaves):
# assume ethernet, TODO: infiniband, wifi, vlan
#slave_name = "%s slave %d" % (devname, i)
slave_name = slave
svalues = []
suuid = str(uuid4())
svalues.append(['connection', 'uuid', suuid, 's'])
svalues.append(['connection', 'id', slave_name, 's'])
svalues.append(['connection', 'slave-type', 'team', 's'])
svalues.append(['connection', 'master', devname, 's'])
svalues.append(['connection', 'type', '802-3-ethernet', 's'])
mac = nm.nm_device_perm_hwaddress(slave)
mac = [int(b, 16) for b in mac.split(":")]
svalues.append(['802-3-ethernet', 'mac-address', mac, 'ay'])
svalues.append(['team-port', 'config', cfg, 's'])
# disconnect slaves
if networkdata.activate:
nm.nm_disconnect_device(slave)
# remove ifcfg file
ifcfg_path = find_ifcfg_file_of_device(slave)
if ifcfg_path and os.access(ifcfg_path, os.R_OK):
os.unlink(ifcfg_path)
nm.nm_add_connection(svalues)
added_connections.append((suuid, slave))
dev_spec = None
# type "vlan"
elif networkdata.vlanid:
parent, _sep, _vlanid = devname.partition(".")
values.append(['vlan', 'parent', parent, 's'])
values.append(['connection', 'type', 'vlan', 's'])
values.append(['connection', 'id', devname, 's'])
values.append(['vlan', 'interface-name', devname, 's'])
values.append(['vlan', 'id', int(networkdata.vlanid), 'u'])
dev_spec = None
# type "802-3-ethernet"
else:
values.append(['connection', 'type', '802-3-ethernet', 's'])
values.append(['connection', 'id', devname, 's'])
mac = nm.nm_device_perm_hwaddress(devname)
mac = [int(b, 16) for b in mac.split(":")]
values.append(['802-3-ethernet', 'mac-address', mac, 'ay'])
dev_spec = devname
nm.nm_add_connection(values)
added_connections.insert(0, (con_uuid, dev_spec))
return added_connections
def ksdata_from_ifcfg(devname, uuid=None):
if nm.nm_device_is_slave(devname):
return None
if nm.nm_device_type_is_wifi(devname):
# wifi from kickstart is not supported yet
return None
if not uuid:
# Find ifcfg file for the device.
# If the device is active, use uuid of its active connection.
uuid = nm.nm_device_active_con_uuid(devname)
if uuid:
ifcfg_path = find_ifcfg_file([("UUID", uuid)])
else:
# look it up by other values depending on its type
ifcfg_path = find_ifcfg_file_of_device(devname)
if not ifcfg_path:
return None
ifcfg = IfcfgFile(ifcfg_path)
ifcfg.read()
nd = ifcfg_to_ksdata(ifcfg, devname)
if not nd:
return None
if nm.nm_device_type_is_ethernet(devname):
nd.device = devname
elif nm.nm_device_type_is_wifi(devname):
nm.device = ""
elif nm.nm_device_type_is_bond(devname):
nd.device = devname
elif nm.nm_device_type_is_team(devname):
nd.device = devname
elif nm.nm_device_type_is_vlan(devname):
nd.device = devname.split(".")[0]
return nd
def ifcfg_to_ksdata(ifcfg, devname):
from pyanaconda.kickstart import AnacondaKSHandler
handler = AnacondaKSHandler()
kwargs = {}
# no network command for bond slaves
if ifcfg.get("MASTER"):
return None
# no network command for team slaves
if ifcfg.get("TEAM_MASTER"):
return None
# ipv4 and ipv6
if ifcfg.get("ONBOOT") and ifcfg.get("ONBOOT" ) == "no":
kwargs["onboot"] = False
if ifcfg.get('MTU') and ifcfg.get('MTU') != "0":
kwargs["mtu"] = ifcfg.get('MTU')
# ipv4
if not ifcfg.get('BOOTPROTO'):
kwargs["noipv4"] = True
else:
if iutil.lowerASCII(ifcfg.get('BOOTPROTO')) == 'dhcp':
kwargs["bootProto"] = "dhcp"
if ifcfg.get('DHCPCLASS'):
kwargs["dhcpclass"] = ifcfg.get('DHCPCLASS')
elif ifcfg.get('IPADDR'):
kwargs["bootProto"] = "static"
kwargs["ip"] = ifcfg.get('IPADDR')
netmask = ifcfg.get('NETMASK')
prefix = ifcfg.get('PREFIX')
if not netmask and prefix:
netmask = prefix2netmask(int(prefix))
if netmask:
kwargs["netmask"] = netmask
# note that --gateway is common for ipv4 and ipv6
if ifcfg.get('GATEWAY'):
kwargs["gateway"] = ifcfg.get('GATEWAY')
elif ifcfg.get('IPADDR0'):
kwargs["bootProto"] = "static"
kwargs["ip"] = ifcfg.get('IPADDR0')
prefix = ifcfg.get('PREFIX0')
if prefix:
netmask = prefix2netmask(int(prefix))
kwargs["netmask"] = netmask
# note that --gateway is common for ipv4 and ipv6
if ifcfg.get('GATEWAY0'):
kwargs["gateway"] = ifcfg.get('GATEWAY0')
# ipv6
if (not ifcfg.get('IPV6INIT') or
ifcfg.get('IPV6INIT') == "no"):
kwargs["noipv6"] = True
else:
if ifcfg.get('IPV6_AUTOCONF') in ("yes", ""):
kwargs["ipv6"] = "auto"
else:
if ifcfg.get('IPV6ADDR'):
kwargs["ipv6"] = ifcfg.get('IPV6ADDR')
if ifcfg.get('IPV6_DEFAULTGW') \
and ifcfg.get('IPV6_DEFAULTGW') != "::":
kwargs["ipv6gateway"] = ifcfg.get('IPV6_DEFAULTGW')
if ifcfg.get('DHCPV6C') == "yes":
kwargs["ipv6"] = "dhcp"
# ipv4 and ipv6
dnsline = ''
for key in ifcfg.info.keys():
if iutil.upperASCII(key).startswith('DNS'):
if dnsline == '':
dnsline = ifcfg.get(key)
else:
dnsline += "," + ifcfg.get(key)
if dnsline:
kwargs["nameserver"] = dnsline
if ifcfg.get("ETHTOOL_OPTS"):
kwargs["ethtool"] = ifcfg.get("ETHTOOL_OPTS")
if ifcfg.get("ESSID"):
kwargs["essid"] = ifcfg.get("ESSID")
# hostname
if ifcfg.get("DHCP_HOSTNAME"):
kwargs["hostname"] = ifcfg.get("DHCP_HOSTNAME")
# bonding
# FIXME: dracut has only BOND_OPTS
if ifcfg.get("BONDING_MASTER") == "yes" or ifcfg.get("TYPE") == "Bond":
slaves = get_bond_slaves_from_ifcfgs([devname, ifcfg.get("UUID")])
if slaves:
kwargs["bondslaves"] = ",".join(slaves)
bondopts = ifcfg.get("BONDING_OPTS")
if bondopts:
sep = ","
if sep in bondopts:
sep = ";"
kwargs["bondopts"] = sep.join(bondopts.split())
# vlan
if ifcfg.get("VLAN") == "yes" or ifcfg.get("TYPE") == "Vlan":
kwargs["device"] = ifcfg.get("PHYSDEV")
kwargs["vlanid"] = ifcfg.get("VLAN_ID")
# pylint: disable=E1101
nd = handler.NetworkData(**kwargs)
# teaming
if ifcfg.get("TYPE") == "Team" or ifcfg.get("DEVICETYPE") == "Team":
slaves = get_team_slaves([devname, ifcfg.get("UUID")])
for dev, cfg in slaves:
nd.teamslaves.append((dev, cfg))
teamconfig = nm.nm_device_setting_value(devname, "team", "config")
if teamconfig:
nd.teamconfig = teamconfig
return nd
def hostname_ksdata(hostname):
from pyanaconda.kickstart import AnacondaKSHandler
handler = AnacondaKSHandler()
# pylint: disable=E1101
return handler.NetworkData(hostname=hostname, bootProto="")
def find_ifcfg_file_of_device(devname, root_path=""):
ifcfg_path = None
if devname not in nm.nm_devices():
return None
if nm.nm_device_type_is_wifi(devname):
ssid = nm.nm_device_active_ssid(devname)
if ssid:
ifcfg_path = find_ifcfg_file([("ESSID", ssid)])
elif nm.nm_device_type_is_bond(devname):
ifcfg_path = find_ifcfg_file([("DEVICE", devname)])
elif nm.nm_device_type_is_team(devname):
ifcfg_path = find_ifcfg_file([("DEVICE", devname)])
elif nm.nm_device_type_is_vlan(devname):
ifcfg_path = find_ifcfg_file([("DEVICE", devname)])
elif nm.nm_device_type_is_ethernet(devname):
try:
hwaddr = nm.nm_device_perm_hwaddress(devname)
except nm.PropertyNotFoundError:
hwaddr = None
if hwaddr:
hwaddr_check = lambda mac: mac.upper() == hwaddr.upper()
nonempty = lambda x: x
            # slave configuration created in GUI takes precedence
ifcfg_path = find_ifcfg_file([("HWADDR", hwaddr_check),
("MASTER", nonempty)],
root_path)
if not ifcfg_path:
ifcfg_path = find_ifcfg_file([("HWADDR", hwaddr_check),
("TEAM_MASTER", nonempty)],
root_path)
if not ifcfg_path:
ifcfg_path = find_ifcfg_file([("HWADDR", hwaddr_check)], root_path)
if not ifcfg_path:
ifcfg_path = find_ifcfg_file([("DEVICE", devname)], root_path)
return ifcfg_path
def find_ifcfg_file(values, root_path=""):
for filepath in _ifcfg_files(os.path.normpath(root_path+netscriptsDir)):
ifcfg = IfcfgFile(filepath)
ifcfg.read()
for key, value in values:
if callable(value):
if not value(ifcfg.get(key)):
break
else:
if ifcfg.get(key) != value:
break
else:
return filepath
return None
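# Illustrative lookups (not part of the original source): each entry in
# `values` pairs a key with a literal or with a predicate, as used by
# find_ifcfg_file_of_device() above.
#
#     find_ifcfg_file([("DEVICE", "ens3")])                  # exact match
#     find_ifcfg_file([("HWADDR", lambda mac: bool(mac))])   # any non-empty value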
def get_bond_slaves_from_ifcfgs(master_specs):
"""List of slave device names of master specified by master_specs.
master_specs is a list containing device name of master (dracut)
and/or master's connection uuid
"""
slaves = []
for filepath in _ifcfg_files(netscriptsDir):
ifcfg = IfcfgFile(filepath)
ifcfg.read()
master = ifcfg.get("MASTER")
if master in master_specs:
device = ifcfg.get("DEVICE")
if device:
slaves.append(device)
else:
hwaddr = ifcfg.get("HWADDR")
for devname in nm.nm_devices():
try:
h = nm.nm_device_property(devname, "PermHwAddress")
except nm.PropertyNotFoundError:
log.debug("can't get PermHwAddress of devname %s", devname)
continue
if h.upper() == hwaddr.upper():
slaves.append(devname)
break
return slaves
# why not from ifcfg? because we want config json value without escapes
def get_team_slaves(master_specs):
"""List of slaves of master specified by master_specs (name, opts).
master_specs is a list containing device name of master (dracut)
and/or master's connection uuid
"""
slaves = []
for master in master_specs:
slave_settings = nm.nm_get_settings(master, "connection", "master")
for settings in slave_settings:
try:
cfg = settings["team-port"]["config"]
except KeyError:
cfg = ""
devname = settings["connection"].get("interface-name")
#nm-c-e doesn't save device name
# TODO: wifi, infiniband
if not devname:
ty = settings["connection"]["type"]
if ty == "802-3-ethernet":
hwaddr = settings["802-3-ethernet"]["mac-address"]
hwaddr = ":".join("%02X" % b for b in hwaddr)
devname = nm.nm_hwaddr_to_device_name(hwaddr)
if devname:
slaves.append((devname, cfg))
else:
uuid = settings["connection"].get("uuid")
log.debug("network: can't get team slave device name of %s", uuid)
return slaves
def ifaceForHostIP(host):
route = iutil.execWithCapture("ip", [ "route", "get", "to", host ])
if not route:
log.error("Could not get interface for route to %s", host)
return ""
routeInfo = route.split()
if routeInfo[0] != host or len(routeInfo) < 5 or \
"dev" not in routeInfo or routeInfo.index("dev") > 3:
log.error('Unexpected "ip route get to %s" reply: %s', host, routeInfo)
return ""
return routeInfo[routeInfo.index("dev") + 1]
def copyFileToPath(fileName, destPath='', overwrite=False):
if not os.path.isfile(fileName):
return False
destfile = os.path.join(destPath, fileName.lstrip('/'))
if (os.path.isfile(destfile) and not overwrite):
return False
if not os.path.isdir(os.path.dirname(destfile)):
iutil.mkdirChain(os.path.dirname(destfile))
shutil.copy(fileName, destfile)
return True
# /etc/sysconfig/network-scripts/ifcfg-*
# /etc/sysconfig/network-scripts/keys-*
# TODO: routing info from /etc/sysconfig/network-scripts?
def copyIfcfgFiles(destPath):
files = os.listdir(netscriptsDir)
for cfgFile in files:
if cfgFile.startswith(("ifcfg-","keys-")):
srcfile = os.path.join(netscriptsDir, cfgFile)
copyFileToPath(srcfile, destPath)
# /etc/dhcp/dhclient-DEVICE.conf
# TODORV: do we really not want to overwrite on live cd?
def copyDhclientConfFiles(destPath):
for devName in nm.nm_devices():
dhclientfile = os.path.join("/etc/dhcp/dhclient-%s.conf" % devName)
copyFileToPath(dhclientfile, destPath)
def ks_spec_to_device_name(ksspec=""):
ksdevice = ksspec
bootif_mac = ''
if ksdevice == 'bootif' and "BOOTIF" in flags.cmdline:
bootif_mac = flags.cmdline["BOOTIF"][3:].replace("-", ":").upper()
for dev in sorted(nm.nm_devices()):
# "eth0"
if ksdevice == dev:
break
# "link"
elif ksdevice == 'link':
try:
link_up = nm.nm_device_carrier(dev)
except ValueError as e:
log.debug("ks_spec_to_device_name: %s", e)
continue
if link_up:
ksdevice = dev
break
# "XX:XX:XX:XX:XX:XX" (mac address)
elif ':' in ksdevice:
try:
hwaddr = nm.nm_device_perm_hwaddress(dev)
except ValueError as e:
log.debug("ks_spec_to_device_name: %s", e)
continue
if ksdevice.lower() == hwaddr.lower():
ksdevice = dev
break
# "bootif" and BOOTIF==XX:XX:XX:XX:XX:XX
elif ksdevice == 'bootif':
try:
hwaddr = nm.nm_device_perm_hwaddress(dev)
except ValueError as e:
log.debug("ks_spec_to_device_name: %s", e)
continue
if bootif_mac.lower() == hwaddr.lower():
ksdevice = dev
break
return ksdevice
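# Illustrative resolutions (not part of the original source):
#
#     ks_spec_to_device_name("ens3")               -> "ens3" (exact name)
#     ks_spec_to_device_name("link")               -> first device with carrier
#     ks_spec_to_device_name("00:11:22:33:44:55")  -> device with that MAC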
def set_hostname(hn):
if can_touch_runtime_system("set hostname", touch_live=True):
log.info("setting installation environment hostname to %s", hn)
iutil.execWithRedirect("hostnamectl", ["set-hostname", hn])
def write_hostname(rootpath, ksdata, overwrite=False):
cfgfile = os.path.normpath(rootpath + hostnameFile)
if (os.path.isfile(cfgfile) and not overwrite):
return False
f = open(cfgfile, "w")
f.write("%s\n" % ksdata.network.hostname)
f.close()
return True
def disableIPV6(rootpath):
cfgfile = os.path.normpath(rootpath + ipv6ConfFile)
if ('noipv6' in flags.cmdline
and all(nm.nm_device_setting_value(dev, "ipv6", "method") == "ignore"
for dev in nm.nm_devices() if nm.nm_device_type_is_ethernet(dev))):
log.info('Disabling ipv6 on target system')
with open(cfgfile, "a") as f:
f.write("# Anaconda disabling ipv6 (noipv6 option)\n")
f.write("net.ipv6.conf.all.disable_ipv6=1\n")
f.write("net.ipv6.conf.default.disable_ipv6=1\n")
def disableNMForStorageDevices(rootpath, storage):
for devname in nm.nm_devices():
if (usedByFCoE(devname, storage) or
usedByRootOnISCSI(devname, storage)):
ifcfg_path = find_ifcfg_file_of_device(devname, root_path=rootpath)
if not ifcfg_path:
log.warning("disableNMForStorageDevices: ifcfg file for %s not found",
devname)
continue
ifcfg = IfcfgFile(ifcfg_path)
ifcfg.read()
ifcfg.set(('NM_CONTROLLED', 'no'))
ifcfg.write()
log.info("network device %s used by storage will not be "
"controlled by NM", devname)
# sets ONBOOT=yes (and its mirror value in ksdata) for devices used by FCoE
def autostartFCoEDevices(rootpath, storage, ksdata):
for devname in nm.nm_devices():
if usedByFCoE(devname, storage):
ifcfg_path = find_ifcfg_file_of_device(devname, root_path=rootpath)
if not ifcfg_path:
log.warning("autoconnectFCoEDevices: ifcfg file for %s not found", devname)
continue
ifcfg = IfcfgFile(ifcfg_path)
ifcfg.read()
ifcfg.set(('ONBOOT', 'yes'))
ifcfg.write()
log.debug("setting ONBOOT=yes for network device %s used by fcoe", devname)
for nd in ksdata.network.network:
if nd.device == devname:
nd.onboot = True
break
def usedByFCoE(iface, storage):
for d in storage.devices:
if (isinstance(d, FcoeDiskDevice) and
d.nic == iface):
return True
return False
def usedByRootOnISCSI(iface, storage):
rootdev = storage.rootDevice
for d in storage.devices:
if (isinstance(d, iScsiDiskDevice) and
rootdev.dependsOn(d)):
if d.nic == "default" or ":" in d.nic:
if iface == ifaceForHostIP(d.host_address):
return True
elif d.nic == iface:
return True
return False
def write_sysconfig_network(rootpath, overwrite=False):
cfgfile = os.path.normpath(rootpath + networkConfFile)
if (os.path.isfile(cfgfile) and not overwrite):
return False
with open(cfgfile, "w") as f:
f.write("# Created by anaconda\n")
return True
def write_network_config(storage, ksdata, instClass, rootpath):
write_hostname(rootpath, ksdata, overwrite=flags.livecdInstall)
set_hostname(ksdata.network.hostname)
write_sysconfig_network(rootpath, overwrite=flags.livecdInstall)
disableIPV6(rootpath)
if not flags.imageInstall:
copyIfcfgFiles(rootpath)
copyDhclientConfFiles(rootpath)
copyFileToPath("/etc/resolv.conf", rootpath, overwrite=flags.livecdInstall)
    # TODO the default for ONBOOT needs to be laid down
# before newui we didn't set it for kickstart installs
instClass.setNetworkOnbootDefault(ksdata)
# NM_CONTROLLED is not mirrored in ksdata
disableNMForStorageDevices(rootpath, storage)
autostartFCoEDevices(rootpath, storage, ksdata)
def update_hostname_data(ksdata, hostname):
log.debug("updating hostname %s", hostname)
hostname_found = False
for nd in ksdata.network.network:
if nd.hostname:
nd.hostname = hostname
hostname_found = True
if not hostname_found:
nd = hostname_ksdata(hostname)
ksdata.network.network.append(nd)
def get_device_name(network_data):
ksspec = network_data.device or flags.cmdline.get('ksdevice', "")
dev_name = ks_spec_to_device_name(ksspec)
if not dev_name:
return ""
if dev_name not in nm.nm_devices():
if not any((network_data.vlanid, network_data.bondslaves, network_data.teamslaves)):
return ""
if network_data.vlanid:
dev_name = "%s.%s" % (dev_name, network_data.vlanid)
return dev_name
def setOnboot(ksdata):
updated_devices = []
for network_data in ksdata.network.network:
devname = get_device_name(network_data)
if not devname:
log.warning("network: set ONBOOT: --device %s does not exist", network_data.device)
continue
updated_devices.append(devname)
try:
nm.nm_update_settings_of_device(devname, [['connection', 'autoconnect', network_data.onboot, None]])
except (nm.SettingsNotFoundError, nm.UnknownDeviceError) as e:
log.debug("setOnboot: %s", e)
return updated_devices
def apply_kickstart_from_pre_section(ksdata):
applied_devices = []
for network_data in ksdata.network.network:
# TODO: wireless not supported yet
if network_data.essid:
continue
dev_name = get_device_name(network_data)
if not dev_name:
log.warning("network: pre kickstart: --device %s does not exist", network_data.device)
continue
ifcfg_path = find_ifcfg_file_of_device(dev_name)
# if the device was already configured in intramfs by kickstart ignore it
if ifcfg_path:
with open(ifcfg_path, 'r') as f:
if "Generated by parse-kickstart" in f.read():
continue
applied_devices.append(dev_name)
if ifcfg_path:
# if the device was already configured in initramfs update the settings
log.debug("network: pre kickstart - updating settings of device %s", dev_name)
con_uuid = update_settings_with_ksdata(dev_name, network_data)
added_connections = [(con_uuid, dev_name)]
else:
log.debug("network: pre kickstart - adding connection for %s", dev_name)
# Virtual devices (eg vlan, bond) return dev_name == None
added_connections = add_connection_for_ksdata(network_data, dev_name)
if network_data.activate:
for con_uuid, dev_name in added_connections:
try:
nm.nm_activate_device_connection(dev_name, con_uuid)
except nm.UnknownConnectionError:
log.warning("network: pre kickstart: can't activate connection %s on %s",
con_uuid, dev_name)
return applied_devices
def networkInitialize(ksdata):
log.debug("network: devices found %s", nm.nm_devices())
logIfcfgFiles("network initialization")
if not flags.imageInstall:
devnames = apply_kickstart_from_pre_section(ksdata)
if devnames:
msg = "kickstart pre section applied for devices %s" % devnames
log.debug("network: %s", msg)
logIfcfgFiles(msg)
devnames = dumpMissingDefaultIfcfgs()
if devnames:
msg = "missing ifcfgs created for devices %s" % devnames
log.debug("network: %s", msg)
logIfcfgFiles(msg)
# For kickstart network --activate option we set ONBOOT=yes
# in dracut to get devices activated by NM. The real network --onboot
# value is set here.
devnames = setOnboot(ksdata)
if devnames:
msg = "setting real kickstart ONBOOT value for devices %s" % devnames
log.debug("network: %s", msg)
logIfcfgFiles(msg)
if ksdata.network.hostname is None:
hostname = getHostname()
update_hostname_data(ksdata, hostname)
def _get_ntp_servers_from_dhcp(ksdata):
"""Check if some NTP servers were returned from DHCP and set them
to ksdata (if not NTP servers were specified in the kickstart)"""
ntp_servers = nm.nm_ntp_servers_from_dhcp()
log.info("got %d NTP servers from DHCP", len(ntp_servers))
hostnames = []
for server_address in ntp_servers:
try:
hostname = socket.gethostbyaddr(server_address)[0]
except socket.error:
# getting hostname failed, just use the address returned from DHCP
log.debug("getting NTP server hostname failed for address: %s",
server_address)
hostname = server_address
hostnames.append(hostname)
# check if some NTP servers were specified from kickstart
if not ksdata.timezone.ntpservers:
# no NTP servers were specified, add those from DHCP
ksdata.timezone.ntpservers = hostnames
def _wait_for_connecting_NM():
"""If NM is in connecting state, wait for connection.
Return value: NM has got connection."""
if nm.nm_is_connected:
return True
if nm.nm_is_connecting():
log.debug("waiting for connecting NM (dhcp?)")
else:
return False
i = 0
while nm.nm_is_connecting() and i < constants.NETWORK_CONNECTION_TIMEOUT:
i += constants.NETWORK_CONNECTED_CHECK_INTERVAL
time.sleep(constants.NETWORK_CONNECTED_CHECK_INTERVAL)
if nm.nm_is_connected():
log.debug("connected, waited %d seconds", i)
return True
log.debug("not connected, waited %d of %d secs", i, constants.NETWORK_CONNECTION_TIMEOUT)
return False
def wait_for_network_devices(devices, timeout=constants.NETWORK_CONNECTION_TIMEOUT):
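    """Wait up to timeout seconds for all given devices to be activated
    by NM. Return True if they all activated in time, False otherwise."""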
devices = set(devices)
i = 0
log.debug("waiting for connection of devices %s for iscsi", devices)
while i < timeout:
if not devices - set(nm.nm_activated_devices()):
return True
i += 1
time.sleep(1)
return False
def wait_for_connecting_NM_thread(ksdata):
"""This function is called from a thread which is run at startup
to wait for Network Manager to connect."""
# connection (e.g. auto default dhcp) is activated by NM service
connected = _wait_for_connecting_NM()
if connected:
if ksdata.network.hostname == DEFAULT_HOSTNAME:
hostname = getHostname()
update_hostname_data(ksdata, hostname)
_get_ntp_servers_from_dhcp(ksdata)
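    # publish the result and wake up threads blocked in wait_for_connectivity()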
with network_connected_condition:
global network_connected
network_connected = connected
network_connected_condition.notify_all()
def wait_for_connectivity(timeout=constants.NETWORK_CONNECTION_TIMEOUT):
"""Wait for network connectivty to become available
:param timeout: how long to wait in seconds
:type param: integer of float"""
connected = False
network_connected_condition.acquire()
# if network_connected is None, network connectivity check
# has not yet been run or is in progress, so wait for it to finish
if network_connected is None:
# wait releases the lock and reacquires it once the thread is unblocked
network_connected_condition.wait(timeout=timeout)
connected = network_connected
# after wait() unblocks, we get the lock back,
# so we need to release it
network_connected_condition.release()
return connected
def status_message():
""" A short string describing which devices are connected. """
msg = _("Unknown")
state = nm.nm_state()
if state == NetworkManager.State.CONNECTING:
msg = _("Connecting...")
elif state == NetworkManager.State.DISCONNECTING:
msg = _("Disconnecting...")
else:
active_devs = nm.nm_activated_devices()
if active_devs:
slaves = {}
ssids = {}
            # first find slaves and wireless APs
for devname in active_devs:
slaves[devname] = nm.nm_device_slaves(devname) or []
if nm.nm_device_type_is_wifi(devname):
ssids[devname] = nm.nm_device_active_ssid(devname) or ""
all_slaves = set(itertools.chain.from_iterable(slaves.values()))
nonslaves = [dev for dev in active_devs if dev not in all_slaves]
if len(nonslaves) == 1:
devname = nonslaves[0]
if nm.nm_device_type_is_ethernet(devname):
msg = _("Wired (%(interface_name)s) connected") \
% {"interface_name": devname}
elif nm.nm_device_type_is_wifi(devname):
msg = _("Wireless connected to %(access_point)s") \
% {"access_point" : ssids[devname]}
elif nm.nm_device_type_is_bond(devname):
msg = _("Bond %(interface_name)s (%(list_of_slaves)s) connected") \
% {"interface_name": devname, \
"list_of_slaves": ",".join(slaves[devname])}
elif nm.nm_device_type_is_team(devname):
msg = _("Team%(interface_name)s (%(list_of_slaves)s) connected") \
% {"interface_name": devname, \
"list_of_slaves": ",".join(slaves[devname])}
elif nm.nm_device_type_is_vlan(devname):
parent = nm.nm_device_setting_value(devname, "vlan", "parent")
vlanid = nm.nm_device_setting_value(devname, "vlan", "id")
msg = _("Vlan %(interface_name)s (%(parent_device)s, ID %(vlanid)s) connected") \
% {"interface_name": devname, "parent_device": parent, "vlanid": vlanid}
elif len(nonslaves) > 1:
devlist = []
for devname in nonslaves:
if nm.nm_device_type_is_ethernet(devname):
devlist.append("%s" % devname)
elif nm.nm_device_type_is_wifi(devname):
devlist.append("%s" % ssids[devname])
elif nm.nm_device_type_is_bond(devname):
devlist.append("%s (%s)" % (devname, ",".join(slaves[devname])))
elif nm.nm_device_type_is_team(devname):
devlist.append("%s (%s)" % (devname, ",".join(slaves[devname])))
elif nm.nm_device_type_is_vlan(devname):
devlist.append("%s" % devname)
msg = _("Connected: %(list_of_interface_names)s") \
% {"list_of_interface_names": ", ".join(devlist)}
else:
msg = _("Not connected")
if not nm.nm_devices():
msg = _("No network devices available")
return msg
| akozumpl/anaconda | pyanaconda/network.py | Python | gpl-2.0 | 48,413 | 0.002417 |
from decimal import Decimal, getcontext
class PI:
    # Set Decimal precision to 1000 significant digits for all the
    # computations below (runs once, at class definition time).
    getcontext().prec = 1000
    @staticmethod
    def factorial(n):
        # Iterative rather than recursive: chudnovskyBig calls this with
        # n up to 6*k, which would exceed the interpreter recursion limit.
        result = 1
        for i in range(2, n + 1):
            result *= i
        return result
@staticmethod
def plouffBig(n): #http://en.wikipedia.org/wiki/Bailey%E2%80%93Borwein%E2%80%93Plouffe_formula
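        # BBP series:
        # pi = sum_{k>=0} 16**-k * (4/(8k+1) - 2/(8k+4) - 1/(8k+5) - 1/(8k+6))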
pi = Decimal(0)
k = 0
while k < n:
            pi += (Decimal(1)/(16**k)) * (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4)
                                          - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6))
k += 1
return pi
@staticmethod
def bellardBig(n): #http://en.wikipedia.org/wiki/Bellard%27s_formula
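        # Bellard's series (about 43% faster than BBP):
        # pi = (1/2**6) * sum_{k>=0} ((-1)**k / 1024**k) *
        #      (256/(10k+1) + 1/(10k+9) - 64/(10k+3) - 32/(4k+1)
        #       - 4/(10k+5) - 4/(10k+7) - 1/(4k+3))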
pi = Decimal(0)
k = 0
while k < n:
            pi += (Decimal(-1)**k / (1024**k)) * (Decimal(256)/(10*k+1) + Decimal(1)/(10*k+9)
                                                  - Decimal(64)/(10*k+3) - Decimal(32)/(4*k+1)
                                                  - Decimal(4)/(10*k+5) - Decimal(4)/(10*k+7)
                                                  - Decimal(1)/(4*k+3))
k += 1
pi = pi * 1/(2**6)
return pi
@staticmethod
def chudnovskyBig(n): #http://en.wikipedia.org/wiki/Chudnovsky_algorithm
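        # Chudnovsky series, roughly 14 digits per term:
        # 1/pi = 12 * sum_{k>=0} (-1)**k * (6k)! * (13591409 + 545140134k)
        #        / ((3k)! * (k!)**3 * 640320**(3k + 3/2))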
pi = Decimal(0)
k = 0
while k < n:
            pi += (Decimal(-1)**k) * (Decimal(PI.factorial(6*k)) / ((PI.factorial(k)**3) * PI.factorial(3*k))
                                      * (13591409 + 545140134*k) / (640320**(3*k)))
k += 1
pi = pi * Decimal(10005).sqrt()/4270934400
pi = pi**(-1)
return pi
@staticmethod
def calculate():
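        # Each Bellard term adds ~3 digits (log10(1024)), so 1000 terms
        # give ~3000 digits, well beyond the 1000-digit precision above.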
return PI.bellardBig(1000)
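# Minimal usage sketch (assuming this file is importable as chunsabot.pi):
#   from chunsabot.pi import PI
#   print(str(PI.calculate())[:50])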
| susemeee/Chunsabot-framework | chunsabot/pi.py | Python | mit | 1,587 | 0.009452 |
#!/usr/bin/env python3
"""
Generate a set of agent demonstrations.
The agent can either be a trained model or the heuristic expert (bot).
Demonstration generation can take a long time, but it can be parallelized
if you have a cluster at your disposal. Provide a script that launches
make_agent_demos.py at your cluster as --job-script and the number of jobs as --jobs.
"""
import argparse
import gym
import logging
import sys
import subprocess
import os
import time
import numpy as np
import blosc
import torch
import babyai.utils as utils
# Parse arguments
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--env", required=True,
help="name of the environment to be run (REQUIRED)")
parser.add_argument("--model", default='BOT',
help="name of the trained model (REQUIRED)")
parser.add_argument("--demos", default=None,
help="path to save demonstrations (based on --model and --origin by default)")
parser.add_argument("--episodes", type=int, default=1000,
help="number of episodes to generate demonstrations for")
parser.add_argument("--valid-episodes", type=int, default=512,
help="number of validation episodes to generate demonstrations for")
parser.add_argument("--seed", type=int, default=0,
help="start random seed")
parser.add_argument("--argmax", action="store_true", default=False,
help="action with highest probability is selected")
parser.add_argument("--log-interval", type=int, default=100,
help="interval between progress reports")
parser.add_argument("--save-interval", type=int, default=10000,
help="interval between demonstrations saving")
parser.add_argument("--filter-steps", type=int, default=0,
help="filter out demos with number of steps more than filter-steps")
parser.add_argument("--on-exception", type=str, default='warn', choices=('warn', 'crash'),
help="How to handle exceptions during demo generation")
parser.add_argument("--job-script", type=str, default=None,
help="The script that launches make_agent_demos.py at a cluster.")
parser.add_argument("--jobs", type=int, default=0,
help="Split generation in that many jobs")
args = parser.parse_args()
logger = logging.getLogger(__name__)
def print_demo_lengths(demos):
num_frames_per_episode = [len(demo[2]) for demo in demos]
logger.info('Demo length: {:.3f}+-{:.3f}'.format(
np.mean(num_frames_per_episode), np.std(num_frames_per_episode)))
def generate_demos(n_episodes, valid, seed, shift=0):
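    """Generate and save n_episodes demos of args.env, seeding the
    environment consecutively from seed (validation demos if valid)."""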
    # Set seed for all randomness sources
    utils.seed(seed)
# Generate environment
env = gym.make(args.env)
agent = utils.load_agent(env, args.model, args.demos, 'agent', args.argmax, args.env)
demos_path = utils.get_demos_path(args.demos, args.env, 'agent', valid)
demos = []
checkpoint_time = time.time()
just_crashed = False
while True:
if len(demos) == n_episodes:
break
done = False
if just_crashed:
logger.info("reset the environment to find a mission that the bot can solve")
env.reset()
else:
env.seed(seed + len(demos))
obs = env.reset()
agent.on_reset()
actions = []
mission = obs["mission"]
images = []
directions = []
try:
while not done:
action = agent.act(obs)['action']
if isinstance(action, torch.Tensor):
action = action.item()
new_obs, reward, done, _ = env.step(action)
agent.analyze_feedback(reward, done)
actions.append(action)
images.append(obs['image'])
directions.append(obs['direction'])
obs = new_obs
if reward > 0 and (args.filter_steps == 0 or len(images) <= args.filter_steps):
demos.append((mission, blosc.pack_array(np.array(images)), directions, actions))
just_crashed = False
if reward == 0:
if args.on_exception == 'crash':
raise Exception("mission failed, the seed is {}".format(seed + len(demos)))
just_crashed = True
logger.info("mission failed")
except (Exception, AssertionError):
if args.on_exception == 'crash':
raise
just_crashed = True
logger.exception("error while generating demo #{}".format(len(demos)))
continue
if len(demos) and len(demos) % args.log_interval == 0:
now = time.time()
demos_per_second = args.log_interval / (now - checkpoint_time)
to_go = (n_episodes - len(demos)) / demos_per_second
logger.info("demo #{}, {:.3f} demos per second, {:.3f} seconds to go".format(
len(demos) - 1, demos_per_second, to_go))
checkpoint_time = now
# Save demonstrations
if args.save_interval > 0 and len(demos) < n_episodes and len(demos) % args.save_interval == 0:
logger.info("Saving demos...")
utils.save_demos(demos, demos_path)
logger.info("{} demos saved".format(len(demos)))
# print statistics for the last 100 demonstrations
print_demo_lengths(demos[-100:])
# Save demonstrations
logger.info("Saving demos...")
utils.save_demos(demos, demos_path)
logger.info("{} demos saved".format(len(demos)))
print_demo_lengths(demos[-100:])
def generate_demos_cluster():
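    """Launch args.jobs cluster jobs, each generating a shard of demos,
    poll the shards until all are complete, then merge and save them."""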
demos_per_job = args.episodes // args.jobs
demos_path = utils.get_demos_path(args.demos, args.env, 'agent')
job_demo_names = [os.path.realpath(demos_path + '.shard{}'.format(i))
for i in range(args.jobs)]
for demo_name in job_demo_names:
job_demos_path = utils.get_demos_path(demo_name)
if os.path.exists(job_demos_path):
os.remove(job_demos_path)
command = [args.job_script]
command += sys.argv[1:]
for i in range(args.jobs):
cmd_i = list(map(str,
command
+ ['--seed', args.seed + i * demos_per_job]
+ ['--demos', job_demo_names[i]]
+ ['--episodes', demos_per_job]
+ ['--jobs', 0]
+ ['--valid-episodes', 0]))
logger.info('LAUNCH COMMAND')
logger.info(cmd_i)
output = subprocess.check_output(cmd_i)
logger.info('LAUNCH OUTPUT')
logger.info(output.decode('utf-8'))
job_demos = [None] * args.jobs
while True:
jobs_done = 0
for i in range(args.jobs):
if job_demos[i] is None or len(job_demos[i]) < demos_per_job:
try:
logger.info("Trying to load shard {}".format(i))
job_demos[i] = utils.load_demos(utils.get_demos_path(job_demo_names[i]))
logger.info("{} demos ready in shard {}".format(
len(job_demos[i]), i))
except Exception:
logger.exception("Failed to load the shard")
if job_demos[i] and len(job_demos[i]) == demos_per_job:
jobs_done += 1
logger.info("{} out of {} shards done".format(jobs_done, args.jobs))
if jobs_done == args.jobs:
break
logger.info("sleep for 60 seconds")
time.sleep(60)
# Training demos
all_demos = []
for demos in job_demos:
all_demos.extend(demos)
utils.save_demos(all_demos, demos_path)
logging.basicConfig(level='INFO', format="%(asctime)s: %(levelname)s: %(message)s")
logger.info(args)
# Training demos
if args.jobs == 0:
generate_demos(args.episodes, False, args.seed)
else:
generate_demos_cluster()
# Validation demos
if args.valid_episodes:
generate_demos(args.valid_episodes, True, int(1e9))
| mila-iqia/babyai | scripts/make_agent_demos.py | Python | bsd-3-clause | 8,078 | 0.0026 |
from .edges import EdgeExtractor
from .extractor import Extractor
from .parambfs import ParamExtractor
| daajoe/trellis | trellis/extractor/__init__.py | Python | gpl-3.0 | 99 | 0.010101 |