| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses 1 value | stringclasses 15 values | int64 6–947k | float64 0–0.34 |
import numpy
import sys
import time
from mceditlib.test import templevel
from mceditlib import relight
# run me with the source checkout as the working dir so I can find the test_files folder.
def natural_relight():
world = templevel.TempLevel("AnvilWorld")
dim = world.getDimension()
positions = []
for cx, cz in dim.chunkPositions():
chunk = dim.getChunk(cx, cz)
for cy in chunk.sectionPositions():
positions.append((cx, cy, cz))
poses = iter(positions)
def do_relight():
cx, cy, cz = poses.next()
indices = numpy.indices((16, 16, 16), numpy.int32)
indices.shape = 3, 16*16*16
indices += ([cx << 4], [cy << 4], [cz << 4])
x, y, z = indices
relight.updateLightsByCoord(dim, x, y, z)
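# (Sketch of the coordinate math above, added for clarity: numpy.indices
# ((16, 16, 16)) enumerates every local block position in one 16x16x16
# chunk section, and adding (cx << 4, cy << 4, cz << 4) -- i.e. the chunk
# coordinates times 16 -- shifts those locals to absolute world coordinates.)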
# Find out how many sections we can do in `maxtime` seconds.
start = time.time()
count = 0
maxtime = 10
end = start + maxtime
while time.time() < end:
try:
do_relight()
except StopIteration:
break
count += 1
t = time.time() - start
print "Relight natural terrain: %d/%d chunk-sections in %.02f seconds (%f sections per second; %dms per section)" % (count, len(positions), t, count / t, 1000 * t / count)
if __name__ == '__main__':
if len(sys.argv) > 1:
method = sys.argv[1]
print "Using method", method
relight.setMethod(method)
natural_relight()
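# Usage sketch (inferred from the argv handling above; the method names
# themselves are whatever relight.setMethod accepts and are not listed here):
#
#   $ python time_relight_natural.py            # benchmark the default method
#   $ python time_relight_natural.py <method>   # benchmark a named method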
| vorburger/mcedit2 | src/mceditlib/bench/time_relight_natural.py | Python | bsd-3-clause | 1,455 | 0.003436 |
"""
Docstrings are another source of information for functions and classes.
:mod:`jedi.evaluate.dynamic` tries to find all executions of functions, while
the docstring parsing is much easier. There are three different types of
docstrings that |jedi| understands:
- `Sphinx <http://sphinx-doc.org/markup/desc.html#info-field-lists>`_
- `Epydoc <http://epydoc.sourceforge.net/manual-fields.html>`_
- `Numpydoc <https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt>`_
For example, the sphinx annotation ``:type foo: str`` clearly states that the
type of ``foo`` is ``str``.
As an addition to parameter searching, this module also provides return
annotations.
"""
import re
from textwrap import dedent
from parso import parse, ParserSyntaxError
from jedi._compatibility import u
from jedi.evaluate.utils import indent_block
from jedi.evaluate.cache import evaluator_method_cache
from jedi.evaluate.base_context import iterator_to_context_set, ContextSet, \
NO_CONTEXTS
from jedi.evaluate.lazy_context import LazyKnownContexts
DOCSTRING_PARAM_PATTERNS = [
r'\s*:type\s+%s:\s*([^\n]+)', # Sphinx
r'\s*:param\s+(\w+)\s+%s:[^\n]*', # Sphinx param with type
r'\s*@type\s+%s:\s*([^\n]+)', # Epydoc
]
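# A minimal sketch of how these templates are used further down: %s is
# replaced with the escaped parameter name and group(1) captures the type.
#
#     pat = re.compile(DOCSTRING_PARAM_PATTERNS[0] % re.escape('foo'))
#     pat.search(':type foo: str').group(1)   # -> 'str'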
DOCSTRING_RETURN_PATTERNS = [
re.compile(r'\s*:rtype:\s*([^\n]+)', re.M), # Sphinx
re.compile(r'\s*@rtype:\s*([^\n]+)', re.M), # Epydoc
]
REST_ROLE_PATTERN = re.compile(r':[^`]+:`([^`]+)`')
_numpy_doc_string_cache = None
def _get_numpy_doc_string_cls():
global _numpy_doc_string_cache
try:
from numpydoc.docscrape import NumpyDocString
_numpy_doc_string_cache = NumpyDocString
except ImportError as e:
_numpy_doc_string_cache = e
if isinstance(_numpy_doc_string_cache, ImportError):
raise _numpy_doc_string_cache
return _numpy_doc_string_cache
def _search_param_in_numpydocstr(docstr, param_str):
"""Search `docstr` (in numpydoc format) for type(-s) of `param_str`."""
try:
# This is a non-public API. If it ever changes we should be
# prepared and return gracefully.
params = _get_numpy_doc_string_cls()(docstr)._parsed_data['Parameters']
except (KeyError, AttributeError, ImportError):
return []
for p_name, p_type, p_descr in params:
if p_name == param_str:
m = re.match('([^,]+(,[^,]+)*?)(,[ ]*optional)?$', p_type)
if m:
p_type = m.group(1)
return list(_expand_typestr(p_type))
return []
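# For example (a sketch of the flow above): given a numpydoc section
#
#     Parameters
#     ----------
#     copy : bool, optional
#
# _search_param_in_numpydocstr(docstr, 'copy') returns ['bool']; the
# ', optional' suffix is dropped by the regex before expansion.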
def _search_return_in_numpydocstr(docstr):
"""
Search `docstr` (in numpydoc format) for type(-s) of function returns.
"""
try:
doc = _get_numpy_doc_string_cls()(docstr)
except ImportError:
return
try:
# This is a non-public API. If it ever changes we should be
# prepared and return gracefully.
returns = doc._parsed_data['Returns']
returns += doc._parsed_data['Yields']
except (KeyError, AttributeError):
return
for r_name, r_type, r_descr in returns:
# Return names are optional; when omitted, the type is given in the name field
if not r_type:
r_type = r_name
for type_ in _expand_typestr(r_type):
yield type_
def _expand_typestr(type_str):
"""
Attempts to interpret the possible types in `type_str`
"""
# Check if alternative types are specified with 'or'
if re.search('\\bor\\b', type_str):
for t in type_str.split('or'):
yield t.split('of')[0].strip()
# Check for a "list of `type`" style annotation and reduce the type to list
elif re.search('\\bof\\b', type_str):
yield type_str.split('of')[0]
# Check if the type is a set of valid literal values, e.g. {'C', 'F', 'A'}
elif type_str.startswith('{'):
node = parse(type_str, version='3.6').children[0]
if node.type == 'atom':
for leaf in node.children[1].children:
if leaf.type == 'number':
if '.' in leaf.value:
yield 'float'
else:
yield 'int'
elif leaf.type == 'string':
if 'b' in leaf.string_prefix.lower():
yield 'bytes'
else:
yield 'str'
# Ignore everything else.
# Otherwise just work with what we have.
else:
yield type_str
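# Behaviour sketch (not doctests; parso is required for the set case):
#
#     list(_expand_typestr('int or str'))     # -> ['int', 'str']
#     list(_expand_typestr("{'C', 'F'}"))     # -> ['str', 'str']
#     list(_expand_typestr('list of int'))    # -> ['list '] (unstripped)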
def _search_param_in_docstr(docstr, param_str):
"""
Search `docstr` for type(-s) of `param_str`.
>>> _search_param_in_docstr(':type param: int', 'param')
['int']
>>> _search_param_in_docstr('@type param: int', 'param')
['int']
>>> _search_param_in_docstr(
... ':type param: :class:`threading.Thread`', 'param')
['threading.Thread']
>>> bool(_search_param_in_docstr('no document', 'param'))
False
>>> _search_param_in_docstr(':param int param: some description', 'param')
['int']
"""
# look at #40 to see definitions of those params
patterns = [re.compile(p % re.escape(param_str))
for p in DOCSTRING_PARAM_PATTERNS]
for pattern in patterns:
match = pattern.search(docstr)
if match:
return [_strip_rst_role(match.group(1))]
return _search_param_in_numpydocstr(docstr, param_str)
def _strip_rst_role(type_str):
"""
Strip off the part that looks like a ReST role in `type_str`.
>>> _strip_rst_role(':class:`ClassName`') # strip off :class:
'ClassName'
>>> _strip_rst_role(':py:obj:`module.Object`') # works with domain
'module.Object'
>>> _strip_rst_role('ClassName') # do nothing when not ReST role
'ClassName'
See also:
http://sphinx-doc.org/domains.html#cross-referencing-python-objects
"""
match = REST_ROLE_PATTERN.match(type_str)
if match:
return match.group(1)
else:
return type_str
def _evaluate_for_statement_string(module_context, string):
code = dedent(u("""
def pseudo_docstring_stuff():
'''
Create a pseudo function for docstring statements.
Need this docstring so that if the below part is not valid Python this
is still a function.
'''
{}
"""))
if string is None:
return []
for element in re.findall('((?:\w+\.)*\w+)\.', string):
# Try to import module part in dotted name.
# (e.g., 'threading' in 'threading.Thread').
string = 'import %s\n' % element + string
# Take the default grammar here, if we load the Python 2.7 grammar here, it
# will be impossible to use `...` (Ellipsis) as a token. Docstring types
# don't need to conform with the current grammar.
grammar = module_context.evaluator.latest_grammar
try:
module = grammar.parse(code.format(indent_block(string)), error_recovery=False)
except ParserSyntaxError:
return []
try:
funcdef = next(module.iter_funcdefs())
# First pick the suite, then the simple_stmt, and then the statement
# node itself, which is not the last child because the last child is a
# newline.
stmt = funcdef.children[-1].children[-1].children[-2]
except (AttributeError, IndexError):
return []
from jedi.evaluate.context import FunctionContext
function_context = FunctionContext(
module_context.evaluator,
module_context,
funcdef
)
func_execution_context = function_context.get_function_execution()
# Use the module of the param.
# TODO: this module is not the module of the param in the case of a
# function call. In that case it's the module of the function call,
# stuffed with content from that call.
return list(_execute_types_in_stmt(func_execution_context, stmt))
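# For a docstring type like 'threading.Thread', the source assembled above
# is roughly (sketch):
#
#     def pseudo_docstring_stuff():
#         '''...'''
#         import threading
#         threading.Thread
#
# and the trailing expression statement is what gets evaluated.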
def _execute_types_in_stmt(module_context, stmt):
"""
Execute all types or general elements that we find in a statement. This
doesn't include tuple, list and dict literals, because their contents
are executed separately (and used as type information).
"""
definitions = module_context.eval_node(stmt)
return ContextSet.from_sets(
_execute_array_values(module_context.evaluator, d)
for d in definitions
)
def _execute_array_values(evaluator, array):
"""
Tuples indicate that there's not just one return value, but the listed
ones. `(str, int)` means that it returns a tuple with both types.
"""
from jedi.evaluate.context.iterable import SequenceLiteralContext, FakeSequence
if isinstance(array, SequenceLiteralContext):
values = []
for lazy_context in array.py__iter__():
objects = ContextSet.from_sets(
_execute_array_values(evaluator, typ)
for typ in lazy_context.infer()
)
values.append(LazyKnownContexts(objects))
return {FakeSequence(evaluator, array.array_type, values)}
else:
return array.execute_evaluated()
@evaluator_method_cache()
def infer_param(execution_context, param):
from jedi.evaluate.context.instance import AnonymousInstanceFunctionExecution
def eval_docstring(docstring):
return ContextSet.from_iterable(
p
for param_str in _search_param_in_docstr(docstring, param.name.value)
for p in _evaluate_for_statement_string(module_context, param_str)
)
module_context = execution_context.get_root_context()
func = param.get_parent_function()
if func.type == 'lambdef':
return NO_CONTEXTS
types = eval_docstring(execution_context.py__doc__())
if isinstance(execution_context, AnonymousInstanceFunctionExecution) and \
execution_context.function_context.name.string_name == '__init__':
class_context = execution_context.instance.class_context
types |= eval_docstring(class_context.py__doc__())
return types
@evaluator_method_cache()
@iterator_to_context_set
def infer_return_types(function_context):
def search_return_in_docstr(code):
for p in DOCSTRING_RETURN_PATTERNS:
match = p.search(code)
if match:
yield _strip_rst_role(match.group(1))
# Check for numpy style return hint
for type_ in _search_return_in_numpydocstr(code):
yield type_
for type_str in search_return_in_docstr(function_context.py__doc__()):
for type_eval in _evaluate_for_statement_string(function_context.get_root_context(), type_str):
yield type_eval
| technologiescollege/Blockly-rduino-communication | scripts_XP/Lib/site-packages/jedi/evaluate/docstrings.py | Python | gpl-3.0 | 10,503 | 0.000952 |
def apple():
print "I AM APPLES!"
# this is just a variable
tangerine = "Living reflection of a dream."
| Valka7a/python-playground | python-the-hard-way/mystuff.py | Python | mit | 106 | 0.018868 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import absolute_import
import os, gc
import sys
from . import common
from time import time, sleep
from proton import *
from .common import pump, Skipped
from proton.reactor import Reactor
from proton._compat import str2bin
# older versions of gc do not provide the garbage list
if not hasattr(gc, "garbage"):
gc.garbage=[]
# future test areas
# + different permutations of setup
# - creating deliveries and calling input/output before opening the session/link
# + shrinking output_size down to something small? should the engine buffer?
# + resuming
# - locally and remotely created deliveries with the same tag
# Jython 2.5 needs this:
try:
bytes()
except:
bytes = str
# and this...
try:
bytearray()
except:
def bytearray(x):
return str2bin('\x00') * x
OUTPUT_SIZE = 10*1024
class Test(common.Test):
def __init__(self, *args):
common.Test.__init__(self, *args)
self._wires = []
def connection(self):
c1 = Connection()
c2 = Connection()
t1 = Transport()
t1.bind(c1)
t2 = Transport()
t2.bind(c2)
self._wires.append((c1, t1, c2, t2))
mask1 = 0
mask2 = 0
for cat in ("TRACE_FRM", "TRACE_RAW"):
trc = os.environ.get("PN_%s" % cat)
if trc and trc.lower() in ("1", "2", "yes", "true"):
mask1 = mask1 | getattr(Transport, cat)
if trc == "2":
mask2 = mask2 | getattr(Transport, cat)
t1.trace(mask1)
t2.trace(mask2)
return c1, c2
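# Usage note (a sketch of the env handling above): setting PN_TRACE_FRM=1
# enables frame tracing on t1 only, while PN_TRACE_FRM=2 traces both
# transports. PN_TRACE_RAW behaves the same way for raw byte tracing.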
def link(self, name, max_frame=None, idle_timeout=None):
c1, c2 = self.connection()
if max_frame:
c1.transport.max_frame_size = max_frame[0]
c2.transport.max_frame_size = max_frame[1]
if idle_timeout:
# idle_timeout in seconds expressed as float
c1.transport.idle_timeout = idle_timeout[0]
c2.transport.idle_timeout = idle_timeout[1]
c1.open()
c2.open()
ssn1 = c1.session()
ssn1.open()
self.pump()
ssn2 = c2.session_head(Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_ACTIVE)
ssn2.open()
self.pump()
snd = ssn1.sender(name)
rcv = ssn2.receiver(name)
return snd, rcv
def cleanup(self):
self._wires = []
def pump(self, buffer_size=OUTPUT_SIZE):
for c1, t1, c2, t2 in self._wires:
pump(t1, t2, buffer_size)
class ConnectionTest(Test):
def setup(self):
gc.enable()
self.c1, self.c2 = self.connection()
def cleanup(self):
# release resources created by this class
super(ConnectionTest, self).cleanup()
self.c1 = None
self.c2 = None
def teardown(self):
self.cleanup()
gc.collect()
assert not gc.garbage
def test_open_close(self):
assert self.c1.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
assert self.c2.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
self.c1.open()
self.pump()
assert self.c1.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_UNINIT
assert self.c2.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_ACTIVE
self.c2.open()
self.pump()
assert self.c1.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert self.c2.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
self.c1.close()
self.pump()
assert self.c1.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
assert self.c2.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_CLOSED
self.c2.close()
self.pump()
assert self.c1.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
assert self.c2.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
def test_simultaneous_open_close(self):
assert self.c1.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
assert self.c2.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
self.c1.open()
self.c2.open()
self.pump()
assert self.c1.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert self.c2.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
self.c1.close()
self.c2.close()
self.pump()
assert self.c1.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
assert self.c2.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
def test_capabilities(self):
self.c1.offered_capabilities = Array(UNDESCRIBED, Data.SYMBOL,
symbol("O_one"),
symbol("O_two"),
symbol("O_three"))
self.c1.desired_capabilities = Array(UNDESCRIBED, Data.SYMBOL,
symbol("D_one"),
symbol("D_two"),
symbol("D_three"))
self.c1.open()
assert self.c2.remote_offered_capabilities is None
assert self.c2.remote_desired_capabilities is None
self.pump()
assert self.c2.remote_offered_capabilities == self.c1.offered_capabilities, \
(self.c2.remote_offered_capabilities, self.c1.offered_capabilities)
assert self.c2.remote_desired_capabilities == self.c1.desired_capabilities, \
(self.c2.remote_desired_capabilities, self.c1.desired_capabilities)
def test_condition(self):
self.c1.open()
self.c2.open()
self.pump()
assert self.c1.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert self.c2.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
cond = Condition("blah:bleh", "this is a description", {symbol("foo"): "bar"})
self.c1.condition = cond
self.c1.close()
self.pump()
assert self.c1.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
assert self.c2.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_CLOSED
rcond = self.c2.remote_condition
assert rcond == cond, (rcond, cond)
def test_properties(self, p1={symbol("key"): symbol("value")}, p2=None):
self.c1.properties = p1
self.c2.properties = p2
self.c1.open()
self.c2.open()
self.pump()
assert self.c2.remote_properties == p1, (self.c2.remote_properties, p1)
assert self.c1.remote_properties == p2, (self.c1.remote_properties, p2)
# The proton implementation limits channel_max to 32767.
# If I set the application's limit lower than that, I should
# get my wish. If I set it higher -- not.
def test_channel_max_low(self, value=1234):
self.c1.transport.channel_max = value
self.c1.open()
self.pump()
assert self.c1.transport.channel_max == value, (self.c1.transport.channel_max, value)
def test_channel_max_high(self, value=65535):
self.c1.transport.channel_max = value
self.c1.open()
self.pump()
if "java" in sys.platform:
assert self.c1.transport.channel_max == 65535, (self.c1.transport.channel_max, value)
else:
assert self.c1.transport.channel_max == 32767, (self.c1.transport.channel_max, value)
def test_channel_max_raise_and_lower(self):
if "java" in sys.platform:
upper_limit = 65535
else:
upper_limit = 32767
# It's OK to lower the max below upper_limit.
self.c1.transport.channel_max = 12345
assert self.c1.transport.channel_max == 12345
# But it won't let us raise the limit above PN_IMPL_CHANNEL_MAX.
self.c1.transport.channel_max = 65535
assert self.c1.transport.channel_max == upper_limit
# send the OPEN frame
self.c1.open()
self.pump()
# Now it's too late to make any change, because
# we have already sent the OPEN frame.
try:
self.c1.transport.channel_max = 666
assert False, "expected session exception"
except:
pass
assert self.c1.transport.channel_max == upper_limit
def test_channel_max_limits_sessions(self):
return
# This is an index -- so max number of channels should be 1.
self.c1.transport.channel_max = 0
self.c1.open()
self.c2.open()
ssn_0 = self.c2.session()
assert ssn_0 != None
ssn_0.open()
self.pump()
try:
ssn_1 = self.c2.session()
assert False, "expected session exception"
except SessionException:
pass
def test_cleanup(self):
self.c1.open()
self.c2.open()
self.pump()
assert self.c1.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert self.c2.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
t1 = self.c1.transport
t2 = self.c2.transport
c2 = self.c2
self.c1.close()
# release all references to C1, except that held by the transport
self.cleanup()
gc.collect()
# transport should flush last state from C1:
pump(t1, t2)
assert c2.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_CLOSED
def test_user_config(self):
if "java" in sys.platform:
raise Skipped("Unsupported API")
self.c1.user = "vindaloo"
self.c1.password = "secret"
self.c1.open()
self.pump()
self.c2.user = "leela"
self.c2.password = "trustno1"
self.c2.open()
self.pump()
assert self.c1.user == "vindaloo", self.c1.user
assert self.c1.password == None, self.c1.password
assert self.c2.user == "leela", self.c2.user
assert self.c2.password == None, self.c2.password
class SessionTest(Test):
def setup(self):
gc.enable()
self.c1, self.c2 = self.connection()
self.ssn = self.c1.session()
self.c1.open()
self.c2.open()
def cleanup(self):
# release resources created by this class
super(SessionTest, self).cleanup()
self.c1 = None
self.c2 = None
self.ssn = None
def teardown(self):
self.cleanup()
gc.collect()
assert not gc.garbage
def test_open_close(self):
assert self.ssn.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
self.ssn.open()
assert self.ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_UNINIT
self.pump()
assert self.ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_UNINIT
ssn = self.c2.session_head(Endpoint.REMOTE_ACTIVE | Endpoint.LOCAL_UNINIT)
assert ssn != None
assert ssn.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_ACTIVE
assert self.ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_UNINIT
ssn.open()
assert ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert self.ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_UNINIT
self.pump()
assert ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert self.ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
ssn.close()
assert ssn.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
assert self.ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
self.pump()
assert ssn.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
assert self.ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_CLOSED
self.ssn.close()
assert ssn.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
assert self.ssn.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
self.pump()
assert ssn.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
assert self.ssn.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
def test_simultaneous_close(self):
self.ssn.open()
self.pump()
ssn = self.c2.session_head(Endpoint.REMOTE_ACTIVE | Endpoint.LOCAL_UNINIT)
assert ssn != None
ssn.open()
self.pump()
assert self.ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
self.ssn.close()
ssn.close()
assert self.ssn.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
assert ssn.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
self.pump()
assert self.ssn.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
assert ssn.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
def test_closing_connection(self):
self.ssn.open()
self.pump()
self.c1.close()
self.pump()
self.ssn.close()
self.pump()
def test_condition(self):
self.ssn.open()
self.pump()
ssn = self.c2.session_head(Endpoint.REMOTE_ACTIVE | Endpoint.LOCAL_UNINIT)
assert ssn != None
ssn.open()
self.pump()
assert self.ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
cond = Condition("blah:bleh", "this is a description", {symbol("foo"): "bar"})
self.ssn.condition = cond
self.ssn.close()
self.pump()
assert self.ssn.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
assert ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_CLOSED
rcond = ssn.remote_condition
assert rcond == cond, (rcond, cond)
def test_cleanup(self):
snd, rcv = self.link("test-link")
snd.open()
rcv.open()
self.pump()
snd_ssn = snd.session
rcv_ssn = rcv.session
assert rcv_ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
self.ssn = None
snd_ssn.close()
snd_ssn.free()
del snd_ssn
gc.collect()
self.pump()
assert rcv_ssn.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_CLOSED
def test_reopen_on_same_session_without_free(self):
"""
confirm that a link is correctly opened when attaching to a previously
closed link *that has not been freed yet* on the same session
"""
self.ssn.open()
self.pump()
ssn2 = self.c2.session_head(Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_ACTIVE)
ssn2.open()
self.pump()
snd = self.ssn.sender("test-link")
rcv = ssn2.receiver("test-link")
assert snd.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
assert rcv.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
snd.open()
rcv.open()
self.pump()
assert snd.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert rcv.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
snd.close()
rcv.close()
self.pump()
assert snd.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
assert rcv.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
snd = self.ssn.sender("test-link")
rcv = ssn2.receiver("test-link")
assert snd.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
assert rcv.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
snd.open()
rcv.open()
self.pump()
assert snd.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert rcv.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
def test_set_get_outgoing_window(self):
assert self.ssn.outgoing_window == 2147483647
self.ssn.outgoing_window = 1024
assert self.ssn.outgoing_window == 1024
class LinkTest(Test):
def setup(self):
gc.enable()
self.snd, self.rcv = self.link("test-link")
def cleanup(self):
# release resources created by this class
super(LinkTest, self).cleanup()
self.snd = None
self.rcv = None
def teardown(self):
self.cleanup()
gc.collect()
assert not gc.garbage, gc.garbage
def test_open_close(self):
assert self.snd.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
assert self.rcv.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
self.snd.open()
assert self.snd.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_UNINIT
assert self.rcv.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
self.pump()
assert self.snd.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_UNINIT
assert self.rcv.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_ACTIVE
self.rcv.open()
assert self.snd.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_UNINIT
assert self.rcv.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
self.pump()
assert self.snd.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert self.rcv.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
self.snd.close()
assert self.snd.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
assert self.rcv.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
self.pump()
assert self.snd.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
assert self.rcv.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_CLOSED
self.rcv.close()
assert self.snd.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
assert self.rcv.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
self.pump()
assert self.snd.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
assert self.rcv.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
def test_simultaneous_open_close(self):
assert self.snd.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
assert self.rcv.state == Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_UNINIT
self.snd.open()
self.rcv.open()
assert self.snd.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_UNINIT
assert self.rcv.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_UNINIT
self.pump()
assert self.snd.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert self.rcv.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
self.snd.close()
self.rcv.close()
assert self.snd.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
assert self.rcv.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
self.pump()
assert self.snd.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
assert self.rcv.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_CLOSED
def test_multiple(self):
rcv = self.snd.session.receiver("second-rcv")
assert rcv.name == "second-rcv"
self.snd.open()
rcv.open()
self.pump()
c2 = self.rcv.session.connection
l = c2.link_head(Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_ACTIVE)
while l:
l.open()
l = l.next(Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_ACTIVE)
self.pump()
assert self.snd
assert rcv
self.snd.close()
rcv.close()
ssn = rcv.session
conn = ssn.connection
ssn.close()
conn.close()
self.pump()
def test_closing_session(self):
self.snd.open()
self.rcv.open()
ssn1 = self.snd.session
self.pump()
ssn1.close()
self.pump()
self.snd.close()
self.pump()
def test_closing_connection(self):
self.snd.open()
self.rcv.open()
ssn1 = self.snd.session
c1 = ssn1.connection
self.pump()
c1.close()
self.pump()
self.snd.close()
self.pump()
def assertEqualTermini(self, t1, t2):
assert t1.type == t2.type, (t1.type, t2.type)
assert t1.address == t2.address, (t1.address, t2.address)
assert t1.durability == t2.durability, (t1.durability, t2.durability)
assert t1.expiry_policy == t2.expiry_policy, (t1.expiry_policy, t2.expiry_policy)
assert t1.timeout == t2.timeout, (t1.timeout, t2.timeout)
assert t1.dynamic == t2.dynamic, (t1.dynamic, t2.dynamic)
for attr in ["properties", "capabilities", "outcomes", "filter"]:
d1 = getattr(t1, attr)
d2 = getattr(t2, attr)
assert d1.format() == d2.format(), (attr, d1.format(), d2.format())
def _test_source_target(self, config_source, config_target):
if config_source is None:
self.snd.source.type = Terminus.UNSPECIFIED
else:
config_source(self.snd.source)
if config_target is None:
self.snd.target.type = Terminus.UNSPECIFIED
else:
config_target(self.snd.target)
self.snd.open()
self.pump()
self.assertEqualTermini(self.rcv.remote_source, self.snd.source)
self.assertEqualTermini(self.rcv.remote_target, self.snd.target)
self.rcv.target.copy(self.rcv.remote_target)
self.rcv.source.copy(self.rcv.remote_source)
self.rcv.open()
self.pump()
self.assertEqualTermini(self.snd.remote_target, self.snd.target)
self.assertEqualTermini(self.snd.remote_source, self.snd.source)
def test_source_target(self):
self._test_source_target(TerminusConfig(address="source"),
TerminusConfig(address="target"))
def test_source(self):
self._test_source_target(TerminusConfig(address="source"), None)
def test_target(self):
self._test_source_target(None, TerminusConfig(address="target"))
def test_coordinator(self):
self._test_source_target(None, TerminusConfig(type=Terminus.COORDINATOR))
def test_source_target_full(self):
self._test_source_target(TerminusConfig(address="source",
timeout=3,
dist_mode=Terminus.DIST_MODE_MOVE,
filter=[("int", 1), ("symbol", "two"), ("string", "three")],
capabilities=["one", "two", "three"]),
TerminusConfig(address="source",
timeout=7,
capabilities=[]))
def test_distribution_mode(self):
self._test_source_target(TerminusConfig(address="source",
dist_mode=Terminus.DIST_MODE_COPY),
TerminusConfig(address="target"))
assert self.rcv.remote_source.distribution_mode == Terminus.DIST_MODE_COPY
assert self.rcv.remote_target.distribution_mode == Terminus.DIST_MODE_UNSPECIFIED
def test_dynamic_link(self):
self._test_source_target(TerminusConfig(address=None, dynamic=True), None)
assert self.rcv.remote_source.dynamic
assert self.rcv.remote_source.address is None
def test_condition(self):
self.snd.open()
self.rcv.open()
self.pump()
assert self.snd.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert self.rcv.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
cond = Condition("blah:bleh", "this is a description", {symbol("foo"): "bar"})
self.snd.condition = cond
self.snd.close()
self.pump()
assert self.snd.state == Endpoint.LOCAL_CLOSED | Endpoint.REMOTE_ACTIVE
assert self.rcv.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_CLOSED
rcond = self.rcv.remote_condition
assert rcond == cond, (rcond, cond)
def test_settle_mode(self):
self.snd.snd_settle_mode = Link.SND_UNSETTLED
assert self.snd.snd_settle_mode == Link.SND_UNSETTLED
self.rcv.rcv_settle_mode = Link.RCV_SECOND
assert self.rcv.rcv_settle_mode == Link.RCV_SECOND
assert self.snd.remote_rcv_settle_mode != Link.RCV_SECOND
assert self.rcv.remote_snd_settle_mode != Link.SND_UNSETTLED
self.snd.open()
self.rcv.open()
self.pump()
assert self.snd.remote_rcv_settle_mode == Link.RCV_SECOND
assert self.rcv.remote_snd_settle_mode == Link.SND_UNSETTLED
def test_cleanup(self):
snd, rcv = self.link("test-link")
snd.open()
rcv.open()
self.pump()
assert rcv.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
snd.close()
snd.free()
del snd
gc.collect()
self.pump()
assert rcv.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_CLOSED
class TerminusConfig:
def __init__(self, type=None, address=None, timeout=None, durability=None,
filter=None, capabilities=None, dynamic=False, dist_mode=None):
self.address = address
self.timeout = timeout
self.durability = durability
self.filter = filter
self.capabilities = capabilities
self.dynamic = dynamic
self.dist_mode = dist_mode
self.type = type
def __call__(self, terminus):
if self.type is not None:
terminus.type = self.type
if self.address is not None:
terminus.address = self.address
if self.timeout is not None:
terminus.timeout = self.timeout
if self.durability is not None:
terminus.durability = self.durability
if self.capabilities is not None:
terminus.capabilities.put_array(False, Data.SYMBOL)
terminus.capabilities.enter()
for c in self.capabilities:
terminus.capabilities.put_symbol(c)
if self.filter is not None:
terminus.filter.put_map()
terminus.filter.enter()
for (t, v) in self.filter:
setter = getattr(terminus.filter, "put_%s" % t)
setter(v)
if self.dynamic:
terminus.dynamic = True
if self.dist_mode is not None:
terminus.distribution_mode = self.dist_mode
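# Illustrative usage (mirroring _test_source_target above): a TerminusConfig
# is a callable bundle of settings applied to a Terminus, e.g.
#
#     config = TerminusConfig(address="source", timeout=3)
#     config(snd.source)   # copies the settings onto the link's source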
class TransferTest(Test):
def setup(self):
gc.enable()
self.snd, self.rcv = self.link("test-link")
self.c1 = self.snd.session.connection
self.c2 = self.rcv.session.connection
self.snd.open()
self.rcv.open()
self.pump()
def cleanup(self):
# release resources created by this class
super(TransferTest, self).cleanup()
self.c1 = None
self.c2 = None
self.snd = None
self.rcv = None
def teardown(self):
self.cleanup()
gc.collect()
assert not gc.garbage
def test_work_queue(self):
assert self.c1.work_head is None
self.snd.delivery("tag")
assert self.c1.work_head is None
self.rcv.flow(1)
self.pump()
d = self.c1.work_head
assert d is not None
tag = d.tag
assert tag == "tag", tag
assert d.writable
n = self.snd.send(str2bin("this is a test"))
assert self.snd.advance()
assert self.c1.work_head is None
self.pump()
d = self.c2.work_head
assert d.tag == "tag"
assert d.readable
def test_multiframe(self):
self.rcv.flow(1)
self.snd.delivery("tag")
msg = str2bin("this is a test")
n = self.snd.send(msg)
assert n == len(msg)
self.pump()
d = self.rcv.current
assert d
assert d.tag == "tag", repr(d.tag)
assert d.readable
binary = self.rcv.recv(1024)
assert binary == msg, (binary, msg)
binary = self.rcv.recv(1024)
assert binary == str2bin("")
msg = str2bin("this is more")
n = self.snd.send(msg)
assert n == len(msg)
assert self.snd.advance()
self.pump()
binary = self.rcv.recv(1024)
assert binary == msg, (binary, msg)
binary = self.rcv.recv(1024)
assert binary is None
def test_disposition(self):
self.rcv.flow(1)
self.pump()
sd = self.snd.delivery("tag")
msg = str2bin("this is a test")
n = self.snd.send(msg)
assert n == len(msg)
assert self.snd.advance()
self.pump()
rd = self.rcv.current
assert rd is not None
assert rd.tag == sd.tag
rmsg = self.rcv.recv(1024)
assert rmsg == msg
rd.update(Delivery.ACCEPTED)
self.pump()
rdisp = sd.remote_state
ldisp = rd.local_state
assert rdisp == ldisp == Delivery.ACCEPTED, (rdisp, ldisp)
assert sd.updated
sd.update(Delivery.ACCEPTED)
self.pump()
assert sd.local_state == rd.remote_state == Delivery.ACCEPTED
sd.settle()
def test_delivery_id_ordering(self):
self.rcv.flow(1024)
self.pump(buffer_size=64*1024)
#fill up delivery buffer on sender
for m in range(1024):
sd = self.snd.delivery("tag%s" % m)
msg = ("message %s" % m).encode('ascii')
n = self.snd.send(msg)
assert n == len(msg)
assert self.snd.advance()
self.pump(buffer_size=64*1024)
# receive a session-window's worth of messages and accept them
for m in range(1024):
rd = self.rcv.current
assert rd is not None, m
assert rd.tag == ("tag%s" % m), (rd.tag, m)
msg = self.rcv.recv(1024)
assert msg == ("message %s" % m).encode('ascii'), (msg, m)
rd.update(Delivery.ACCEPTED)
rd.settle()
self.pump(buffer_size=64*1024)
#add some new deliveries
for m in range(1024, 1450):
sd = self.snd.delivery("tag%s" % m)
msg = ("message %s" % m).encode('ascii')
n = self.snd.send(msg)
assert n == len(msg)
assert self.snd.advance()
#handle all disposition changes to sent messages
d = self.c1.work_head
while d:
next_d = d.work_next
if d.updated:
d.update(Delivery.ACCEPTED)
d.settle()
d = next_d
#submit some more deliveries
for m in range(1450, 1500):
sd = self.snd.delivery("tag%s" % m)
msg = ("message %s" % m).encode('ascii')
n = self.snd.send(msg)
assert n == len(msg)
assert self.snd.advance()
self.pump(buffer_size=64*1024)
self.rcv.flow(1024)
self.pump(buffer_size=64*1024)
#verify remaining messages can be received and accepted
for m in range(1024, 1500):
rd = self.rcv.current
assert rd is not None, m
assert rd.tag == ("tag%s" % m), (rd.tag, m)
msg = self.rcv.recv(1024)
assert msg == ("message %s" % m).encode('ascii'), (msg, m)
rd.update(Delivery.ACCEPTED)
rd.settle()
def test_cleanup(self):
self.rcv.flow(10)
self.pump()
for x in range(10):
self.snd.delivery("tag%d" % x)
msg = str2bin("this is a test")
n = self.snd.send(msg)
assert n == len(msg)
assert self.snd.advance()
self.snd.close()
self.snd.free()
self.snd = None
gc.collect()
self.pump()
for x in range(10):
rd = self.rcv.current
assert rd is not None
assert rd.tag == "tag%d" % x
rmsg = self.rcv.recv(1024)
assert self.rcv.advance()
assert rmsg == msg
# close of snd should've settled:
assert rd.settled
rd.settle()
class MaxFrameTransferTest(Test):
def setup(self):
pass
def cleanup(self):
# release resources created by this class
super(MaxFrameTransferTest, self).cleanup()
self.c1 = None
self.c2 = None
self.snd = None
self.rcv = None
def teardown(self):
self.cleanup()
def message(self, size):
parts = []
for i in range(size):
parts.append(str(i))
return "/".join(parts)[:size].encode("utf-8")
def testMinFrame(self):
"""
Configure receiver to support minimum max-frame as defined by AMQP-1.0.
Verify transfer of messages larger than 512.
"""
self.snd, self.rcv = self.link("test-link", max_frame=[0,512])
self.c1 = self.snd.session.connection
self.c2 = self.rcv.session.connection
self.snd.open()
self.rcv.open()
self.pump()
assert self.rcv.session.connection.transport.max_frame_size == 512
assert self.snd.session.connection.transport.remote_max_frame_size == 512
self.rcv.flow(1)
self.snd.delivery("tag")
msg = self.message(513)
n = self.snd.send(msg)
assert n == len(msg)
assert self.snd.advance()
self.pump()
binary = self.rcv.recv(513)
assert binary == msg
binary = self.rcv.recv(1024)
assert binary == None
def testOddFrame(self):
"""
Test an odd-sized max frame limit with data that will require multiple
frames to be transferred.
"""
self.snd, self.rcv = self.link("test-link", max_frame=[0,521])
self.c1 = self.snd.session.connection
self.c2 = self.rcv.session.connection
self.snd.open()
self.rcv.open()
self.pump()
assert self.rcv.session.connection.transport.max_frame_size == 521
assert self.snd.session.connection.transport.remote_max_frame_size == 521
self.rcv.flow(2)
self.snd.delivery("tag")
msg = ("X" * 1699).encode('utf-8')
n = self.snd.send(msg)
assert n == len(msg)
assert self.snd.advance()
self.pump()
binary = self.rcv.recv(1699)
assert binary == msg
binary = self.rcv.recv(1024)
assert binary == None
self.rcv.advance()
self.snd.delivery("gat")
msg = self.message(1426)
n = self.snd.send(msg)
assert n == len(msg)
assert self.snd.advance()
self.pump()
binary = self.rcv.recv(1426)
assert binary == msg
self.pump()
binary = self.rcv.recv(1024)
assert binary == None
def testBigMessage(self):
"""
Test transferring a big message.
"""
self.snd, self.rcv = self.link("test-link")
self.c1 = self.snd.session.connection
self.c2 = self.rcv.session.connection
self.snd.open()
self.rcv.open()
self.pump()
self.rcv.flow(2)
self.snd.delivery("tag")
msg = self.message(1024*256)
n = self.snd.send(msg)
assert n == len(msg)
assert self.snd.advance()
self.pump()
binary = self.rcv.recv(1024*256)
assert binary == msg
binary = self.rcv.recv(1024)
assert binary == None
class IdleTimeoutTest(Test):
def setup(self):
pass
def cleanup(self):
# release resources created by this class
super(IdleTimeoutTest, self).cleanup()
self.snd = None
self.rcv = None
self.c1 = None
self.c2 = None
def teardown(self):
self.cleanup()
def message(self, size):
parts = []
for i in range(size):
parts.append(str(i))
return "/".join(parts)[:size]
def testGetSet(self):
"""
Verify the configuration and negotiation of the idle timeout.
"""
self.snd, self.rcv = self.link("test-link", idle_timeout=[1.0,2.0])
self.c1 = self.snd.session.connection
self.c2 = self.rcv.session.connection
self.snd.open()
self.rcv.open()
self.pump()
# proton advertises 1/2 the configured timeout to the peer:
assert self.rcv.session.connection.transport.idle_timeout == 2.0
assert self.rcv.session.connection.transport.remote_idle_timeout == 0.5
assert self.snd.session.connection.transport.idle_timeout == 1.0
assert self.snd.session.connection.transport.remote_idle_timeout == 1.0
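# (Worked numbers for the asserts above: snd was configured with 1.0 and
# rcv with 2.0, and each side advertises half of its own value, so rcv
# sees a remote timeout of 0.5 and snd sees a remote timeout of 1.0.)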
def testTimeout(self):
"""
Verify the AMQP Connection idle timeout.
"""
# snd will timeout the Connection if no frame is received within 1000 ticks
self.snd, self.rcv = self.link("test-link", idle_timeout=[1.0,0])
self.c1 = self.snd.session.connection
self.c2 = self.rcv.session.connection
self.snd.open()
self.rcv.open()
self.pump()
t_snd = self.snd.session.connection.transport
t_rcv = self.rcv.session.connection.transport
assert t_rcv.idle_timeout == 0.0
# proton advertises 1/2 the timeout (see spec)
assert t_rcv.remote_idle_timeout == 0.5
assert t_snd.idle_timeout == 1.0
assert t_snd.remote_idle_timeout == 0.0
sndr_frames_in = t_snd.frames_input
rcvr_frames_out = t_rcv.frames_output
# at t+1msec, nothing should happen:
clock = 0.001
assert t_snd.tick(clock) == 1.001, "deadline for remote timeout"
assert t_rcv.tick(clock) == 0.251, "deadline to send keepalive"
self.pump()
assert sndr_frames_in == t_snd.frames_input, "unexpected received frame"
# at one tick from expected idle frame send, nothing should happen:
clock = 0.250
assert t_snd.tick(clock) == 1.001, "deadline for remote timeout"
assert t_rcv.tick(clock) == 0.251, "deadline to send keepalive"
self.pump()
assert sndr_frames_in == t_snd.frames_input, "unexpected received frame"
# this should cause rcvr to expire and send a keepalive
clock = 0.251
assert t_snd.tick(clock) == 1.001, "deadline for remote timeout"
assert t_rcv.tick(clock) == 0.501, "deadline to send keepalive"
self.pump()
sndr_frames_in += 1
rcvr_frames_out += 1
assert sndr_frames_in == t_snd.frames_input, "unexpected received frame"
assert rcvr_frames_out == t_rcv.frames_output, "unexpected frame"
# since a keepalive was received, sndr will rebase its clock against this
# tick, and the receiver should not change its deadline
clock = 0.498
assert t_snd.tick(clock) == 1.498, "deadline for remote timeout"
assert t_rcv.tick(clock) == 0.501, "deadline to send keepalive"
self.pump()
assert sndr_frames_in == t_snd.frames_input, "unexpected received frame"
# now expire sndr
clock = 1.499
t_snd.tick(clock)
self.pump()
assert self.c2.state & Endpoint.REMOTE_CLOSED
assert self.c2.remote_condition.name == "amqp:resource-limit-exceeded"
class CreditTest(Test):
def setup(self):
self.snd, self.rcv = self.link("test-link", max_frame=(16*1024, 16*1024))
self.c1 = self.snd.session.connection
self.c2 = self.rcv.session.connection
self.snd.open()
self.rcv.open()
self.pump()
def cleanup(self):
# release resources created by this class
super(CreditTest, self).cleanup()
self.c1 = None
self.snd = None
self.c2 = None
self.rcv2 = None
self.snd2 = None
def teardown(self):
self.cleanup()
def testCreditSender(self, count=1024):
credit = self.snd.credit
assert credit == 0, credit
self.rcv.flow(10)
self.pump()
credit = self.snd.credit
assert credit == 10, credit
self.rcv.flow(count)
self.pump()
credit = self.snd.credit
assert credit == 10 + count, credit
def testCreditReceiver(self):
self.rcv.flow(10)
self.pump()
assert self.rcv.credit == 10, self.rcv.credit
d = self.snd.delivery("tag")
assert d
assert self.snd.advance()
self.pump()
assert self.rcv.credit == 10, self.rcv.credit
assert self.rcv.queued == 1, self.rcv.queued
c = self.rcv.current
assert c.tag == "tag", c.tag
assert self.rcv.advance()
assert self.rcv.credit == 9, self.rcv.credit
assert self.rcv.queued == 0, self.rcv.queued
def _testBufferingOnClose(self, a, b):
for i in range(10):
d = self.snd.delivery("tag-%s" % i)
assert d
d.settle()
self.pump()
assert self.snd.queued == 10
endpoints = {"connection": (self.c1, self.c2),
"session": (self.snd.session, self.rcv.session),
"link": (self.snd, self.rcv)}
local_a, remote_a = endpoints[a]
local_b, remote_b = endpoints[b]
remote_b.close()
self.pump()
assert local_b.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_CLOSED
local_a.close()
self.pump()
assert remote_a.state & Endpoint.REMOTE_CLOSED
assert self.snd.queued == 10
def testBufferingOnCloseLinkLink(self):
self._testBufferingOnClose("link", "link")
def testBufferingOnCloseLinkSession(self):
self._testBufferingOnClose("link", "session")
def testBufferingOnCloseLinkConnection(self):
self._testBufferingOnClose("link", "connection")
def testBufferingOnCloseSessionLink(self):
self._testBufferingOnClose("session", "link")
def testBufferingOnCloseSessionSession(self):
self._testBufferingOnClose("session", "session")
def testBufferingOnCloseSessionConnection(self):
self._testBufferingOnClose("session", "connection")
def testBufferingOnCloseConnectionLink(self):
self._testBufferingOnClose("connection", "link")
def testBufferingOnCloseConnectionSession(self):
self._testBufferingOnClose("connection", "session")
def testBufferingOnCloseConnectionConnection(self):
self._testBufferingOnClose("connection", "connection")
def testFullDrain(self):
assert self.rcv.credit == 0
assert self.snd.credit == 0
self.rcv.drain(10)
assert self.rcv.draining()
assert self.rcv.credit == 10
assert self.snd.credit == 0
self.pump()
assert self.rcv.credit == 10
assert self.snd.credit == 10
assert self.rcv.draining()
self.snd.drained()
assert self.rcv.credit == 10
assert self.snd.credit == 0
assert self.rcv.draining()
self.pump()
assert self.rcv.credit == 0
assert self.snd.credit == 0
assert not self.rcv.draining()
drained = self.rcv.drained()
assert drained == 10, drained
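# (Drain recap, as exercised above: the receiver issues credit with the
# drain flag set; the sender either consumes it by sending or hands the
# unused credit back via drained(), after which pumping zeroes the credit
# on both ends and rcv.drained() reports how much came back.)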
def testPartialDrain(self):
self.rcv.drain(2)
assert self.rcv.draining()
self.pump()
d = self.snd.delivery("tag")
assert d
assert self.snd.advance()
self.snd.drained()
assert self.rcv.draining()
self.pump()
assert not self.rcv.draining()
c = self.rcv.current
assert self.rcv.queued == 1, self.rcv.queued
assert c.tag == d.tag, c.tag
assert self.rcv.advance()
assert not self.rcv.current
assert self.rcv.credit == 0, self.rcv.credit
assert not self.rcv.draining()
drained = self.rcv.drained()
assert drained == 1, drained
def testDrainFlow(self):
assert self.rcv.credit == 0
assert self.snd.credit == 0
self.rcv.drain(10)
assert self.rcv.credit == 10
assert self.snd.credit == 0
self.pump()
assert self.rcv.credit == 10
assert self.snd.credit == 10
self.snd.drained()
assert self.rcv.credit == 10
assert self.snd.credit == 0
self.pump()
assert self.rcv.credit == 0
assert self.snd.credit == 0
self.rcv.flow(10)
assert self.rcv.credit == 10
assert self.snd.credit == 0
self.pump()
assert self.rcv.credit == 10
assert self.snd.credit == 10
self.snd.drained()
assert self.rcv.credit == 10
assert self.snd.credit == 10
self.pump()
assert self.rcv.credit == 10
assert self.snd.credit == 10
drained = self.rcv.drained()
assert drained == 10, drained
def testNegative(self):
assert self.snd.credit == 0
d = self.snd.delivery("tag")
assert d
assert self.snd.advance()
self.pump()
assert self.rcv.credit == 0
assert self.rcv.queued == 0
self.rcv.flow(1)
assert self.rcv.credit == 1
assert self.rcv.queued == 0
self.pump()
assert self.rcv.credit == 1
assert self.rcv.queued == 1, self.rcv.queued
c = self.rcv.current
assert c
assert c.tag == "tag"
assert self.rcv.advance()
assert self.rcv.credit == 0
assert self.rcv.queued == 0
def testDrainZero(self):
assert self.snd.credit == 0
assert self.rcv.credit == 0
assert self.rcv.queued == 0
drained = self.rcv.drained()
assert drained == 0
self.rcv.flow(10)
self.pump()
assert self.snd.credit == 10
assert self.rcv.credit == 10
assert self.rcv.queued == 0
self.snd.drained()
self.pump()
assert self.snd.credit == 10
assert self.rcv.credit == 10
assert self.rcv.queued == 0
drained = self.rcv.drained()
assert drained == 0
self.rcv.drain(0)
assert self.snd.credit == 10
assert self.rcv.credit == 10
assert self.rcv.queued == 0
self.pump()
assert self.snd.credit == 10
assert self.rcv.credit == 10
assert self.rcv.queued == 0
self.snd.drained()
assert self.snd.credit == 0
assert self.rcv.credit == 10
assert self.rcv.queued == 0
drained = self.rcv.drained()
assert drained == 0
self.pump()
assert self.snd.credit == 0
assert self.rcv.credit == 0
assert self.rcv.queued == 0
drained = self.rcv.drained()
assert drained == 10
def testDrainOrder(self):
""" Verify drain/drained works regardless of ordering. See PROTON-401
"""
assert self.snd.credit == 0
assert self.rcv.credit == 0
assert self.rcv.queued == 0
#self.rcv.session.connection.transport.trace(Transport.TRACE_FRM)
#self.snd.session.connection.transport.trace(Transport.TRACE_FRM)
## verify that a sender that has reached the drain state will respond
## promptly to a drain issued by the peer.
self.rcv.flow(10)
self.pump()
assert self.snd.credit == 10, self.snd.credit
assert self.rcv.credit == 10, self.rcv.credit
sd = self.snd.delivery("tagA")
assert sd
n = self.snd.send(str2bin("A"))
assert n == 1
self.pump()
self.snd.advance()
# done sending, so signal that we are drained:
self.snd.drained()
self.pump()
assert self.snd.credit == 9, self.snd.credit
assert self.rcv.credit == 10, self.rcv.credit
self.rcv.drain(0)
self.pump()
assert self.snd.credit == 9, self.snd.credit
assert self.rcv.credit == 10, self.rcv.credit
data = self.rcv.recv(10)
assert data == str2bin("A"), data
self.rcv.advance()
self.pump()
assert self.snd.credit == 9, self.snd.credit
assert self.rcv.credit == 9, self.rcv.credit
self.snd.drained()
self.pump()
assert self.snd.credit == 0, self.snd.credit
assert self.rcv.credit == 0, self.rcv.credit
# verify that a drain requested by the peer is not "acknowledged" until
# after the sender has completed sending its pending messages
self.rcv.flow(10)
self.pump()
assert self.snd.credit == 10, self.snd.credit
assert self.rcv.credit == 10, self.rcv.credit
sd = self.snd.delivery("tagB")
assert sd
n = self.snd.send(str2bin("B"))
assert n == 1
self.snd.advance()
self.pump()
assert self.snd.credit == 9, self.snd.credit
assert self.rcv.credit == 10, self.rcv.credit
self.rcv.drain(0)
self.pump()
assert self.snd.credit == 9, self.snd.credit
assert self.rcv.credit == 10, self.rcv.credit
sd = self.snd.delivery("tagC")
assert sd
n = self.snd.send(str2bin("C"))
assert n == 1
self.snd.advance()
self.pump()
assert self.snd.credit == 8, self.snd.credit
assert self.rcv.credit == 10, self.rcv.credit
# now that the sender has finished sending everything, it can signal
# drained
self.snd.drained()
self.pump()
assert self.snd.credit == 0, self.snd.credit
assert self.rcv.credit == 2, self.rcv.credit
data = self.rcv.recv(10)
assert data == str2bin("B"), data
self.rcv.advance()
data = self.rcv.recv(10)
assert data == str2bin("C"), data
self.rcv.advance()
self.pump()
assert self.snd.credit == 0, self.snd.credit
assert self.rcv.credit == 0, self.rcv.credit
def testPushback(self, count=10):
assert self.snd.credit == 0
assert self.rcv.credit == 0
self.rcv.flow(count)
self.pump()
for i in range(count):
d = self.snd.delivery("tag%s" % i)
assert d
self.snd.advance()
assert self.snd.queued == count
assert self.rcv.queued == 0
self.pump()
assert self.snd.queued == 0
assert self.rcv.queued == count
d = self.snd.delivery("extra")
self.snd.advance()
assert self.snd.queued == 1
assert self.rcv.queued == count
self.pump()
assert self.snd.queued == 1
assert self.rcv.queued == count
def testHeadOfLineBlocking(self):
self.snd2 = self.snd.session.sender("link-2")
self.rcv2 = self.rcv.session.receiver("link-2")
self.snd2.open()
self.rcv2.open()
self.pump()
assert self.snd2.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
assert self.rcv2.state == Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE
self.rcv.flow(5)
self.rcv2.flow(10)
self.pump()
assert self.snd.credit == 5
assert self.snd2.credit == 10
for i in range(10):
tag = "test %d" % i
self.snd.delivery( tag )
self.snd.send( tag.encode("ascii") )
assert self.snd.advance()
self.snd2.delivery( tag )
self.snd2.send( tag.encode("ascii") )
assert self.snd2.advance()
self.pump()
for i in range(5):
b = self.rcv.recv( 512 )
assert self.rcv.advance()
b = self.rcv2.recv( 512 )
assert self.rcv2.advance()
for i in range(5):
b = self.rcv2.recv( 512 )
assert self.rcv2.advance()
class SessionCreditTest(Test):
def teardown(self):
self.cleanup()
def testBuffering(self, count=32, size=1024, capacity=16*1024, max_frame=1024):
snd, rcv = self.link("test-link", max_frame=(max_frame, max_frame))
rcv.session.incoming_capacity = capacity
snd.open()
rcv.open()
rcv.flow(count)
self.pump()
assert count > 0
total_bytes = count * size
assert snd.session.outgoing_bytes == 0, snd.session.outgoing_bytes
assert rcv.session.incoming_bytes == 0, rcv.session.incoming_bytes
assert snd.queued == 0, snd.queued
assert rcv.queued == 0, rcv.queued
data = bytes(bytearray(size))
idx = 0
while snd.credit:
d = snd.delivery("tag%s" % idx)
assert d
n = snd.send(data)
assert n == size, (n, size)
assert snd.advance()
self.pump()
idx += 1
assert idx == count, (idx, count)
assert snd.session.outgoing_bytes < total_bytes, (snd.session.outgoing_bytes, total_bytes)
assert rcv.session.incoming_bytes < capacity, (rcv.session.incoming_bytes, capacity)
assert snd.session.outgoing_bytes + rcv.session.incoming_bytes == total_bytes, \
(snd.session.outgoing_bytes, rcv.session.incoming_bytes, total_bytes)
if snd.session.outgoing_bytes > 0:
available = rcv.session.incoming_capacity - rcv.session.incoming_bytes
assert available < max_frame, (available, max_frame)
for i in range(count):
d = rcv.current
assert d, i
pending = d.pending
before = rcv.session.incoming_bytes
assert rcv.advance()
after = rcv.session.incoming_bytes
assert before - after == pending, (before, after, pending)
snd_before = snd.session.incoming_bytes
self.pump()
snd_after = snd.session.incoming_bytes
assert rcv.session.incoming_bytes < capacity
if snd_before > 0:
assert capacity - after <= max_frame
assert snd_before > snd_after
if snd_after > 0:
available = rcv.session.incoming_capacity - rcv.session.incoming_bytes
assert available < max_frame, available
def testBufferingSize16(self):
self.testBuffering(size=16)
def testBufferingSize256(self):
self.testBuffering(size=256)
def testBufferingSize512(self):
self.testBuffering(size=512)
def testBufferingSize2048(self):
self.testBuffering(size=2048)
def testBufferingSize1025(self):
self.testBuffering(size=1025)
def testBufferingSize1023(self):
self.testBuffering(size=1023)
def testBufferingSize989(self):
self.testBuffering(size=989)
def testBufferingSize1059(self):
self.testBuffering(size=1059)
def testCreditWithBuffering(self):
snd, rcv = self.link("test-link", max_frame=(1024, 1024))
rcv.session.incoming_capacity = 64*1024
snd.open()
rcv.open()
rcv.flow(128)
self.pump()
assert snd.credit == 128, snd.credit
assert rcv.queued == 0, rcv.queued
idx = 0
while snd.credit:
d = snd.delivery("tag%s" % idx)
snd.send(("x"*1024).encode('ascii'))
assert d
assert snd.advance()
self.pump()
idx += 1
assert idx == 128, idx
assert rcv.queued < 128, rcv.queued
rcv.flow(1)
self.pump()
assert snd.credit == 1, snd.credit
class SettlementTest(Test):
def setup(self):
self.snd, self.rcv = self.link("test-link")
self.c1 = self.snd.session.connection
self.c2 = self.rcv.session.connection
self.snd.open()
self.rcv.open()
self.pump()
def cleanup(self):
# release resources created by this class
super(SettlementTest, self).cleanup()
self.c1 = None
self.snd = None
self.c2 = None
self.rcv2 = None
self.snd2 = None
def teardown(self):
self.cleanup()
def testSettleCurrent(self):
self.rcv.flow(10)
self.pump()
assert self.snd.credit == 10, self.snd.credit
d = self.snd.delivery("tag")
e = self.snd.delivery("tag2")
assert d
assert e
c = self.snd.current
assert c.tag == "tag", c.tag
c.settle()
c = self.snd.current
assert c.tag == "tag2", c.tag
c.settle()
c = self.snd.current
assert not c
self.pump()
c = self.rcv.current
assert c
assert c.tag == "tag", c.tag
assert c.settled
c.settle()
c = self.rcv.current
assert c
assert c.tag == "tag2", c.tag
assert c.settled
c.settle()
c = self.rcv.current
assert not c
def testUnsettled(self):
self.rcv.flow(10)
self.pump()
assert self.snd.unsettled == 0, self.snd.unsettled
assert self.rcv.unsettled == 0, self.rcv.unsettled
d = self.snd.delivery("tag")
assert d
assert self.snd.unsettled == 1, self.snd.unsettled
assert self.rcv.unsettled == 0, self.rcv.unsettled
assert self.snd.advance()
self.pump()
assert self.snd.unsettled == 1, self.snd.unsettled
assert self.rcv.unsettled == 1, self.rcv.unsettled
c = self.rcv.current
assert c
c.settle()
assert self.snd.unsettled == 1, self.snd.unsettled
assert self.rcv.unsettled == 0, self.rcv.unsettled
def testMultipleUnsettled(self, count=1024, size=1024):
self.rcv.flow(count)
self.pump()
assert self.snd.unsettled == 0, self.snd.unsettled
assert self.rcv.unsettled == 0, self.rcv.unsettled
unsettled = []
for i in range(count):
sd = self.snd.delivery("tag%s" % i)
assert sd
n = self.snd.send(("x"*size).encode('ascii'))
assert n == size, n
assert self.snd.advance()
self.pump()
rd = self.rcv.current
assert rd, "did not receive delivery %s" % i
n = rd.pending
b = self.rcv.recv(n)
assert len(b) == n, (b, n)
rd.update(Delivery.ACCEPTED)
assert self.rcv.advance()
self.pump()
unsettled.append(rd)
assert self.rcv.unsettled == count
for rd in unsettled:
rd.settle()
def testMultipleUnsettled2K1K(self):
self.testMultipleUnsettled(2048, 1024)
def testMultipleUnsettled4K1K(self):
self.testMultipleUnsettled(4096, 1024)
def testMultipleUnsettled1K2K(self):
self.testMultipleUnsettled(1024, 2048)
def testMultipleUnsettled2K2K(self):
self.testMultipleUnsettled(2048, 2048)
def testMultipleUnsettled4K2K(self):
self.testMultipleUnsettled(4096, 2048)
class PipelineTest(Test):
def setup(self):
self.c1, self.c2 = self.connection()
def cleanup(self):
# release resources created by this class
super(PipelineTest, self).cleanup()
self.c1 = None
self.c2 = None
def teardown(self):
self.cleanup()
def test(self):
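        # Pipeline open/transfer/close frames before the peer has responded,
        # then verify the receiver can still drain all ten deliveries.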
ssn = self.c1.session()
snd = ssn.sender("sender")
self.c1.open()
ssn.open()
snd.open()
for i in range(10):
d = snd.delivery("delivery-%s" % i)
snd.send(str2bin("delivery-%s" % i))
d.settle()
snd.close()
ssn.close()
self.c1.close()
self.pump()
state = self.c2.state
assert state == (Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_ACTIVE), "%x" % state
ssn2 = self.c2.session_head(Endpoint.LOCAL_UNINIT)
assert ssn2
        state = ssn2.state
assert state == (Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_ACTIVE), "%x" % state
rcv = self.c2.link_head(Endpoint.LOCAL_UNINIT)
assert rcv
state = rcv.state
assert state == (Endpoint.LOCAL_UNINIT | Endpoint.REMOTE_ACTIVE), "%x" % state
self.c2.open()
ssn2.open()
rcv.open()
rcv.flow(10)
assert rcv.queued == 0, rcv.queued
self.pump()
assert rcv.queued == 10, rcv.queued
state = rcv.state
assert state == (Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_CLOSED), "%x" % state
state = ssn2.state
assert state == (Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_CLOSED), "%x" % state
state = self.c2.state
assert state == (Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_CLOSED), "%x" % state
for i in range(rcv.queued):
d = rcv.current
assert d
assert d.tag == "delivery-%s" % i
d.settle()
assert rcv.queued == 0, rcv.queued
class ServerTest(Test):
def testKeepalive(self):
""" Verify that idle frames are sent to keep a Connection alive
"""
if "java" in sys.platform:
raise Skipped()
idle_timeout = self.delay
server = common.TestServer()
server.start()
class Program:
def on_reactor_init(self, event):
self.conn = event.reactor.connection()
self.conn.hostname = "%s:%s" % (server.host, server.port)
self.conn.open()
self.old_count = None
event.reactor.schedule(3 * idle_timeout, self)
def on_connection_bound(self, event):
event.transport.idle_timeout = idle_timeout
def on_connection_remote_open(self, event):
self.old_count = event.transport.frames_input
def on_timer_task(self, event):
assert self.conn.state == (Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE), "Connection terminated"
assert self.conn.transport.frames_input > self.old_count, "No idle frames received"
self.conn.close()
Reactor(Program()).run()
server.stop()
def testIdleTimeout(self):
""" Verify that a Connection is terminated properly when Idle frames do not
arrive in a timely manner.
"""
if "java" in sys.platform:
raise Skipped()
idle_timeout = self.delay
server = common.TestServer(idle_timeout=idle_timeout)
server.start()
class Program:
def on_reactor_init(self, event):
self.conn = event.reactor.connection()
self.conn.hostname = "%s:%s" % (server.host, server.port)
self.conn.open()
self.remote_condition = None
self.old_count = None
# verify the connection stays up even if we don't explicitly send stuff
# wait up to 3x the idle timeout
event.reactor.schedule(3 * idle_timeout, self)
def on_connection_bound(self, event):
self.transport = event.transport
def on_connection_remote_open(self, event):
self.old_count = event.transport.frames_output
def on_connection_remote_close(self, event):
assert self.conn.remote_condition
assert self.conn.remote_condition.name == "amqp:resource-limit-exceeded"
self.remote_condition = self.conn.remote_condition
def on_timer_task(self, event):
assert self.conn.state == (Endpoint.LOCAL_ACTIVE | Endpoint.REMOTE_ACTIVE), "Connection terminated"
assert self.conn.transport.frames_output > self.old_count, "No idle frames sent"
# now wait to explicitly cause the other side to expire:
sleep(3 * idle_timeout)
p = Program()
Reactor(p).run()
assert p.remote_condition
assert p.remote_condition.name == "amqp:resource-limit-exceeded"
server.stop()
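# Disposition helpers for DeliveryTest.testDisposition below. Each *Value
# class implements a small apply/check protocol: apply() writes local
# disposition state onto a delivery, and check() verifies that the peer
# observed the same state on its remote view of the delivery.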
class NoValue:
def __init__(self):
pass
def apply(self, dlv):
pass
def check(self, dlv):
assert dlv.data == None
assert dlv.section_number == 0
assert dlv.section_offset == 0
assert dlv.condition == None
assert dlv.failed == False
assert dlv.undeliverable == False
assert dlv.annotations == None
class RejectValue:
def __init__(self, condition):
self.condition = condition
def apply(self, dlv):
dlv.condition = self.condition
def check(self, dlv):
assert dlv.data == None, dlv.data
assert dlv.section_number == 0
assert dlv.section_offset == 0
assert dlv.condition == self.condition, (dlv.condition, self.condition)
assert dlv.failed == False
assert dlv.undeliverable == False
assert dlv.annotations == None
class ReceivedValue:
def __init__(self, section_number, section_offset):
self.section_number = section_number
self.section_offset = section_offset
def apply(self, dlv):
dlv.section_number = self.section_number
dlv.section_offset = self.section_offset
def check(self, dlv):
assert dlv.data == None, dlv.data
assert dlv.section_number == self.section_number, (dlv.section_number, self.section_number)
assert dlv.section_offset == self.section_offset
assert dlv.condition == None
assert dlv.failed == False
assert dlv.undeliverable == False
assert dlv.annotations == None
class ModifiedValue:
def __init__(self, failed, undeliverable, annotations):
self.failed = failed
self.undeliverable = undeliverable
self.annotations = annotations
def apply(self, dlv):
dlv.failed = self.failed
dlv.undeliverable = self.undeliverable
dlv.annotations = self.annotations
def check(self, dlv):
assert dlv.data == None, dlv.data
assert dlv.section_number == 0
assert dlv.section_offset == 0
assert dlv.condition == None
assert dlv.failed == self.failed
assert dlv.undeliverable == self.undeliverable
assert dlv.annotations == self.annotations, (dlv.annotations, self.annotations)
class CustomValue:
def __init__(self, data):
self.data = data
def apply(self, dlv):
dlv.data = self.data
def check(self, dlv):
assert dlv.data == self.data, (dlv.data, self.data)
assert dlv.section_number == 0
assert dlv.section_offset == 0
assert dlv.condition == None
assert dlv.failed == False
assert dlv.undeliverable == False
assert dlv.annotations == None
class DeliveryTest(Test):
def teardown(self):
self.cleanup()
def testDisposition(self, count=1, tag="tag%i", type=Delivery.ACCEPTED, value=NoValue()):
snd, rcv = self.link("test-link")
snd.open()
rcv.open()
snd_deliveries = []
for i in range(count):
d = snd.delivery(tag % i)
snd_deliveries.append(d)
snd.advance()
rcv.flow(count)
self.pump()
rcv_deliveries = []
for i in range(count):
d = rcv.current
assert d.tag == (tag % i)
rcv_deliveries.append(d)
rcv.advance()
for d in rcv_deliveries:
value.apply(d.local)
d.update(type)
self.pump()
for d in snd_deliveries:
assert d.remote_state == type
assert d.remote.type == type
value.check(d.remote)
value.apply(d.local)
d.update(type)
self.pump()
for d in rcv_deliveries:
assert d.remote_state == type
assert d.remote.type == type
value.check(d.remote)
for d in snd_deliveries:
d.settle()
self.pump()
for d in rcv_deliveries:
assert d.settled, d.settled
d.settle()
def testReceived(self):
self.testDisposition(type=Disposition.RECEIVED, value=ReceivedValue(1, 2))
def testRejected(self):
self.testDisposition(type=Disposition.REJECTED, value=RejectValue(Condition(symbol("foo"))))
def testReleased(self):
self.testDisposition(type=Disposition.RELEASED)
def testModified(self):
self.testDisposition(type=Disposition.MODIFIED,
value=ModifiedValue(failed=True, undeliverable=True,
annotations={"key": "value"}))
def testCustom(self):
self.testDisposition(type=0x12345, value=CustomValue([1, 2, 3]))
class CollectorTest(Test):
def setup(self):
self.collector = Collector()
def drain(self):
result = []
while True:
e = self.collector.peek()
if e:
result.append(e)
self.collector.pop()
else:
break
return result
def expect(self, *types):
return self.expect_oneof(types)
def expect_oneof(self, *sequences):
events = self.drain()
types = tuple([e.type for e in events])
for alternative in sequences:
if types == alternative:
if len(events) == 1:
return events[0]
elif len(events) > 1:
return events
else:
return
assert False, "actual events %s did not match any of the expected sequences: %s" % (events, sequences)
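    # Illustration (hypothetical event types): expect_oneof((A,), (A, B))
    # passes if the drained event types equal either tuple; one matching
    # event is returned directly, several are returned as a list.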
def expect_until(self, *types):
events = self.drain()
etypes = tuple([e.type for e in events[-len(types):]])
        assert etypes == types, "actual events %s did not end in expected sequence: %s" % (events, types)
class EventTest(CollectorTest):
def teardown(self):
self.cleanup()
def testEndpointEvents(self):
c1, c2 = self.connection()
c1.collect(self.collector)
self.expect(Event.CONNECTION_INIT)
self.pump()
self.expect()
c2.open()
self.pump()
self.expect(Event.CONNECTION_REMOTE_OPEN)
self.pump()
self.expect()
ssn = c2.session()
snd = ssn.sender("sender")
ssn.open()
snd.open()
self.expect()
self.pump()
self.expect(Event.SESSION_INIT, Event.SESSION_REMOTE_OPEN,
Event.LINK_INIT, Event.LINK_REMOTE_OPEN)
c1.open()
ssn2 = c1.session()
ssn2.open()
rcv = ssn2.receiver("receiver")
rcv.open()
self.pump()
self.expect(Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT,
Event.SESSION_INIT, Event.SESSION_LOCAL_OPEN,
Event.TRANSPORT, Event.LINK_INIT, Event.LINK_LOCAL_OPEN,
Event.TRANSPORT)
rcv.close()
self.expect(Event.LINK_LOCAL_CLOSE, Event.TRANSPORT)
self.pump()
rcv.free()
del rcv
self.expect(Event.LINK_FINAL)
ssn2.free()
del ssn2
self.pump()
c1.free()
c1.transport.unbind()
self.expect_oneof((Event.SESSION_FINAL, Event.LINK_FINAL, Event.SESSION_FINAL,
Event.CONNECTION_UNBOUND, Event.CONNECTION_FINAL),
(Event.CONNECTION_UNBOUND, Event.SESSION_FINAL, Event.LINK_FINAL,
Event.SESSION_FINAL, Event.CONNECTION_FINAL))
def testConnectionINIT_FINAL(self):
c = Connection()
c.collect(self.collector)
self.expect(Event.CONNECTION_INIT)
c.free()
self.expect(Event.CONNECTION_FINAL)
def testSessionINIT_FINAL(self):
c = Connection()
c.collect(self.collector)
self.expect(Event.CONNECTION_INIT)
s = c.session()
self.expect(Event.SESSION_INIT)
s.free()
self.expect(Event.SESSION_FINAL)
c.free()
self.expect(Event.CONNECTION_FINAL)
def testLinkINIT_FINAL(self):
c = Connection()
c.collect(self.collector)
self.expect(Event.CONNECTION_INIT)
s = c.session()
self.expect(Event.SESSION_INIT)
r = s.receiver("asdf")
self.expect(Event.LINK_INIT)
r.free()
self.expect(Event.LINK_FINAL)
c.free()
self.expect(Event.SESSION_FINAL, Event.CONNECTION_FINAL)
def testFlowEvents(self):
snd, rcv = self.link("test-link")
snd.session.connection.collect(self.collector)
rcv.open()
rcv.flow(10)
self.pump()
self.expect(Event.CONNECTION_INIT, Event.SESSION_INIT,
Event.LINK_INIT, Event.LINK_REMOTE_OPEN, Event.LINK_FLOW)
rcv.flow(10)
self.pump()
self.expect(Event.LINK_FLOW)
return snd, rcv
def testDeliveryEvents(self):
snd, rcv = self.link("test-link")
rcv.session.connection.collect(self.collector)
rcv.open()
rcv.flow(10)
self.pump()
self.expect(Event.CONNECTION_INIT, Event.SESSION_INIT,
Event.LINK_INIT, Event.LINK_LOCAL_OPEN, Event.TRANSPORT)
snd.delivery("delivery")
snd.send(str2bin("Hello World!"))
snd.advance()
self.pump()
self.expect()
snd.open()
self.pump()
self.expect(Event.LINK_REMOTE_OPEN, Event.DELIVERY)
rcv.session.connection.transport.unbind()
rcv.session.connection.free()
self.expect(Event.CONNECTION_UNBOUND, Event.TRANSPORT, Event.LINK_FINAL,
Event.SESSION_FINAL, Event.CONNECTION_FINAL)
def testDeliveryEventsDisp(self):
snd, rcv = self.testFlowEvents()
snd.open()
dlv = snd.delivery("delivery")
snd.send(str2bin("Hello World!"))
assert snd.advance()
self.expect(Event.LINK_LOCAL_OPEN, Event.TRANSPORT)
self.pump()
self.expect(Event.LINK_FLOW)
rdlv = rcv.current
assert rdlv != None
assert rdlv.tag == "delivery"
rdlv.update(Delivery.ACCEPTED)
self.pump()
event = self.expect(Event.DELIVERY)
assert event.context == dlv, (dlv, event.context)
def testConnectionBOUND_UNBOUND(self):
c = Connection()
c.collect(self.collector)
self.expect(Event.CONNECTION_INIT)
t = Transport()
t.bind(c)
self.expect(Event.CONNECTION_BOUND)
t.unbind()
self.expect(Event.CONNECTION_UNBOUND, Event.TRANSPORT)
def testTransportERROR_CLOSE(self):
c = Connection()
c.collect(self.collector)
self.expect(Event.CONNECTION_INIT)
t = Transport()
t.bind(c)
self.expect(Event.CONNECTION_BOUND)
assert t.condition is None
t.push(str2bin("asdf"))
self.expect(Event.TRANSPORT_ERROR, Event.TRANSPORT_TAIL_CLOSED)
assert t.condition is not None
assert t.condition.name == "amqp:connection:framing-error"
assert "AMQP header mismatch" in t.condition.description
p = t.pending()
assert p > 0
t.pop(p)
self.expect(Event.TRANSPORT_HEAD_CLOSED, Event.TRANSPORT_CLOSED)
def testTransportCLOSED(self):
c = Connection()
c.collect(self.collector)
self.expect(Event.CONNECTION_INIT)
t = Transport()
t.bind(c)
c.open()
self.expect(Event.CONNECTION_BOUND, Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT)
c2 = Connection()
t2 = Transport()
t2.bind(c2)
c2.open()
c2.close()
pump(t, t2)
self.expect(Event.CONNECTION_REMOTE_OPEN, Event.CONNECTION_REMOTE_CLOSE,
Event.TRANSPORT_TAIL_CLOSED)
c.close()
pump(t, t2)
self.expect(Event.CONNECTION_LOCAL_CLOSE, Event.TRANSPORT,
Event.TRANSPORT_HEAD_CLOSED, Event.TRANSPORT_CLOSED)
def testLinkDetach(self):
c1 = Connection()
c1.collect(self.collector)
t1 = Transport()
t1.bind(c1)
c1.open()
s1 = c1.session()
s1.open()
l1 = s1.sender("asdf")
l1.open()
l1.detach()
self.expect_until(Event.LINK_LOCAL_DETACH, Event.TRANSPORT)
c2 = Connection()
c2.collect(self.collector)
t2 = Transport()
t2.bind(c2)
pump(t1, t2)
self.expect_until(Event.LINK_REMOTE_DETACH)
class PeerTest(CollectorTest):
def setup(self):
CollectorTest.setup(self)
self.connection = Connection()
self.connection.collect(self.collector)
self.transport = Transport()
self.transport.bind(self.connection)
self.peer = Connection()
self.peer_transport = Transport()
self.peer_transport.bind(self.peer)
self.peer_transport.trace(Transport.TRACE_OFF)
def pump(self):
pump(self.transport, self.peer_transport)
class TeardownLeakTest(PeerTest):
def doLeak(self, local, remote):
self.connection.open()
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND,
Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT)
ssn = self.connection.session()
ssn.open()
self.expect(Event.SESSION_INIT, Event.SESSION_LOCAL_OPEN, Event.TRANSPORT)
snd = ssn.sender("sender")
snd.open()
self.expect(Event.LINK_INIT, Event.LINK_LOCAL_OPEN, Event.TRANSPORT)
self.pump()
self.peer.open()
self.peer.session_head(0).open()
self.peer.link_head(0).open()
self.pump()
self.expect_oneof((Event.CONNECTION_REMOTE_OPEN, Event.SESSION_REMOTE_OPEN,
Event.LINK_REMOTE_OPEN, Event.LINK_FLOW),
(Event.CONNECTION_REMOTE_OPEN, Event.SESSION_REMOTE_OPEN,
Event.LINK_REMOTE_OPEN))
if local:
snd.close() # ha!!
self.expect(Event.LINK_LOCAL_CLOSE, Event.TRANSPORT)
ssn.close()
self.expect(Event.SESSION_LOCAL_CLOSE, Event.TRANSPORT)
self.connection.close()
self.expect(Event.CONNECTION_LOCAL_CLOSE, Event.TRANSPORT)
if remote:
self.peer.link_head(0).close() # ha!!
self.peer.session_head(0).close()
self.peer.close()
self.pump()
if remote:
self.expect(Event.TRANSPORT_HEAD_CLOSED, Event.LINK_REMOTE_CLOSE,
Event.SESSION_REMOTE_CLOSE, Event.CONNECTION_REMOTE_CLOSE,
Event.TRANSPORT_TAIL_CLOSED, Event.TRANSPORT_CLOSED)
else:
self.expect(Event.TRANSPORT_HEAD_CLOSED, Event.SESSION_REMOTE_CLOSE,
Event.CONNECTION_REMOTE_CLOSE, Event.TRANSPORT_TAIL_CLOSED,
Event.TRANSPORT_CLOSED)
self.connection.free()
self.expect(Event.LINK_FINAL, Event.SESSION_FINAL)
self.transport.unbind()
self.expect(Event.CONNECTION_UNBOUND, Event.CONNECTION_FINAL)
def testLocalRemoteLeak(self):
self.doLeak(True, True)
def testLocalLeak(self):
self.doLeak(True, False)
def testRemoteLeak(self):
self.doLeak(False, True)
def testLeak(self):
self.doLeak(False, False)
class IdleTimeoutEventTest(PeerTest):
def half_pump(self):
p = self.transport.pending()
if p>0:
self.transport.pop(p)
def testTimeoutWithZombieServer(self, expectOpenCloseFrames=True):
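        # Simulate a "zombie" peer: half_pump() discards our output and no
        # input ever arrives, so ticking past the idle timeout must error
        # out the transport.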
self.transport.idle_timeout = self.delay
self.connection.open()
self.half_pump()
self.transport.tick(time())
sleep(self.delay*2)
self.transport.tick(time())
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND,
Event.CONNECTION_LOCAL_OPEN, Event.TRANSPORT,
Event.TRANSPORT_ERROR, Event.TRANSPORT_TAIL_CLOSED)
assert self.transport.capacity() < 0
if expectOpenCloseFrames:
assert self.transport.pending() > 0
self.half_pump()
self.expect(Event.TRANSPORT_HEAD_CLOSED, Event.TRANSPORT_CLOSED)
assert self.transport.pending() < 0
def testTimeoutWithZombieServerAndSASL(self):
sasl = self.transport.sasl()
self.testTimeoutWithZombieServer(expectOpenCloseFrames=False)
class DeliverySegFaultTest(Test):
def testDeliveryAfterUnbind(self):
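        # Regression test: creating a delivery after the transport has been
        # bound and unbound must not crash (the class name suggests this
        # once segfaulted).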
conn = Connection()
t = Transport()
ssn = conn.session()
snd = ssn.sender("sender")
dlv = snd.delivery("tag")
dlv.settle()
del dlv
t.bind(conn)
t.unbind()
dlv = snd.delivery("tag")
class SaslEventTest(CollectorTest):
def testAnonymousNoInitialResponse(self):
if "java" in sys.platform:
raise Skipped()
conn = Connection()
conn.collect(self.collector)
transport = Transport(Transport.SERVER)
transport.bind(conn)
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND)
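        # Raw wire bytes, per the test name: a SASL protocol header, a
        # sasl-init frame selecting ANONYMOUS with no initial response,
        # then the plain AMQP protocol header.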
transport.push(str2bin('AMQP\x03\x01\x00\x00\x00\x00\x00 \x02\x01\x00\x00\x00SA'
'\xd0\x00\x00\x00\x10\x00\x00\x00\x02\xa3\tANONYMOUS@'
'AMQP\x00\x01\x00\x00'))
self.expect(Event.TRANSPORT)
for i in range(1024):
p = transport.pending()
self.drain()
p = transport.pending()
self.expect()
def testPipelinedServerReadFirst(self):
if "java" in sys.platform:
raise Skipped()
conn = Connection()
conn.collect(self.collector)
transport = Transport(Transport.CLIENT)
s = transport.sasl()
s.allowed_mechs("ANONYMOUS PLAIN")
transport.bind(conn)
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND)
transport.push(str2bin('AMQP\x03\x01\x00\x00\x00\x00\x00\x1c\x02\x01\x00\x00\x00S@'
'\xc0\x0f\x01\xe0\x0c\x01\xa3\tANONYMOUS\x00\x00\x00\x10'
'\x02\x01\x00\x00\x00SD\xc0\x03\x01P\x00AMQP\x00\x01\x00'
'\x00'))
self.expect(Event.TRANSPORT)
p = transport.pending()
bytes = transport.peek(p)
transport.pop(p)
server = Transport(Transport.SERVER)
server.push(bytes)
assert server.sasl().outcome == SASL.OK
def testPipelinedServerWriteFirst(self):
if "java" in sys.platform:
raise Skipped()
conn = Connection()
conn.collect(self.collector)
transport = Transport(Transport.CLIENT)
s = transport.sasl()
s.allowed_mechs("ANONYMOUS")
transport.bind(conn)
p = transport.pending()
bytes = transport.peek(p)
transport.pop(p)
self.expect(Event.CONNECTION_INIT, Event.CONNECTION_BOUND, Event.TRANSPORT)
transport.push(str2bin('AMQP\x03\x01\x00\x00\x00\x00\x00\x1c\x02\x01\x00\x00\x00S@'
'\xc0\x0f\x01\xe0\x0c\x01\xa3\tANONYMOUS\x00\x00\x00\x10'
'\x02\x01\x00\x00\x00SD\xc0\x03\x01P\x00AMQP\x00\x01\x00'
'\x00'))
self.expect(Event.TRANSPORT)
p = transport.pending()
bytes = transport.peek(p)
transport.pop(p)
# XXX: the bytes above appear to be correct, but we don't get any
# sort of event indicating that the transport is authenticated
|
clemensv/qpid-proton
|
tests/python/proton_tests/engine.py
|
Python
|
apache-2.0
| 75,989 | 0.007054 |
from slpp import SLPP
import re
class diObj(object):
def __init__(self,type,name,luaData):
self.objectType=type
self.objectName=name
self.objectKey=type+name
self.DictLuaData=luaData
self.DictTotal={}
self.setDict()
def setDict(self):
self.DictTotal["Name"]=self.objectName
self.DictTotal["Type"]=self.objectType
self.DictTotal["luaData"]=self.DictLuaData
def __str__(self):
return str(self.DictTotal)
def __eq__(self,objectTest):
        return (objectTest.objectType == self.objectType and objectTest.objectName == self.objectName)
class diObjFile(SLPP):
def __init__(self,filename):
SLPP.__init__(self)
        self.filename = filename
        self.diObjects = {}
        self.version = ""
        self.setString()
        self.readVersion()
        self.decode()
def setString(self):
f = open(self.filename,'r')
try:
text=f.read()
            ## TODO: remove all comments correctly.
            # For each character:
            #   if `="` then ignore until the next `"` without a preceding backslash
            #   if `/` preceded by another `/` then blank out everything until `\n`
            # Currently this only removes comments that start at the beginning of a line.
reg = re.compile('^//.*$', re.M)
text = reg.sub('', text, 0)
self.text = text
self.len = len(self.text)
finally:
f.close()
def readVersion(self):
self.white()
wordFound = self.word()
        if wordFound == "version":
            self.next_chr()
            self.version = self.value()
            #print "version: ", self.version
            self.next_chr()  # skip semicolon
            self.next_chr()
#decode
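    # Expected input shape (assumed from the parsing logic below):
    #   version <value>;
    #   object <type> <name> <lua-table>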
def decode(self):
self.white()
while self.ch:
wordFound = self.word()
if wordFound=="object":
self.next_chr();
objectType= self.value()
self.next_chr()
objectName=self.value()
self.next_chr()
luaData = self.value()
self.next_chr()
self.next_chr()
tmpdicObj=diObj(objectType,objectName,luaData)
self.diObjects[tmpdicObj.objectKey]=tmpdicObj.DictTotal
self.white()
__all__ = ['diObjFile']
|
guzzijones/parseDI
|
parseDI.py
|
Python
|
gpl-2.0
| 1,926 | 0.065421 |
import collections
from supriya import CalculationRate
from supriya.ugens.PureUGen import PureUGen
class LFTri(PureUGen):
"""
A non-band-limited triangle oscillator unit generator.
::
>>> supriya.ugens.LFTri.ar()
LFTri.ar()
"""
### CLASS VARIABLES ###
__documentation_section__ = "Oscillator UGens"
_ordered_input_names = collections.OrderedDict(
[("frequency", 440.0), ("initial_phase", 0.0)]
)
_valid_calculation_rates = (CalculationRate.AUDIO, CalculationRate.CONTROL)
|
Pulgama/supriya
|
supriya/ugens/LFTri.py
|
Python
|
mit
| 542 | 0.001845 |
from parsers.paragraphs import Paragraphs, Sentences
from parsers.blocks import Blocks, Words
class Saw:
paragraphs = Paragraphs
sentences = Sentences
blocks = Blocks
words = Words
@staticmethod
def load(text):
return Paragraphs.load(text)
|
diNard/Saw
|
saw/saw.py
|
Python
|
mit
| 274 | 0.00365 |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
"""
GTG's core functionality.
In order not to interact directly with the datastore, we provide
"requesters". A requester is only an interface, and there can be as
many requesters as you want, as long as they are all from the same
datastore. The requester also provides an interface for the tagstore.
If you want to display only a subset of tasks, you can either:
- have access to the main FilteredTree (the one displayed in the main
window) and apply filters on it (you can create your own), or
- get your own personal FilteredTree and apply to it the filters you
want without interfering with the main view (this is how the closed
tasks pane is currently built).
"""
#=== IMPORT ===================================================================
from re import findall
import ConfigParser
from xdg.BaseDirectory import xdg_data_home, xdg_config_home, xdg_data_dirs
import os
from GTG.tools.borg import Borg
from GTG.tools.testingmode import TestingMode
import GTG
DEFAULTS = {
'browser': {
"bg_color_enable": True,
"contents_preview_enable": False,
'tag_pane': False,
"sidebar_width": 120,
"closed_task_pane": False,
'bottom_pane_position': 300,
'toolbar': True,
'quick_add': True,
'collapsed_tasks': [],
'expanded_tags': [],
'view': 'default',
"opened_tasks": [],
'width': 400,
'height': 400,
'max': False,
'x_pos': 10,
'y_pos': 10,
'tasklist_sort_column': 5,
'tasklist_sort_order': 1,
"font_name": "",
},
'tag_editor': {
"custom_colors": [],
},
'plugins': {
"enabled": [],
"disabled": [],
}
}
# Instead of accessing the ConfigParser directly, each module gets one
# SubConfig object (one SubConfig object always matches one first-level
# section of the ConfigParser file).
#
# The goal of the SubConfig object is to handle default values and to
# convert strings to bool and int when needed.
#
# Each GTG component using config should be ported to SubConfig and, for each
# setting, a default value should be written in DEFAULTS above.
#
# Currently done: browser
# Todo: editor, plugins
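# A minimal usage sketch (section/option names from DEFAULTS above; values
# hypothetical):
#
#     browser_conf = CoreConfig().get_subconfig('browser')
#     width = browser_conf.get('sidebar_width')  # -> int, per DEFAULTS type
#     browser_conf.set('sidebar_width', 150)     # str()-ified, saved at once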
class SubConfig():
def __init__(self, section, conf, conf_path):
self._section = section
self._conf = conf
self._conf_path = conf_path
    # Return the value of the setting (or the default one).
    #
    # If a default value exists and is an int or a bool, the returned
    # value is converted to that type.
def get(self, option):
if self._conf.has_option(self._section, option):
toreturn = self._conf.get(self._section, option)
# Converting to the good type
if option in DEFAULTS[self._section]:
ntype = type(DEFAULTS[self._section][option])
if ntype == int:
toreturn = int(toreturn)
elif ntype == list:
# All list config should be saved in ','.join(list) pattern
# This is just for backward compatibility
if toreturn and toreturn[0] == '[' and toreturn[-1] == ']':
toreturn = toreturn[1:-1]
# Splitting by ',' caused bugs #1218093 and #1216807.
# Parsing the below way
# does not split "('string1', 'string2', ... )" further
toreturn_backup_str = toreturn
toreturn = findall(r'\(.*?\)', toreturn)
if not toreturn:
toreturn = toreturn_backup_str.split(',')
while toreturn and toreturn[-1] == '':
toreturn = toreturn[:-1]
elif ntype == bool and type(toreturn) == str:
toreturn = toreturn.lower() == "true"
elif option in DEFAULTS[self._section]:
toreturn = DEFAULTS[self._section][option]
self.set(option, toreturn)
else:
print "Warning : no default conf value for %s in %s" % (
option, self._section)
toreturn = None
return toreturn
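    # Illustration (hypothetical stored values): with
    # DEFAULTS['browser']['toolbar'] = True, a stored "False" is returned as
    # the bool False, and a stored "(a, b),(c, d)" list value is returned as
    # ['(a, b)', '(c, d)'].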
def clear(self):
for option in self._conf.options(self._section):
self._conf.remove_option(self._section, option)
def save(self):
self._conf.write(open(self._conf_path, 'w'))
def set(self, option, value):
if type(value) == list:
value = ','.join(value)
self._conf.set(self._section, option, str(value))
# Save immediately
self.save()
class TaskConfig():
""" TaskConfig is used to save the position and size of each task, both of
value are one tuple with two numbers, so set and get will use join and
split"""
def __init__(self, conf, conf_path):
self._conf = conf
self._conf_path = conf_path
def has_section(self, section):
return self._conf.has_section(section)
def has_option(self, section, option):
return self._conf.has_option(section, option)
def add_section(self, section):
self._conf.add_section(section)
def get(self, tid, option):
value = self._conf.get(tid, option)
        # Strip surrounding parentheses for backward compatibility
        if value[0] == '(' and value[-1] == ')':
            value = value[1:-1]
        # Remove all whitespace (spaces, tabs, newlines), then split by ','
value_without_spaces = ''.join(value.split())
return value_without_spaces.split(',')
def set(self, tid, option, value):
value = ','.join(str(x) for x in value)
self._conf.set(tid, option, value)
self.save()
def save(self):
self._conf.write(open(self._conf_path, 'w'))
class CoreConfig(Borg):
# The projects and tasks are of course DATA !
# We then use XDG_DATA for them
# Don't forget the "/" at the end.
DATA_FILE = "projects.xml"
CONF_FILE = "gtg.conf"
TASK_CONF_FILE = "tasks.conf"
conf_dict = None
# DBus
BUSNAME = "org.gnome.GTG"
BUSINTERFACE = "/org/gnome/GTG"
# TAGS
ALLTASKS_TAG = "gtg-tags-all"
NOTAG_TAG = "gtg-tags-none"
SEP_TAG = "gtg-tags-sep"
SEARCH_TAG = "search"
def check_config_file(self, path):
""" This function bypasses the errors of config file and allows GTG
to open smoothly"""
config = ConfigParser.ConfigParser()
try:
config.read(path)
except ConfigParser.Error:
open(path, "w").close()
return config
def __init__(self):
if hasattr(self, 'data_dir'):
# Borg has already been initialized
return
if TestingMode().get_testing_mode():
# we avoid running tests in the user data dir
self.data_dir = '/tmp/GTG_TESTS/data'
self.conf_dir = '/tmp/GTG_TESTS/conf'
else:
self.data_dir = os.path.join(xdg_data_home, 'gtg/')
self.conf_dir = os.path.join(xdg_config_home, 'gtg/')
if not os.path.exists(self.conf_dir):
os.makedirs(self.conf_dir)
if not os.path.exists(self.data_dir):
os.makedirs(self.data_dir)
self.conf_path = os.path.join(self.conf_dir, self.CONF_FILE)
self.task_conf_path = os.path.join(self.conf_dir, self.TASK_CONF_FILE)
if not os.path.exists(self.conf_path):
open(self.conf_path, "w").close()
if not os.path.exists(self.task_conf_path):
open(self.task_conf_path, "w").close()
for conf_file in [self.conf_path, self.task_conf_path]:
if not os.access(conf_file, os.R_OK | os.W_OK):
raise Exception("File " + file +
" is a configuration file for gtg, but it "
"cannot be read or written. Please check it")
self._conf = self.check_config_file(self.conf_path)
self._task_conf = self.check_config_file(self.task_conf_path)
def save(self):
''' Saves the configuration of CoreConfig '''
self._conf.write(open(self.conf_path, 'w'))
self._task_conf.write(open(self.task_conf_path, 'w'))
def get_subconfig(self, section):
if not self._conf.has_section(section):
self._conf.add_section(section)
return SubConfig(section, self._conf, self.conf_path)
def get_taskconfig(self):
return TaskConfig(self._task_conf, self.task_conf_path)
def get_icons_directories(self):
""" Returns the directories containing the icons """
icons_dirs = [os.path.join(dir, 'gtg/icons') for dir in xdg_data_dirs]
icons_dirs.append(os.path.join(GTG.DATA_DIR, "icons"))
icons_dirs.append(GTG.DATA_DIR)
return icons_dirs
def get_data_dir(self):
return self.data_dir
def set_data_dir(self, path):
self.data_dir = path
def get_conf_dir(self):
return self.conf_dir
def set_conf_dir(self, path):
self.conf_dir = path
self.conf_path = os.path.join(self.conf_dir, self.CONF_FILE)
self.task_conf_path = os.path.join(self.conf_dir, self.TASK_CONF_FILE)
|
elianerpereira/gtg
|
GTG/core/__init__.py
|
Python
|
gpl-3.0
| 10,171 | 0.000098 |
__author__ = 'sweemeng'
from popit.models import Person
from popit.models import Contact
from popit.models import ContactDetail
from popit.models import Link
from popit.models import Identifier
from popit.models import OtherName
from hvad.contrib.restframework import TranslatableModelSerializer
from rest_framework.serializers import CharField
from popit.serializers.exceptions import ContentObjectNotAvailable
from popit.models import Area
from rest_framework.serializers import ValidationError
from popit.serializers.base import BasePopitSerializer
import re
class LinkSerializer(TranslatableModelSerializer):
id = CharField(max_length=255, required=False, allow_blank=True, allow_null=True)
def create(self, validated_data):
language = self.language
        # Discard any language_code in the payload; self.language is authoritative.
validated_data.pop("language_code", None)
if not "content_object" in validated_data:
            raise ContentObjectNotAvailable("Please save parent object by calling serializer.save(content_object=ParentObject)")
link = Link.objects.language(language).create(**validated_data)
return link
def update(self, instance, data):
available_languages = instance.get_available_languages()
if not self.language in available_languages:
instance = instance.translate(self.language)
instance.label = data.get("label", instance.label)
instance.field = data.get("field", instance.field)
instance.url = data.get("url", instance.url)
instance.note = data.get("note", instance.note)
instance.save()
return instance
class Meta:
model = Link
exclude = ('object_id', 'content_type')
extra_kwargs = {'id': {'read_only': False, 'required': False}}
class ContactDetailSerializer(BasePopitSerializer):
id = CharField(max_length=255, required=False, allow_blank=True, allow_null=True)
links = LinkSerializer(many=True, required=False)
def create(self, validated_data):
links = validated_data.pop("links", [])
        # Discard any language_code in the payload; self.language is authoritative.
validated_data.pop("language_code", None)
language = self.language
# content_object must be pass into save parameter
if not "content_object" in validated_data:
            raise ContentObjectNotAvailable("Please save parent object by calling serializer.save(content_object=ParentObject)")
contact = ContactDetail.objects.language(language).create(
**validated_data
)
for link in links:
self.create_links(link, contact)
return contact
def update(self, instance, data):
available_languages = instance.get_available_languages()
if not self.language in available_languages:
instance = instance.translate(self.language)
links = data.pop("links", [])
instance.label = data.get("label", instance.label)
instance.note = data.get('note', instance.note)
instance.type = data.get('type', instance.type)
instance.value = data.get('value', instance.value)
instance.valid_from = data.get('valid_from', instance.valid_from)
instance.valid_until = data.get('valid_until', instance.valid_until)
instance.save()
for link in links:
self.update_links(link, instance)
return instance
def to_representation(self, instance):
data = super(ContactDetailSerializer, self).to_representation(instance)
links_instance = instance.links.untranslated().all()
links_serializer = LinkSerializer(instance=links_instance, many=True, language=instance.language_code)
data["links"] = links_serializer.data
return data
def validate_valid_from(self, value):
if not re.match(r"^[0-9]{4}(-[0-9]{2}){0,2}$", value):
raise ValidationError("value need to be in ^[0-9]{4}(-[0-9]{2}){0,2}$ format")
return value
def validate_valid_to(self, value):
if not re.match(r"^[0-9]{4}(-[0-9]{2}){0,2}$", value):
raise ValidationError("value need to be in ^[0-9]{4}(-[0-9]{2}){0,2}$ format")
return value
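    # The date pattern accepts "2015", "2015-06" and "2015-06-01"; other
    # shapes such as "2015/06/01" are rejected.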
class Meta:
model = ContactDetail
exclude = ('object_id', 'content_type')
extra_kwargs = {'id': {'read_only': False, 'required': False}}
class IdentifierSerializer(BasePopitSerializer):
id = CharField(max_length=255, required=False, allow_blank=True, allow_null=True)
links = LinkSerializer(many=True, required=False)
def create(self, validated_data):
links = validated_data.pop("links", [])
        # Discard any language_code in the payload; self.language is authoritative.
validated_data.pop("language_code", None)
language = self.language
if not "content_object" in validated_data:
raise ContentObjectNotAvailable("Please save parent object by calling serializer.save(content_object=ParentObject)")
identifier = Identifier.objects.language(language).create(**validated_data)
for link in links:
self.create_links(link, identifier)
return identifier
def update(self, instance, data):
available_languages = instance.get_available_languages()
if not self.language in available_languages:
instance = instance.translate(self.language)
links = data.pop('links', [])
instance.scheme = data.get('scheme', instance.scheme)
instance.identifier = data.get('identifier', instance.identifier)
instance.save()
for link in links:
self.update_links(link, instance)
return instance
def to_representation(self, instance):
data = super(IdentifierSerializer, self).to_representation(instance)
links_instance = instance.links.untranslated().all()
links_serializer = LinkSerializer(instance=links_instance, many=True, language=instance.language_code)
data["links"] = links_serializer.data
return data
class Meta:
model = Identifier
exclude = ('object_id', 'content_type')
extra_kwargs = {'id': {'read_only': False, 'required': False}}
class OtherNameSerializer(BasePopitSerializer):
id = CharField(max_length=255, required=False, allow_blank=True, allow_null=True)
links = LinkSerializer(many=True, required=False)
start_date = CharField(allow_null=True, default=None, allow_blank=True)
end_date = CharField(allow_null=True, default=None, allow_blank=True)
def create(self, validated_data):
links = validated_data.pop('links', [])
        # Discard any language_code in the payload; self.language is authoritative.
validated_data.pop("language_code", None)
language = self.language
if not "content_object" in validated_data:
raise ContentObjectNotAvailable("Please save parent object by calling serializer.save(content_object=ParentObject)")
if not validated_data.get("start_date"):
validated_data["start_date"] = None
if not validated_data.get("end_date"):
validated_data["end_date"] = None
othername = OtherName.objects.language(language).create(**validated_data)
for link in links:
self.create_links(link, othername)
return othername
def update(self, instance, data):
available_languages = instance.get_available_languages()
if not self.language in available_languages:
instance = instance.translate(self.language)
links = data.pop('links', [])
instance.name = data.get('name', instance.name)
instance.family_name = data.get('family_name', instance.family_name)
instance.given_name = data.get('given_name', instance.given_name)
instance.additional_name = data.get('additional_name', instance.additional_name)
instance.honorific_suffix = data.get('honorific_suffix', instance.honorific_suffix)
instance.honorific_prefix = data.get('honorific_prefix', instance.honorific_prefix)
instance.patronymic_name = data.get('patronymic_name', instance.patronymic_name)
instance.start_date = data.get('start_date', instance.start_date)
if not instance.start_date:
instance.start_date = None
instance.end_date = data.get('end_date', instance.end_date)
if not instance.end_date:
instance.end_date = None
instance.note = data.get('note', instance.note)
instance.save()
for link in links:
self.update_links(link, instance)
return instance
def to_representation(self, instance):
data = super(OtherNameSerializer, self).to_representation(instance)
links_instance = instance.links.untranslated().all()
links_serializer = LinkSerializer(instance=links_instance, many=True, language=instance.language_code)
data["links"] = links_serializer.data
return data
def validate_start_date(self, value):
if not value:
return value
if not re.match(r"^[0-9]{4}(-[0-9]{2}){0,2}$", value):
raise ValidationError("value need to be in ^[0-9]{4}(-[0-9]{2}){0,2}$ format")
return value
def validate_end_date(self, value):
if not value:
return value
if not re.match(r"^[0-9]{4}(-[0-9]{2}){0,2}$", value):
raise ValidationError("value need to be in ^[0-9]{4}(-[0-9]{2}){0,2}$ format")
return value
class Meta:
model = OtherName
exclude = ('object_id', 'content_type')
extra_kwargs = {'id': {'read_only': False, 'required': False}}
class AreaFlatSerializer(BasePopitSerializer):
id = CharField(max_length=255, required=False, allow_blank=True, allow_null=True)
links = LinkSerializer(many=True, required=False)
class Meta:
model = Area
extra_kwargs = {'id': {'read_only': False, 'required': False}}
class AreaSerializer(BasePopitSerializer):
id = CharField(max_length=255, required=False, allow_blank=True, allow_null=True)
parent = AreaFlatSerializer(required=False)
parent_id = CharField(allow_null=True, default=None, allow_blank=True)
children = AreaFlatSerializer(required=False, many=True)
links = LinkSerializer(many=True, required=False)
# Why create and update? Because we need to create an API endpoint to import data from mapit
def create(self, validated_data):
language = self.language
validated_data.pop("language_code", None)
parent_data = validated_data.pop("parent", {})
links = validated_data.pop("links", [])
validated_data.pop("children", None)
if parent_data:
if not "id" in parent_data:
parent = self.create(parent_data)
else:
parent = self.update_area(parent_data)
validated_data["parent"] = parent
area = Area.objects.language(language).create(**validated_data)
for link in links:
self.create_links(link, area)
return area
def update(self, instance, data):
available_languages = instance.get_available_languages()
if not self.language in available_languages:
instance = instance.translate(self.language)
links = data.pop("links", [])
if "parent_id" in data:
if data.get("parent_id"):
parent_area = Area.objects.untranslated().get(id=data.get("parent_id"))
instance.parent = parent_area
else:
instance.parent = None
data.pop("language", None)
instance.name = data.get("name", instance.name)
instance.identifier = data.get("identifier", instance.identifier)
instance.classification = data.get("classification", instance.classification)
instance.save()
for link in links:
self.update_links(link, instance)
return instance
def update_area(self, data):
        # Raise an exception if there is no id in the data; it should be there
area_id = data.pop("id")
parent_data = data.pop("parent", None)
links = data.pop('links', [])
area = Area.objects.language(self.language).get(id=area_id)
area.name = data.get('name', area.name)
area.identifier = data.get('identifier', area.identifier)
        area.classification = data.get('classification', area.classification)
if parent_data:
if "id" in parent_data:
parent = self.update_area(parent_data)
else:
parent = self.create(parent_data)
area.parent = parent
area.save()
for link in links:
self.update_links(link, area)
return area
def to_representation(self, instance):
data = super(AreaSerializer, self).to_representation(instance)
links_instance = instance.links.untranslated().all()
links_serializer = LinkSerializer(instance=links_instance, many=True, language=instance.language_code)
data["links"] = links_serializer.data
return data
class Meta:
model = Area
extra_kwargs = {'id': {'read_only': False, 'required': False}}
|
Sinar/popit_ng
|
popit/serializers/misc.py
|
Python
|
agpl-3.0
| 13,241 | 0.00287 |
from StringIO import StringIO
import unittest
import lob
# Setting the API key
lob.api_key = 'test_0dc8d51e0acffcb1880e0f19c79b2f5b0cc'
class ObjectFunctions(unittest.TestCase):
def setUp(self):
lob.api_key = 'test_0dc8d51e0acffcb1880e0f19c79b2f5b0cc'
self.obj = lob.Object.list(count=1).data[0]
def test_list_objects(self):
objects = lob.Object.list()
self.assertTrue(isinstance(objects.data[0], lob.Object))
self.assertEqual(objects.object, 'list')
def test_list_objects_limit(self):
objects = lob.Object.list(count=2)
self.assertTrue(isinstance(objects.data[0], lob.Object))
self.assertEqual(len(objects.data), 2)
def test_list_objects_fail(self):
self.assertRaises(lob.error.InvalidRequestError, lob.Object.list, count=1000)
def test_create_object_remote(self):
object = lob.Object.create(
name = 'Test Object',
file = 'https://s3-us-west-2.amazonaws.com/lob-assets/test.pdf',
setting = 201
)
self.assertTrue(isinstance(object, lob.Object))
self.assertEqual(object.name, 'Test Object')
def test_create_object_stringio(self):
object = lob.Object.create(
name = 'Test Object StringIO',
file = StringIO(open('tests/pc.pdf', 'rb').read()),
setting = 201
)
self.assertTrue(isinstance(object, lob.Object))
self.assertEqual(object.name, 'Test Object StringIO')
def test_create_object_local(self):
object = lob.Object.create(
name = 'Test Object Inline',
file = open('tests/pc.pdf', 'rb'),
setting = 201
)
self.assertTrue(isinstance(object, lob.Object))
self.assertEqual(object.name, 'Test Object Inline')
self.assertRaises(AttributeError, lambda: object.nonexistent_key)
object.name = "something new"
self.assertEqual(object.name, "something new")
def test_create_directly_specify_files(self):
object = lob.Object.create(
name = 'Test Object Direct Specify',
files = {'file': open('tests/pc.pdf', 'rb').read()},
setting = 201
)
self.assertTrue(isinstance(object, lob.Object))
self.assertEqual(object.name, 'Test Object Direct Specify')
def test_create_object_fail(self):
self.assertRaises(lob.error.InvalidRequestError, lob.Object.create)
def test_retrieve_job(self):
job = lob.Object.retrieve(id=lob.Object.list().data[0].id)
self.assertTrue(isinstance(job, lob.Object))
def test_retrieve_job_fail(self):
self.assertRaises(lob.error.InvalidRequestError, lob.Object.retrieve, id='test')
|
ami/lob-python
|
tests/test_object.py
|
Python
|
mit
| 2,744 | 0.00984 |
import os
from os import walk
templatePath = r'templates/serviceTemplate.txt'
writePath = r'/Source/Api/service-hmlFhirConverter/src/main/java/org/nmdp/hmlfhirconverter/service'
class ServiceGenerator:
def get_template(self):
with open(templatePath, 'r') as fileReader:
return fileReader.read()
def write_file(self, fileContents, fileName):
path = os.path.join(writePath, self.get_file_name(fileName))
with open(path, 'w') as fileWriter:
fileWriter.write(fileContents)
def get_file_name(self, className):
return className + 'Service.java'
def file_exists(self, className):
for (dirpath, dirnames, filenames) in walk(writePath):
return self.get_file_name(className) in filenames
|
nmdp-bioinformatics/service-hml-fhir-converter
|
CodeGen/hmlFhirConverterCodeGenerator/codegen/service/ServiceGenerator.py
|
Python
|
apache-2.0
| 776 | 0.002577 |
#!/usr/bin/env python
"""
Reduce samples whose energies are too high, comparing energies within
the same group of samples.
A group is defined by the directory name with its trailing "_#####"
(underscore plus the last 5 digits) stripped. For example, the directory
`smpl_XX_YYYYYY_#####` belongs to the group `smpl_XX_YYYYYY`.
Usage:
reduce_high_energy_samples.py [options] DIRS...
Options:
-h,--help Show this message and exit.
-o OUT Output file name. [default: out.high_energy_samples]
--threshold=THRESHOLD
Threshold of energy/atom that determines high energy samples.
[default: 1.0]
"""
from __future__ import print_function
import os,sys
from docopt import docopt
from datetime import datetime
from nappy.napsys import NAPSystem
__author__ = "RYO KOBAYASHI"
__version__ = "160727"
def get_obsolete_dirname():
prefix = "obsolete_"
today = datetime.today()
return prefix+today.strftime("%y%m%d")
def get_groups(smpldirs):
groups = {}
ns = len(smpldirs)
if ns < 100:
ms = 1
else:
        ms = ns//100
for i,s in enumerate(smpldirs):
if i%ms == 0:
print('.',end=".")
try:
with open(s+'/erg.ref','r') as f:
erg = float(f.readline())
except:
print('Failed to read erg.ref, so skip '+s)
continue
key = s[:-6]
if not key in groups:
groups[key] = []
groups[key].append([s,erg])
print('')
return groups
def get_list_high_energy(gsmpls,threshold):
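    # Energies are normalized per atom; any sample more than `threshold`
    # (energy/atom) above its group's minimum is flagged for removal.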
emin = 1e+30
highsmpls = []
ergs = []
for i,s in enumerate(gsmpls):
smpldir = s[0]
erg = s[1]
#atoms = read(smpldir+'/POSCAR',format='vasp')
atoms = NAPSystem(fname=smpldir+"/pos",format='pmd')
natm = atoms.num_atoms()
erg /= natm
ergs.append(erg)
emin = min(erg,emin)
for i,s in enumerate(gsmpls):
smpldir = s[0]
erg = ergs[i]
if erg-emin > threshold:
highsmpls.append(smpldir)
return highsmpls
if __name__ == "__main__":
args = docopt(__doc__)
smpldirs = args['DIRS']
outfname = args['-o']
threshold = float(args['--threshold'])
print('grouping samples...')
groups = get_groups(smpldirs)
print('looking for high-energy samples...')
highsmpls = []
for g,smpls in groups.items():
print('.',end='')
highsmpls.extend(get_list_high_energy(smpls,threshold))
print('')
with open(outfname,'w') as f:
for s in highsmpls:
f.write(s+'\n')
print('number of samples to be reduced = ',len(highsmpls))
print('check '+outfname+' and run the following commands:')
print('')
# obsdir = get_obsolete_dirname()
# print(' mkdir '+obsdir)
# print(' for d in `cat '+outfname+'`; do mv $d '+obsdir
# +'/; done')
# print('')
|
ryokbys/nap
|
nappy/fitpot/reduce_high_energy_samples.py
|
Python
|
mit
| 2,920 | 0.009589 |
#!/usr/bin/env python
#
# Copyright 2007-2013 The Python-Twitter Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''The setup and build script for the python-twitter library.'''
__author__ = 'python-twitter@googlegroups.com'
__version__ = '1.1'
# The base package metadata to be used by both distutils and setuptools
METADATA = dict(
name = "python-twitter",
version = __version__,
py_modules = ['twitter'],
author='The Python-Twitter Developers',
author_email='python-twitter@googlegroups.com',
description='A Python wrapper around the Twitter API',
license='Apache License 2.0',
url='https://github.com/bear/python-twitter',
keywords='twitter api',
)
# Extra package metadata to be used only if setuptools is installed
SETUPTOOLS_METADATA = dict(
install_requires = ['setuptools', 'simplejson', 'oauth2', 'requests', 'requests_oauthlib'],
include_package_data = True,
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet',
],
test_suite = 'twitter_test.suite',
)
def Read(file):
return open(file).read()
def BuildLongDescription():
return '\n'.join([Read('README.md'), Read('CHANGES')])
def Main():
# Build the long_description from the README and CHANGES
METADATA['long_description'] = BuildLongDescription()
# Use setuptools if available, otherwise fallback and use distutils
try:
import setuptools
METADATA.update(SETUPTOOLS_METADATA)
setuptools.setup(**METADATA)
except ImportError:
print "Could not import setuptools, using distutils"
print "NOTE: You will need to install dependencies manualy"
import distutils.core
distutils.core.setup(**METADATA)
if __name__ == '__main__':
Main()
|
MosheBerman/brisket-mashup
|
source/libraries/python-twitter-1.1/setup.py
|
Python
|
mit
| 2,426 | 0.010717 |
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from .models import Category, Product, Transaction, UserTag, ProductTag, Account
class AccountInline(admin.StackedInline):
model = Account
can_delete = False
verbose_name_plural = 'Account'
fk_name = 'user'
class BalanceListFilter(admin.SimpleListFilter):
title = 'balance'
parameter_name = 'balance'
default_value = None
def lookups(self, request, model_admin):
return [
("negative", "negative balances only")
]
def queryset(self, request, queryset):
if self.value() == "negative":
negative_users = []
for user in queryset:
if user.account.balance < 0:
negative_users.append(user.id)
return User.objects.filter(id__in=negative_users)
return queryset
class CustomUserAdmin(UserAdmin):
inlines = (AccountInline, )
list_display = ('email', 'is_staff', 'is_superuser', 'last_login', 'balance')
list_select_related = ('account', )
list_filter = UserAdmin.list_filter + (BalanceListFilter, )
def get_inline_instances(self, request, obj=None):
if not obj:
return list()
return super(CustomUserAdmin, self).get_inline_instances(request, obj)
def balance(self, instance):
return instance.account.balance
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
admin.site.register(Product)
admin.site.register(Category)
admin.site.register(Transaction)
admin.site.register(UserTag)
admin.site.register(ProductTag)
|
mylvari/namubufferi
|
namubufferiapp/admin.py
|
Python
|
mit
| 1,669 | 0.002996 |
"""
Boolean geometry utilities.
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to work around the python bug where relative imports don't work if the module is imported as a main module.
import __init__
import sys
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__credits__ = 'Art of Illusion <http://www.artofillusion.org/>'
__date__ = '$Date: 2008/02/05 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def _getAccessibleAttribute(attributeName):
'Get the accessible attribute.'
if attributeName in globalAccessibleAttributeDictionary:
return globalAccessibleAttributeDictionary[attributeName]
return None
def continuous(valueString):
'Print continuous.'
sys.stdout.write(str(valueString))
return valueString
def line(valueString):
'Print line.'
print(valueString)
return valueString
globalAccessibleAttributeDictionary = {'continuous' : continuous, 'line' : line}
|
dob71/x2swn
|
skeinforge/fabmetheus_utilities/geometry/geometry_utilities/evaluate_fundamentals/print.py
|
Python
|
gpl-3.0
| 984 | 0.018293 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from datetime import datetime
from optparse import make_option
from django.core.management.base import BaseCommand
from oneanddone.tasks.models import TaskMetrics
class Command(BaseCommand):
help = 'Updates stored metrics'
option_list = BaseCommand.option_list + (
make_option('--force_update',
action='store_true',
dest='force_update',
default=False,
help='Force updating of all tasks'),)
def handle(self, *args, **options):
updated = TaskMetrics.update_task_metrics(force_update=options['force_update'])
self.stdout.write('%s: %s tasks had their metrics updated\n' %
(datetime.now().isoformat(), updated))
|
jicksy/oneanddone_test
|
oneanddone/tasks/management/commands/updatemetrics.py
|
Python
|
mpl-2.0
| 954 | 0.001048 |
# -------------------------------------------------------------------------------
# Copyright IBM Corp. 2017
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------------
from pixiedust.display.app import *
from pixiedust.utils.userPreferences import *
import base64
import requests
access_token_key = 'cf.access_token'
api_base_url = 'api.ng.bluemix.net'
login_base_url = 'login.ng.bluemix.net'
@PixieApp
class CFBrowser:
def get_data_frame(self):
return self.df
@route(login="true")
def _login(self):
# Login
body = 'grant_type=password&passcode={}'.format(self.passcode)
url = 'https://{}/UAALoginServerWAR/oauth/token'.format(login_base_url)
r = requests.post(url, data=body, headers={
'Authorization': 'Basic Y2Y6',
'Content-Type': 'application/x-www-form-urlencoded',
'Accept': 'application/json'
})
json = r.json()
if 'access_token' in json.keys():
self.access_token = json['access_token']
setUserPreference(access_token_key, self.access_token)
self.show_orgs()
def show_orgs(self):
# Load organizations
orgs = self.get_orgs(self.access_token)
options = ""
for org in orgs:
options += """<option value="{}">{}</option>""".format(org['metadata']['guid'],org['entity']['name'])
self._addHTMLTemplateString("""
<div>
<div class="form-horizontal">
<div class="form-group">
<label for="org{{prefix}}" class="control-label col-sm-2">Select an organization:</label>
<div class="col-sm-5">
<select class="form-control" id="org{{prefix}}">""" + options + """</select>
</div>
<div class="col-sm-1">
<button type="submit" class="btn btn-primary" pd_refresh>Go
<pd_script>
self.org_id="$val(org{{prefix}})"
self.login="false"
self.select_org="true"
</pd_script>
</button>
</div>
</div>
</div>
</div>
""")
@route(select_org="true")
def _select_org(self):
spaces = self.get_spaces(self.access_token, self.org_id)
options = ""
for space in spaces:
options += """<option value="{}">{}</option>""".format(space['metadata']['guid'],space['entity']['name'])
self._addHTMLTemplateString("""
<div>
<div class="form-horizontal">
<div class="form-group">
<label for="org{{prefix}}" class="control-label col-sm-2">Select a space:</label>
<div class="col-sm-5">
<select class="form-control" id="space{{prefix}}">""" + options + """</select>
</div>
<div class="col-sm-1">
<button type="submit" class="btn btn-primary" pd_refresh>Go
<pd_script>
self.space_id="$val(space{{prefix}})"
self.select_org="false"
self.select_space="true"
</pd_script>
</button>
</div>
</div>
</div>
</div>
""")
@route(select_space="true")
def _select_space(self):
svcs = self.get_services(self.access_token, self.space_id)
output = """
<div>
<div class="form-horizontal">
"""
for svc in svcs:
svc_label = self.get_service_label(self.access_token, svc['entity']['service_plan_guid'])
svc_name = svc['entity']['name']
if svc_label == 'cloudantNoSQLDB' or "cloudant" in svc_name.lower():
svc_keys = self.get_service_keys(self.access_token, svc['metadata']['guid'])
for svc_key in svc_keys:
svc_key_entity = svc_key['entity']
if 'credentials' in svc_key_entity.keys():
credentials_str = json.dumps(svc_key_entity['credentials'])
credentials_str = credentials_str.replace('"', '\\"')
output += """
<div class="form-group">
<div class="col-sm-2"></div>
<div class="col-sm-5">
<b>""" + svc['entity']['name'] + """</b><br>
""" + svc_key_entity['credentials']['host'] + """<br>
<button type="submit" class="btn btn-primary" data-dismiss="modal" pd_refresh>Select
<pd_script>self.service_name=\"""" + svc['entity']['name'].replace('"', '\\"') + """\"
self.credentials=\"""" + credentials_str + """\"
self.select_space="false"
self.select_credentials="true"</pd_script>
</button>
</div>
</div>"""
        output += """
            </div>
        </div>"""
        return output
@route(select_credentials="true")
def _select_credentials(self):
return self.selectBluemixCredentials(self.service_name, self.credentials)
def is_valid_access_token(self, access_token):
url = 'https://{}/v2/organizations'.format(api_base_url)
authHeader = 'Bearer {}'.format(access_token)
r = requests.get(url, headers={
'Authorization': authHeader,
'Accept': 'application/json'
})
return r.status_code == 200
def get_orgs(self, access_token):
url = 'https://{}/v2/organizations'.format(api_base_url)
authHeader = 'Bearer {}'.format(access_token)
r = requests.get(url, headers={
'Authorization': authHeader,
'Accept': 'application/json'
})
json = r.json()
return json['resources']
def get_spaces(self, access_token, org_id):
url = 'https://{}/v2/organizations/{}/spaces'.format(api_base_url, org_id)
authHeader = 'Bearer {}'.format(access_token)
r = requests.get(url, headers={
'Authorization': authHeader,
'Accept': 'application/json'
})
json = r.json()
return json['resources']
def get_apps(self, access_token, space_id):
url = 'https://{}/v2/apps?q=space_guid:{}'.format(api_base_url, space_id)
authHeader = 'Bearer {}'.format(access_token)
r = requests.get(url, headers={
'Authorization': authHeader,
'Accept': 'application/json'
})
json = r.json()
return json['resources']
def get_services(self, access_token, space_id):
url = 'https://{}/v2/service_instances?q=space_guid:{}'.format(api_base_url, space_id)
authHeader = 'Bearer {}'.format(access_token)
r = requests.get(url, headers={
'Authorization': authHeader,
'Accept': 'application/json'
})
json = r.json()
return json['resources']
def get_service_keys(self, access_token, service_id):
url = 'https://{}/v2/service_keys?q=service_instance_guid:{}'.format(api_base_url, service_id)
authHeader = 'Bearer {}'.format(access_token)
r = requests.get(url, headers={
'Authorization': authHeader,
'Accept': 'application/json'
})
json = r.json()
return json['resources']
def get_service_label(self, access_token, service_plan_id):
# Load the service plan
url = 'https://{}/v2/service_plans/{}'.format(api_base_url, service_plan_id)
authHeader = 'Bearer {}'.format(access_token)
r = requests.get(url, headers={
'Authorization': authHeader,
'Accept': 'application/json'
})
json = r.json()
if 'entity' in json.keys():
service_id = json['entity']['service_guid']
else:
return "NO PLAN FOUND"
# Load the service
url = 'https://{}/v2/services/{}'.format(api_base_url, service_id)
r = requests.get(url, headers={
'Authorization': authHeader,
'Accept': 'application/json'
})
json = r.json()
return json['entity']['label']
def cloudant_all_dbs(self, host, username, password):
url = 'https://{}/_all_dbs'.format(host)
r = requests.get(url, headers={
            'Authorization': 'Basic {}'.format(base64.b64encode(
                '{}:{}'.format(username, password).encode('utf-8')).decode('ascii')),
'Accept': 'application/json'
})
return r.json()
@route()
def startBrowsingBM(self):
access_token_valid = False
access_token = getUserPreference(access_token_key)
if access_token is not None:
access_token_valid = self.is_valid_access_token(access_token)
if access_token_valid:
self.access_token = access_token
self.show_orgs()
else:
return """
<div>
<div class="form-horizontal">
<div class="form-group">
<div class="col-sm-2"></div>
<div class="col-sm-5">
<a href="https://login.ng.bluemix.net/UAALoginServerWAR/passcode" target="_blank">
Click here to get your one-time passcode
</a>
</div>
</div>
<div class="form-group">
<label for="passcode{{prefix}}" class="control-label col-sm-2">Passcode:</label>
<div class="col-sm-5">
<input type="text" class="form-control" id="passcode{{prefix}}">
</div>
<div class="col-sm-1">
<button type="submit" class="btn btn-primary" pd_refresh>Go
<pd_script>
self.passcode="$val(passcode{{prefix}})"
self.login="true"
</pd_script>
</button>
</div>
</div>
</div>
</div>
"""
| ibm-cds-labs/pixiedust | pixiedust/apps/cfBrowser.py | Python | apache-2.0 | 9,821 | 0.004175 |
#!/usr/bin/env python
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright: 2021 IBM
# Author: Nageswara R Sastry <rnsastry@linux.vnet.ibm.com>
import os
from avocado import Test
from avocado.utils import archive, process
from avocado.utils.software_manager import SoftwareManager
class EvmCtl(Test):
"""
evmctl-testsuite
:avocado: tags=security,testsuite
"""
def setUp(self):
'''
Install the basic packages to support evmctl
'''
# Check for basic utilities
smm = SoftwareManager()
deps = ['gcc', 'make']
for package in deps:
if not smm.check_installed(package) and not smm.install(package):
self.cancel('%s is needed for the test to be run' % package)
url = "https://sourceforge.net/projects/linux-ima/files/latest/download"
tarball = self.fetch_asset(name="download.tar.gz", locations=url, expire='7d')
archive.extract(tarball, self.workdir)
self.sourcedir = os.path.join(self.workdir, os.listdir(self.workdir)[0])
self.log.info("sourcedir - %s" % self.sourcedir)
os.chdir(self.sourcedir)
process.run('./autogen.sh', ignore_status=True)
def test(self):
'''
Running tests from evmctl
'''
count = 0
output = process.system_output('./build.sh', ignore_status=True).decode()
for line in reversed(output.splitlines()):
if '# FAIL' in line:
count = int(line.split(":")[1].strip())
self.log.info(line)
break
# If the fail count is more than 0 then there are some failed tests
if count:
self.fail("%s test(s) failed, please refer to the log" % count)
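# Illustrative invocation (assuming an avocado installation):
#   avocado run security/evmctl-tests.py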
| sacsant/avocado-misc-tests | security/evmctl-tests.py | Python | gpl-2.0 | 2,182 | 0.001833 |
# Copyright 2018 Gianmarco Conte (gconte@dinamicheaziendali.it)
import logging
from odoo import api, models
from odoo.tools.misc import formatLang
_logger = logging.getLogger(__name__)
class ReportGiornale(models.AbstractModel):
_name = "report.l10n_it_central_journal.report_giornale"
_description = "Journal report"
@api.model
def _get_report_values(self, docids, data=None):
        lang_code = self._context.get("lang", self.env.company.partner_id.lang)
lang = self.env["res.lang"]
lang_id = lang._lang_get(lang_code)
date_format = lang_id.date_format
return {
"doc_ids": data["ids"],
"doc_model": self.env["account.move.line"],
"data": data,
"docs": self.env["account.move.line"].browse(data["ids"]),
"get_move": self._get_move,
"save_print_info": self._save_print_info,
"env": self.env,
"formatLang": formatLang,
"l10n_it_count_fiscal_page_base": data["form"]["fiscal_page_base"],
"start_row": data["form"]["start_row"],
"date_move_line_to": data["form"]["date_move_line_to"],
"daterange": data["form"]["daterange"],
"print_state": data["form"]["print_state"],
"year_footer": data["form"]["year_footer"],
"progressive_credit": data["form"]["progressive_credit"],
"progressive_debit": data["form"]["progressive_debit"],
"date_format": date_format,
}
def _get_move(self, move_ids):
move_list = self.env["account.move.line"].browse(move_ids)
return move_list
def _save_print_info(
self, daterange_id, print_state, end_date_print, end_row, end_debit, end_credit
):
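        # Persist print progress (last printed date, line number, debit and
        # credit totals) on the date range, but only for definitive ("def")
        # prints; draft prints leave the stored counters untouched.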
res = False
if print_state == "def":
            daterange_obj = self.env["date.range"]
            daterange_ids = daterange_obj.search([("id", "=", daterange_id)])
print_info = {
"date_last_print": end_date_print,
"progressive_line_number": end_row,
"progressive_debit": end_debit,
"progressive_credit": end_credit,
}
res = daterange_ids.write(print_info)
return res
| OCA/l10n-italy | l10n_it_central_journal/models/central_journal.py | Python | agpl-3.0 | 2,270 | 0.000881 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module's main purpose is to act as a script to create new versions
of ufunc.c when ERFA is updated (or this generator is enhanced).
`Jinja2 <http://jinja.pocoo.org/>`_ must be installed for this
module/script to function.
Note that this does *not* currently automate the process of creating structs
or dtypes for those structs. They should be added manually in the template file.
"""
import re
import os.path
from collections import OrderedDict
from distutils.version import LooseVersion
import numpy
# Note: once we support only numpy >=1.16, all things related to "d3_fix"
# can be removed, here and in the templates (core.py.templ, ufunc.c.templ).
# NOTE: we define this variable here instead of importing from astropy to
# ensure that running this script does not require importing astropy.
NUMPY_LT_1_16 = LooseVersion(numpy.__version__) < '1.16'
DEFAULT_ERFA_LOC = os.path.join(os.path.split(__file__)[0],
'../../cextern/erfa')
DEFAULT_TEMPLATE_LOC = os.path.split(__file__)[0]
NDIMS_REX = re.compile(re.escape("numpy.dtype([('fi0', '.*', <(.*)>)])").replace(r'\.\*', '.*').replace(r'\<', '(').replace(r'\>', ')'))
class FunctionDoc:
def __init__(self, doc):
self.doc = doc.replace("**", " ").replace("/*\n", "").replace("*/", "")
self.__input = None
self.__output = None
self.__ret_info = None
def _get_arg_doc_list(self, doc_lines):
"""Parse input/output doc section lines, getting arguments from them.
Ensure all elements of eraASTROM and eraLDBODY are left out, as those
are not input or output arguments themselves. Also remove the nb
argument in from of eraLDBODY, as we infer nb from the python array.
"""
doc_list = []
skip = []
for d in doc_lines:
arg_doc = ArgumentDoc(d)
if arg_doc.name is not None:
if skip:
if skip[0] == arg_doc.name:
skip.pop(0)
continue
else:
raise RuntimeError("We whould be skipping {} "
"but {} encountered."
.format(skip[0], arg_doc.name))
if arg_doc.type.startswith('eraLDBODY'):
# Special-case LDBODY: for those, the previous argument
# is always the number of bodies, but we don't need it
# as an input argument for the ufunc since we're going
# to determine this from the array itself. Also skip
# the description of its contents; those are not arguments.
doc_list.pop()
skip = ['bm', 'dl', 'pv']
elif arg_doc.type.startswith('eraASTROM'):
# Special-case ASTROM: need to skip the description
# of its contents; those are not arguments.
skip = ['pmt', 'eb', 'eh', 'em', 'v', 'bm1',
'bpn', 'along', 'xpl', 'ypl', 'sphi',
'cphi', 'diurab', 'eral', 'refa', 'refb']
doc_list.append(arg_doc)
return doc_list
@property
def input(self):
if self.__input is None:
self.__input = []
for regex in ("Given([^\n]*):\n(.+?) \n",
"Given and returned([^\n]*):\n(.+?) \n"):
result = re.search(regex, self.doc, re.DOTALL)
if result is not None:
doc_lines = result.group(2).split("\n")
self.__input += self._get_arg_doc_list(doc_lines)
return self.__input
@property
def output(self):
if self.__output is None:
self.__output = []
for regex in ("Given and returned([^\n]*):\n(.+?) \n",
"Returned([^\n]*):\n(.+?) \n"):
result = re.search(regex, self.doc, re.DOTALL)
if result is not None:
doc_lines = result.group(2).split("\n")
self.__output += self._get_arg_doc_list(doc_lines)
return self.__output
@property
def ret_info(self):
if self.__ret_info is None:
ret_info = []
result = re.search("Returned \\(function value\\)([^\n]*):\n(.+?) \n", self.doc, re.DOTALL)
if result is not None:
ret_info.append(ReturnDoc(result.group(2)))
if len(ret_info) == 0:
self.__ret_info = ''
elif len(ret_info) == 1:
self.__ret_info = ret_info[0]
else:
raise ValueError("Multiple C return sections found in this doc:\n" + self.doc)
return self.__ret_info
def __repr__(self):
return self.doc.replace(" \n", "\n")
class ArgumentDoc:
def __init__(self, doc):
match = re.search("^ +([^ ]+)[ ]+([^ ]+)[ ]+(.+)", doc)
if match is not None:
self.name = match.group(1)
self.type = match.group(2)
self.doc = match.group(3)
else:
self.name = None
self.type = None
self.doc = None
def __repr__(self):
return f" {self.name:15} {self.type:15} {self.doc}"
class Variable:
"""Properties shared by Argument and Return."""
@property
def npy_type(self):
"""Predefined type used by numpy ufuncs to indicate a given ctype.
Eg., NPY_DOUBLE for double.
"""
return "NPY_" + self.ctype.upper()
@property
def dtype(self):
"""Name of dtype corresponding to the ctype.
Specifically,
        double : dt_double
        int : dt_int
        double[3] : dt_double (3-vectors are handled via the gufunc signature)
        double[2][3] : dt_pv
        double[2] : dt_pvdpv
        double[3][3] : dt_double (3x3 matrices are handled via the gufunc signature)
        int[4] : dt_ymdf | dt_hmsf | dt_dmsf, depending on name
eraASTROM: dt_eraASTROM
eraLDBODY: dt_eraLDBODY
char : dt_sign
char[] : dt_type
The corresponding dtypes are defined in ufunc.c, where they are
used for the loop definitions. In core.py, they are also used
to view-cast regular arrays to these structured dtypes.
"""
if self.ctype == 'const char':
return 'dt_type'
elif self.ctype == 'char':
return 'dt_sign'
elif self.ctype == 'int' and self.shape == (4,):
return 'dt_' + self.name[1:]
elif self.ctype == 'double' and self.shape == (3,):
return 'dt_double'
elif self.ctype == 'double' and self.shape == (2, 3):
return 'dt_pv'
elif self.ctype == 'double' and self.shape == (2,):
return 'dt_pvdpv'
elif self.ctype == 'double' and self.shape == (3, 3):
return 'dt_double'
elif not self.shape:
return 'dt_' + self.ctype
else:
raise ValueError("ctype {} with shape {} not recognized."
.format(self.ctype, self.shape))
@property
def view_dtype(self):
"""Name of dtype corresponding to the ctype for viewing back as array.
E.g., dt_double for double, dt_double33 for double[3][3].
The types are defined in core.py, where they are used for view-casts
of structured results as regular arrays.
"""
if self.ctype == 'const char':
return 'dt_bytes12'
elif self.ctype == 'char':
return 'dt_bytes1'
else:
raise ValueError('Only char ctype should need view back!')
@property
def ndim(self):
return len(self.shape)
@property
def size(self):
size = 1
for s in self.shape:
size *= s
return size
@property
def cshape(self):
return ''.join([f'[{s}]' for s in self.shape])
@property
def signature_shape(self):
if self.ctype == 'eraLDBODY':
return '(n)'
elif self.ctype == 'double' and self.shape == (3,):
return '(d3)' if NUMPY_LT_1_16 else '(3)'
elif self.ctype == 'double' and self.shape == (3, 3):
return '(d3, d3)' if NUMPY_LT_1_16 else '(3, 3)'
else:
return '()'
class Argument(Variable):
def __init__(self, definition, doc):
self.definition = definition
self.doc = doc
self.__inout_state = None
self.ctype, ptr_name_arr = definition.strip().rsplit(" ", 1)
if "*" == ptr_name_arr[0]:
self.is_ptr = True
name_arr = ptr_name_arr[1:]
else:
self.is_ptr = False
name_arr = ptr_name_arr
if "[]" in ptr_name_arr:
self.is_ptr = True
name_arr = name_arr[:-2]
if "[" in name_arr:
self.name, arr = name_arr.split("[", 1)
self.shape = tuple([int(size) for size in arr[:-1].split("][")])
else:
self.name = name_arr
self.shape = ()
@property
def inout_state(self):
if self.__inout_state is None:
self.__inout_state = ''
for i in self.doc.input:
if self.name in i.name.split(','):
self.__inout_state = 'in'
for o in self.doc.output:
if self.name in o.name.split(','):
if self.__inout_state == 'in':
self.__inout_state = 'inout'
else:
self.__inout_state = 'out'
return self.__inout_state
@property
def name_for_call(self):
"""How the argument should be used in the call to the ERFA function.
This takes care of ensuring that inputs are passed by value,
as well as adding back the number of bodies for any LDBODY argument.
The latter presumes that in the ufunc inner loops, that number is
called 'nb'.
"""
if self.ctype == 'eraLDBODY':
assert self.name == 'b'
return 'nb, _' + self.name
elif self.is_ptr:
return '_'+self.name
else:
return '*_'+self.name
def __repr__(self):
return f"Argument('{self.definition}', name='{self.name}', ctype='{self.ctype}', inout_state='{self.inout_state}')"
class ReturnDoc:
def __init__(self, doc):
self.doc = doc
self.infoline = doc.split('\n')[0].strip()
self.type = self.infoline.split()[0]
self.descr = self.infoline.split()[1]
if self.descr.startswith('status'):
self.statuscodes = statuscodes = {}
code = None
for line in doc[doc.index(':')+1:].split('\n'):
ls = line.strip()
if ls != '':
if ' = ' in ls:
code, msg = ls.split(' = ')
if code != 'else':
code = int(code)
statuscodes[code] = msg
elif code is not None:
statuscodes[code] += ls
else:
self.statuscodes = None
def __repr__(self):
return f"Return value, type={self.type:15}, {self.descr}, {self.doc}"
class Return(Variable):
def __init__(self, ctype, doc):
self.name = 'c_retval'
self.inout_state = 'stat' if ctype == 'int' else 'ret'
self.ctype = ctype
self.shape = ()
self.doc = doc
def __repr__(self):
return f"Return(name='{self.name}', ctype='{self.ctype}', inout_state='{self.inout_state}')"
@property
def doc_info(self):
return self.doc.ret_info
class Function:
"""
A class representing a C function.
Parameters
----------
name : str
The name of the function
source_path : str
Either a directory, which means look for the function in a
stand-alone file (like for the standard ERFA distribution), or a
file, which means look for the function in that file (as for the
astropy-packaged single-file erfa.c).
match_line : str, optional
If given, searching of the source file will skip until it finds
a line matching this string, and start from there.
"""
def __init__(self, name, source_path, match_line=None):
self.name = name
self.pyname = name.split('era')[-1].lower()
self.filename = self.pyname+".c"
if os.path.isdir(source_path):
self.filepath = os.path.join(os.path.normpath(source_path), self.filename)
else:
self.filepath = source_path
with open(self.filepath) as f:
if match_line:
line = f.readline()
while line != '':
if line.startswith(match_line):
filecontents = '\n' + line + f.read()
break
line = f.readline()
else:
msg = ('Could not find the match_line "{0}" in '
'the source file "{1}"')
raise ValueError(msg.format(match_line, self.filepath))
else:
filecontents = f.read()
pattern = fr"\n([^\n]+{name} ?\([^)]+\)).+?(/\*.+?\*/)"
p = re.compile(pattern, flags=re.DOTALL | re.MULTILINE)
search = p.search(filecontents)
self.cfunc = " ".join(search.group(1).split())
self.doc = FunctionDoc(search.group(2))
self.args = []
for arg in re.search(r"\(([^)]+)\)", self.cfunc).group(1).split(', '):
self.args.append(Argument(arg, self.doc))
self.ret = re.search(f"^(.*){name}", self.cfunc).group(1).strip()
if self.ret != 'void':
self.args.append(Return(self.ret, self.doc))
def args_by_inout(self, inout_filter, prop=None, join=None):
"""
Gives all of the arguments and/or returned values, depending on whether
they are inputs, outputs, etc.
The value for `inout_filter` should be a string containing anything
that arguments' `inout_state` attribute produces. Currently, that can be:
* "in" : input
* "out" : output
* "inout" : something that's could be input or output (e.g. a struct)
* "ret" : the return value of the C function
* "stat" : the return value of the C function if it is a status code
It can also be a "|"-separated string giving inout states to OR
together.
"""
result = []
for arg in self.args:
if arg.inout_state in inout_filter.split('|'):
if prop is None:
result.append(arg)
else:
result.append(getattr(arg, prop))
if join is not None:
return join.join(result)
else:
return result
@property
def user_dtype(self):
"""The non-standard dtype, if any, needed by this function's ufunc.
This would be any structured array for any input or output, but
we give preference to LDBODY, since that also decides that the ufunc
should be a generalized ufunc.
"""
user_dtype = None
for arg in self.args_by_inout('in|inout|out'):
if arg.ctype == 'eraLDBODY':
return arg.dtype
elif user_dtype is None and arg.dtype not in ('dt_double',
'dt_int'):
user_dtype = arg.dtype
return user_dtype
@property
def signature(self):
"""Possible signature, if this function should be a gufunc."""
if all(arg.signature_shape == '()'
for arg in self.args_by_inout('in|inout|out')):
return None
return '->'.join(
[','.join([arg.signature_shape for arg in args])
for args in (self.args_by_inout('in|inout'),
self.args_by_inout('inout|out|ret|stat'))])
def _d3_fix_arg_and_index(self):
if not any('d3' in arg.signature_shape
for arg in self.args_by_inout('in|inout')):
for j, arg in enumerate(self.args_by_inout('out')):
if 'd3' in arg.signature_shape:
return j, arg
return None, None
@property
def d3_fix_op_index(self):
"""Whether only output arguments have a d3 dimension."""
index = self._d3_fix_arg_and_index()[0]
if index is not None:
len_in = len(list(self.args_by_inout('in')))
len_inout = len(list(self.args_by_inout('inout')))
            index += len_in + 2 * len_inout
return index
@property
def d3_fix_arg(self):
"""Whether only output arguments have a d3 dimension."""
return self._d3_fix_arg_and_index()[1]
@property
def python_call(self):
outnames = [arg.name for arg in self.args_by_inout('inout|out|stat|ret')]
argnames = [arg.name for arg in self.args_by_inout('in|inout')]
argnames += [arg.name for arg in self.args_by_inout('inout')]
d3fix_index = self._d3_fix_arg_and_index()[0]
if d3fix_index is not None:
argnames += ['None'] * d3fix_index + [self.d3_fix_arg.name]
return '{out} = {func}({args})'.format(out=', '.join(outnames),
func='ufunc.' + self.pyname,
args=', '.join(argnames))
def __repr__(self):
return f"Function(name='{self.name}', pyname='{self.pyname}', filename='{self.filename}', filepath='{self.filepath}')"
class Constant:
def __init__(self, name, value, doc):
self.name = name.replace("ERFA_", "")
self.value = value.replace("ERFA_", "")
self.doc = doc
class ExtraFunction(Function):
"""
An "extra" function - e.g. one not following the SOFA/ERFA standard format.
Parameters
----------
cname : str
The name of the function in C
prototype : str
The prototype for the function (usually derived from the header)
pathfordoc : str
The path to a file that contains the prototype, with the documentation
as a multiline string *before* it.
"""
def __init__(self, cname, prototype, pathfordoc):
self.name = cname
self.pyname = cname.split('era')[-1].lower()
self.filepath, self.filename = os.path.split(pathfordoc)
self.prototype = prototype.strip()
if prototype.endswith('{') or prototype.endswith(';'):
self.prototype = prototype[:-1].strip()
incomment = False
lastcomment = None
with open(pathfordoc, 'r') as f:
for l in f:
if incomment:
if l.lstrip().startswith('*/'):
incomment = False
lastcomment = ''.join(lastcomment)
else:
if l.startswith('**'):
l = l[2:]
lastcomment.append(l)
else:
if l.lstrip().startswith('/*'):
incomment = True
lastcomment = []
if l.startswith(self.prototype):
self.doc = lastcomment
break
else:
raise ValueError('Did not find prototype {} in file '
'{}'.format(self.prototype, pathfordoc))
self.args = []
argset = re.search(fr"{self.name}\(([^)]+)?\)",
self.prototype).group(1)
if argset is not None:
for arg in argset.split(', '):
self.args.append(Argument(arg, self.doc))
self.ret = re.match(f"^(.*){self.name}",
self.prototype).group(1).strip()
if self.ret != 'void':
self.args.append(Return(self.ret, self.doc))
def __repr__(self):
r = super().__repr__()
if r.startswith('Function'):
r = 'Extra' + r
return r
def main(srcdir=DEFAULT_ERFA_LOC, outfn='core.py', ufuncfn='ufunc.c',
templateloc=DEFAULT_TEMPLATE_LOC, extra='erfa_additions.h',
verbose=True):
from jinja2 import Environment, FileSystemLoader
if verbose:
print_ = lambda *args, **kwargs: print(*args, **kwargs)
else:
print_ = lambda *args, **kwargs: None
# Prepare the jinja2 templating environment
env = Environment(loader=FileSystemLoader(templateloc))
def prefix(a_list, pre):
return [pre+f'{an_element}' for an_element in a_list]
def postfix(a_list, post):
return [f'{an_element}'+post for an_element in a_list]
def surround(a_list, pre, post):
return [pre+f'{an_element}'+post for an_element in a_list]
env.filters['prefix'] = prefix
env.filters['postfix'] = postfix
env.filters['surround'] = surround
erfa_c_in = env.get_template(ufuncfn + '.templ')
erfa_py_in = env.get_template(outfn + '.templ')
# Extract all the ERFA function names from erfa.h
if os.path.isdir(srcdir):
erfahfn = os.path.join(srcdir, 'erfa.h')
        multifilesrc = True
else:
erfahfn = os.path.join(os.path.split(srcdir)[0], 'erfa.h')
        multifilesrc = False
with open(erfahfn, "r") as f:
erfa_h = f.read()
print_("read erfa header")
if extra:
with open(os.path.join(templateloc or '.', extra), "r") as f:
erfa_h += f.read()
print_("read extra header")
funcs = OrderedDict()
section_subsection_functions = re.findall(
r'/\* (\w*)/(\w*) \*/\n(.*?)\n\n', erfa_h,
flags=re.DOTALL | re.MULTILINE)
for section, subsection, functions in section_subsection_functions:
print_(f"{section}.{subsection}")
# Right now, we compile everything, but one could be more selective.
# In particular, at the time of writing (2018-06-11), what was
        # actually required for astropy was not quite everything, but:
# ((section == 'Extra')
# or (section == "Astronomy")
# or (subsection == "AngleOps")
# or (subsection == "SphericalCartesian")
# or (subsection == "MatrixVectorProducts")
# or (subsection == 'VectorOps'))
if True:
func_names = re.findall(r' (\w+)\(.*?\);', functions,
flags=re.DOTALL)
for name in func_names:
print_(f"{section}.{subsection}.{name}...")
                if multifilesrc:
# easy because it just looks in the file itself
cdir = (srcdir if section != 'Extra' else
templateloc or '.')
funcs[name] = Function(name, cdir)
else:
# Have to tell it to look for a declaration matching
# the start of the header declaration, otherwise it
# might find a *call* of the function instead of the
# definition
                    for line in functions.split('\n'):
if name in line:
# [:-1] is to remove trailing semicolon, and
# splitting on '(' is because the header and
# C files don't necessarily have to match
# argument names and line-breaking or
# whitespace
match_line = line[:-1].split('(')[0]
                            funcs[name] = Function(name, srcdir, match_line)
break
else:
raise ValueError("A name for a C file wasn't "
"found in the string that "
"spawned it. This should be "
"impossible!")
funcs = funcs.values()
# Extract all the ERFA constants from erfam.h
erfamhfn = os.path.join(srcdir, 'erfam.h')
with open(erfamhfn, 'r') as f:
erfa_m_h = f.read()
constants = []
for chunk in erfa_m_h.split("\n\n"):
result = re.findall(r"#define (ERFA_\w+?) (.+?)$", chunk,
flags=re.DOTALL | re.MULTILINE)
if result:
doc = re.findall(r"/\* (.+?) \*/\n", chunk, flags=re.DOTALL)
for (name, value) in result:
constants.append(Constant(name, value, doc))
# TODO: re-enable this when const char* return values and
# non-status code integer rets are possible
# #Add in any "extra" functions from erfaextra.h
# erfaextrahfn = os.path.join(srcdir, 'erfaextra.h')
# with open(erfaextrahfn, 'r') as f:
# for l in f:
# ls = l.strip()
# match = re.match('.* (era.*)\(', ls)
# if match:
# print_("Extra: {0} ...".format(match.group(1)))
# funcs.append(ExtraFunction(match.group(1), ls, erfaextrahfn))
print_("Rendering template")
erfa_c = erfa_c_in.render(funcs=funcs, NUMPY_LT_1_16=NUMPY_LT_1_16)
erfa_py = erfa_py_in.render(funcs=funcs, constants=constants,
NUMPY_LT_1_16=NUMPY_LT_1_16)
if outfn is not None:
print_("Saving to", outfn, 'and', ufuncfn)
with open(os.path.join(templateloc, outfn), "w") as f:
f.write(erfa_py)
with open(os.path.join(templateloc, ufuncfn), "w") as f:
f.write(erfa_c)
print_("Done!")
return erfa_c, erfa_py, funcs
if __name__ == '__main__':
from argparse import ArgumentParser
ap = ArgumentParser()
ap.add_argument('srcdir', default=DEFAULT_ERFA_LOC, nargs='?',
help='Directory where the ERFA c and header files '
'can be found or to a single erfa.c file '
'(which must be in the same directory as '
'erfa.h). Defaults to the builtin astropy '
'erfa: "{}"'.format(DEFAULT_ERFA_LOC))
ap.add_argument('-o', '--output', default='core.py',
help='The output filename for the pure-python output.')
ap.add_argument('-u', '--ufunc', default='ufunc.c',
help='The output filename for the ufunc .c output')
ap.add_argument('-t', '--template-loc',
default=DEFAULT_TEMPLATE_LOC,
help='the location where the "core.py.templ" and '
'"ufunc.c.templ templates can be found.')
ap.add_argument('-x', '--extra',
default='erfa_additions.h',
help='header file for any extra files in the template '
'location that should be included.')
ap.add_argument('-q', '--quiet', action='store_false', dest='verbose',
help='Suppress output normally printed to stdout.')
args = ap.parse_args()
    main(args.srcdir, args.output, args.ufunc, args.template_loc,
         args.extra, args.verbose)
| bsipocz/astropy | astropy/_erfa/erfa_generator.py | Python | bsd-3-clause | 27,369 | 0.000548 |
class A:
def foo(self):
print('A.foo()')
class B(A):
def foo(self):
print('B.foo()')
class C(A):
def foo(self):
print('C.foo()')
class D(B, C):
def foo(self):
print('D.foo()')
x = D()
print(D.__mro__) # (D, B, C, A, object)
x.foo() # D.foo()
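# With cooperative super() calls (e.g. super().foo() in each override), the
# MRO above is the order the implementations would run: D, B, C, then A.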
| s3rvac/talks | 2017-03-07-Introduction-to-Python/examples/23-inheritance.py | Python | bsd-3-clause | 303 | 0.016502 |
"""
WSGI config for hummer project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hummer.settings")
application = get_wsgi_application()
| wangtaoking1/hummer | hummer/wsgi.py | Python | apache-2.0 | 389 | 0 |
import cubed
print(cubed.cub(2,106))
| Frikeer/LearnPython | exc8/exc8.py | Python | unlicense | 38 | 0.026316 |
# Copyright (c) 2014 eBay Software Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from troveclient import base
from troveclient import common
class Cluster(base.Resource):
"""A Cluster is an opaque cluster used to store Database clusters."""
def __repr__(self):
return "<Cluster: %s>" % self.name
def delete(self):
"""Delete the cluster."""
self.manager.delete(self)
class Clusters(base.ManagerWithFind):
"""Manage :class:`Cluster` resources."""
resource_class = Cluster
def create(self, name, datastore, datastore_version, instances=None,
locality=None):
"""Create (boot) a new cluster."""
body = {"cluster": {
"name": name
}}
datastore_obj = {
"type": datastore,
"version": datastore_version
}
body["cluster"]["datastore"] = datastore_obj
if instances:
body["cluster"]["instances"] = instances
if locality:
body["cluster"]["locality"] = locality
return self._create("/clusters", body, "cluster")
def list(self, limit=None, marker=None):
"""Get a list of all clusters.
:rtype: list of :class:`Cluster`.
"""
return self._paginated("/clusters", "clusters", limit, marker)
def get(self, cluster):
"""Get a specific cluster.
:rtype: :class:`Cluster`
"""
return self._get("/clusters/%s" % base.getid(cluster),
"cluster")
def delete(self, cluster):
"""Delete the specified cluster.
:param cluster: The cluster to delete
"""
url = "/clusters/%s" % base.getid(cluster)
resp, body = self.api.client.delete(url)
common.check_for_exceptions(resp, body, url)
def _action(self, cluster, body):
"""Perform a cluster "action" -- grow/shrink/etc."""
url = "/clusters/%s" % base.getid(cluster)
resp, body = self.api.client.post(url, body=body)
common.check_for_exceptions(resp, body, url)
if body:
return self.resource_class(self, body['cluster'], loaded=True)
return body
def add_shard(self, cluster):
"""Adds a shard to the specified cluster.
:param cluster: The cluster to add a shard to
"""
url = "/clusters/%s" % base.getid(cluster)
body = {"add_shard": {}}
resp, body = self.api.client.post(url, body=body)
common.check_for_exceptions(resp, body, url)
if body:
return self.resource_class(self, body, loaded=True)
return body
def grow(self, cluster, instances=None):
"""Grow a cluster.
:param cluster: The cluster to grow
:param instances: List of instances to add
"""
body = {"grow": instances}
return self._action(cluster, body)
def shrink(self, cluster, instances=None):
"""Shrink a cluster.
:param cluster: The cluster to shrink
:param instances: List of instances to drop
"""
body = {"shrink": instances}
return self._action(cluster, body)
class ClusterStatus(object):
ACTIVE = "ACTIVE"
BUILD = "BUILD"
FAILED = "FAILED"
SHUTDOWN = "SHUTDOWN"
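# Illustrative usage (a sketch; assumes an authenticated troveclient v1
# Client instance `tc` and instance dicts in the shape the Trove API expects):
#   cluster = tc.clusters.create('c1', 'mongodb', '2.4.9', instances=[...])
#   tc.clusters.grow(cluster, instances=[{'flavorRef': '7',
#                                         'volume': {'size': 2}}])
#   tc.clusters.delete(cluster)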
| Tesora-Release/tesora-python-troveclient | troveclient/v1/clusters.py | Python | apache-2.0 | 3,850 | 0 |
'''
pdbparser.py - Yevheniy Chuba - 6/1/2017
Parse local or external (PDB Database) 3D structure files (.pdb, .cif)
Usage:
parser = PdbParser("2128-1.pdb").pdb_processing()
output: [['L', 'DIQ...'], ['H', 'EVQL...']]
'''
import os
from Bio.PDB import *
from Bio.Seq import Seq
class PdbParser:
'''
PdbParser extracts amino acid sequence from PDB structure,
either downloaded from PDB database or uploaded localy .pdb file
Args:
        struct_name (string): name of the local file or PDB database ID
        struct_dir (string): directory holding (or receiving) structure files
        external (bool): True indicates the structure comes from an external
            PDB database; default is False (local .pdb file)
Returns:
Extracted amino acid sequence from the PDB file, including chain information
'''
def __init__(self, struct_name, struct_dir='PDB_Struct', external=False):
self.struct_name = struct_name
self.struct_dir = struct_dir
self.external = external
def pdb_processing(self):
"""
Process either uploaded or externally downloaded 3D structure
"""
if self.external:
self.pdb_struct = self.get_external_struct()
else:
self.pdb_struct = self.get_uploaded_struct()
extracted_seq = self.extract_seq_from_structure(self.pdb_struct)
return extracted_seq
def get_external_struct(self):
"""
Create Structure object from externally downloed (PDB Database) structure file (.cif)
"""
self.download_structure()
parser = MMCIFParser()
structure = parser.get_structure('STRUCT_OBJ',
os.path.join(self.struct_dir, self.struct_name) + '.cif')
return structure
def get_uploaded_struct(self):
"""
Create Structure object from locally uploaded structure file (.pdb)
"""
parser = PDBParser()
structure = parser.get_structure('STRUCT_OBJ',
os.path.join(self.struct_dir, self.struct_name))
return structure
def download_structure(self):
"""
Download structure from PDB database based on PDB ID
"""
pdbl = PDBList()
pdbl.retrieve_pdb_file(self.struct_name, pdir=self.struct_dir)
def extract_seq_from_structure(self, struct):
"""
Extract Polypeptides from a Structure Object
"""
ppb = PPBuilder() # Polypeptide builder object
aa_seqs = []
chains = struct.get_chains()
for pp in ppb.build_peptides(struct):
seq = pp.get_sequence()
aa_seqs.append(str(seq))
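        # Note: this index-based pairing assumes one polypeptide per chain;
        # chains broken by missing residues yield extra peptides and would
        # shift the chain-to-sequence mapping.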
chain_aa_map = [[chain.id, aa_seqs[index]] for index, chain in enumerate(chains)]
return chain_aa_map
| yevheniyc/Projects | 1h_NGL_3D_Viewer/pyscripts/pdbmapper/PdbParser.py | Python | mit | 2,840 | 0.002817 |
#!/usr/bin/env python
from ConfigParser import SafeConfigParser, NoSectionError, Error
import argparse
import os
import smtplib
import re
import sys
def main():
argument_parser = argparse.ArgumentParser(prog='mail')
argument_parser.add_argument('subject', help='The subject of the email')
argument_parser.add_argument('to', help='The destination email address')
args = argument_parser.parse_args()
if not re.match(r'.*@.*', args.to):
        print args.to, 'is not a valid email address.'
sys.exit()
path = os.path.split(os.path.realpath(__file__))[0]
config_parser = SafeConfigParser()
try:
config_parser.read(os.path.join(path, 'settings.ini'))
except Error as e:
        print 'Error reading settings.ini.', e
sys.exit()
try:
settings = dict(config_parser.items('email'))
except NoSectionError as e:
        print 'Error in settings.ini.', e
sys.exit()
try:
smtp_server = settings['smtp_server']
smtp_port = settings['smtp_port']
user = settings['user']
password = settings['password']
except KeyError as e:
print e
sys.exit()
if sys.stdin.isatty():
msg = raw_input('Please enter your message >')
else:
msg = '<br>'.join(line for line in sys.stdin)
headers = ['From: ' + user,
'Subject: ' + args.subject,
'To: ' + args.to,
'MIME-Version: 1.0',
'Content-Type: text/html']
headers = '\r\n'.join(headers)
session = smtplib.SMTP(smtp_server, smtp_port)
session.ehlo()
session.starttls()
session.ehlo()
session.login(user, password)
try:
session.sendmail(user, args.to, headers + '\r\n\r\n' + msg)
except smtplib.SMTPException as e:
print 'Error.'
for key in e.args[0].keys():
print key
for msg in e.args[0][key]:
print msg
session.quit()
if __name__ == '__main__':
main()
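# Expected settings.ini layout (inferred from the keys read above; the values
# shown are placeholders):
#   [email]
#   smtp_server = smtp.example.com
#   smtp_port = 587
#   user = you@example.com
#   password = secret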
| cmcdowell/CLI-email | emailer.py | Python | mit | 2,010 | 0.000498 |
# -*- coding: utf-8 -*-
from openerp.tests import common
from openerp.osv.orm import except_orm
from datetime import datetime
from . import test_util
class TestPlanning(common.TransactionCase):
def setUp(self):
super(TestPlanning, self).setUp()
# Cursor and user initialization
cr, uid = self.cr, self.uid
# Modules to test
# appt = appointment
# sched = schedule
self.space_obj = self.registry('planning.space')
self.service_obj = self.registry('planning.service')
self.employee_obj = self.registry('hr.employee')
self.sched_obj = self.registry('planning.schedule')
self.sched_line_obj = self.registry('planning.schedule.line')
self.client_obj = self.registry('res.partner')
self.pos_session_opening_obj = self.registry('pos.session.opening')
self.appt_obj = self.registry('planning.appointment')
self.pos_order_obj = self.registry('pos.order')
self.pos_order_line_obj = self.registry('pos.order.line')
# Utility module methods
self.create_sched = test_util.create_sched
self.create_sched_line = test_util.create_sched_line
self.create_appt = test_util.create_appt
# Create amount of spaces in range
space_ids = []
for space_num in range(2):
values = {'name': 'Space #' + str(space_num)}
self.space_id = self.space_obj.create(cr, uid, values)
space_ids.append(self.space_id)
# Create amount of services (in range) to be provided
service_ids = []
for service_num in range(2):
values = {'name': 'Service #' + str(service_num),
'service': 1, # Use default service product
'duration': 1 # 1 Hour
}
self.service_id = self.service_obj.create(cr, uid, values)
service_obj = self.service_obj.browse(cr, uid, self.service_id)
# Assign all spaces to the service
for space_id in space_ids:
service_obj.write({'space_ids': [(4, space_id)]})
service_ids.append(self.service_id)
# Create amount of employees (in range) to provide services
employee_ids = []
for employee_num in range(2):
values = {'name': 'Employee #' + str(employee_num)}
self.employee_id = self.employee_obj.create(cr, uid, values)
employee_obj = self.employee_obj.browse(cr, uid, self.employee_id)
# Assign all services to the employee
for service_id in service_ids:
employee_obj.write({'service_ids': [(4, service_id)]})
employee_ids.append(self.employee_id)
# TODO Pre-existing data
values = {'name': 'Break'}
self.client_obj.create(cr, uid, values, context={})
values = {'name': 'Libre',
'time_efficiency': 99}
break_space_id = self.space_obj.create(cr, uid, values)
values = {'name': 'Lunch',
'service': 1, # Use default service product
'duration': 1 # 1 Hour
}
break_service_id = self.service_obj.create(cr, uid, values)
service_obj = self.service_obj.browse(cr, uid, break_service_id)
service_obj.write({'space_ids': [(4, break_space_id)]})
# Create schedule for employee
self.date = '2000-01-01' # Old date chosen to avoid conflict with existing data.
self.sched_id = self.create_sched(cr, uid, self.sched_obj, self.date)
for employee_id in employee_ids:
            self.sched_line_id = self.create_sched_line(cr, uid, self.sched_line_obj, self.sched_id, employee_id)
# Create client to provide service to
values = {'name': 'Client #1'}
self.client_id = self.client_obj.create(cr, uid, values, context={})
# Create appointment with a chosen time.
self.start = '2000-01-01 14:30:00' # TODO fix timezone problem (this time is actually 10:30)
self.appt_id = self.create_appt(cr, uid, self.appt_obj,
self.client_id,
self.start,
self.service_id,
context={'start_date': self.start})
        # To open pos session (a pos.session mustn't already be open when testing.)
# TODO use receptionist user
uid = 5 # self.uid
self.pos_session_opening_id = self.pos_session_opening_obj.create(cr, uid, {'pos_config_id': 1})
def testAppointmentCancel(self):
"""
Check canceling appointment changes it to proper status,
removes pos.order.line if it exists and doesn't allow modifications.
Also validate that it won't allow pos.order.line unlinking,
if an appointment_id is present.
"""
# TODO use receptionist user
cr, uid = self.cr, 5 # self.uid
# Open POS Session to be able to create pos.orders
self.pos_session_opening_obj.open_session_cb(cr, uid, [self.pos_session_opening_id])
appt_obj = self.appt_obj.browse(cr, uid, self.appt_id)
appt_obj.action_check_in()
# Validate pos.order.line can't be removed if it's related to an appt.
order_line_obj = self.pos_order_line_obj.browse(cr, uid, appt_obj.order_line_id.id)
with self.assertRaises(except_orm) as ex:
order_line_obj.unlink()
appt = self.appt_obj.browse(cr, uid, self.appt_id)
appt.action_cancel()
self.assertEqual(appt.state, 'cancel')
# Validates modifications are not allowed after cancel
with self.assertRaises(except_orm) as ex:
appt.write({'duration': 2})
# Validate pos.order.line is unlinked after appt is cancelled.
order_line_obj = self.pos_order_line_obj.browse(cr, uid, appt.order_line_id.id)
self.assertFalse(order_line_obj.id)
appt = self.appt_obj.browse(cr, uid, self.appt_id)
self.assertFalse(appt.order_line_id.id)
def testAppointmentOverCanceled(self):
"""
Check that you can create and appointment on top of a canceled one,
with the same resources.
"""
cr, uid = self.cr, self.uid
appt_cancel = self.appt_obj.browse(cr, uid, self.appt_id)
appt_cancel.action_cancel()
appt_id = self.create_appt(cr, uid, self.appt_obj,
appt_cancel.client_id.id,
appt_cancel.start,
appt_cancel.service_id.id,
context={'start_date': self.start})
appt = self.appt_obj.browse(cr, uid, appt_id)
self.assertTrue(appt.id)
def testClientAvailability(self):
"""
Check that the same client can't have two appointments
at the same time.
"""
cr, uid = self.cr, self.uid
appt_id = None
with self.assertRaises(except_orm) as ex:
appt_id = self.create_appt(cr, uid, self.appt_obj,
self.client_id,
self.start,
self.service_id,
context={'start_date': self.start})
appt = self.appt_obj.browse(cr, uid, appt_id)
self.assertFalse(appt)
def testEmployeeAvailability(self):
"""
Check that the same employee can't have two appointments
at the same time.
"""
cr, uid = self.cr, self.uid
first_appt = self.appt_obj.browse(cr, uid, self.appt_id)
# Create new client to provide service to
values = {'name': 'Client #2'}
client_id = self.client_obj.create(cr, uid, values, context={})
appt_id = self.create_appt(cr, uid, self.appt_obj,
client_id,
self.start,
self.service_id,
context={'start_date': self.start})
appt_obj = self.appt_obj.browse(cr, uid, appt_id)
with self.assertRaises(except_orm) as ex:
appt_obj.write({'employee_id': first_appt.employee_id.id})
def testSpaceAvailability(self):
"""
Check that the same space can't have two appointments
at the same time.
"""
cr, uid = self.cr, self.uid
first_appt = self.appt_obj.browse(cr, uid, self.appt_id)
# Create new client to provide service to
values = {'name': 'Client #3'}
client_id = self.client_obj.create(cr, uid, values, context={})
appt_id = self.create_appt(cr, uid, self.appt_obj,
client_id,
self.start,
self.service_id,
context={'start_date': self.start})
appt_obj = self.appt_obj.browse(cr, uid, appt_id)
with self.assertRaises(except_orm) as ex:
appt_obj.write({'space_id': first_appt.space_id.id})
def testAppointmentUnlink(self):
"""
Check a normal user can't unlink/delete an appointment.
"""
# TODO use receptionist user
cr, uid = self.cr, 5 # self.uid
appt_obj = self.appt_obj.browse(cr, uid, self.appt_id)
with self.assertRaises(except_orm) as ex:
appt_obj.unlink()
appt_obj = self.appt_obj.browse(cr, uid, self.appt_id)
self.assertTrue(appt_obj.id)
def testAppointmentUnlinkManager(self):
"""
Check a manager user can unlink/delete an appointment.
"""
cr, uid = self.cr, self.uid
appt_obj = self.appt_obj.browse(cr, uid, self.appt_id)
self.assertTrue(appt_obj.unlink())
def testAppointmentDone(self):
"""
Pay the POS order for a checked-in appointment,
and validate that appointment status has changed and can't be modified.
"""
# TODO use receptionist user
cr, uid = self.cr, 5 # self.uid
# Open POS Session to be able to create pos.orders
self.pos_session_opening_obj.open_session_cb(cr, uid, [self.pos_session_opening_id])
appt = self.appt_obj.browse(cr, uid, self.appt_id)
appt.action_check_in()
self.assertEqual(appt.state, 'open')
# Validate order exists
order_obj = self.pos_order_obj.browse(cr, uid, appt.order_line_id.order_id.id)
self.assertTrue(order_obj.id)
# Pay order and validate invoice was created and appt changed to 'done'
order_obj.action_create_invoice()
self.assertEqual(order_obj.state, 'invoiced')
invoice_obj = order_obj.invoice_id
self.assertEqual(invoice_obj.state, 'open')
appt = self.appt_obj.browse(cr, uid, self.appt_id)
self.assertEqual(appt.state, 'done')
# Validate modifications aren't allowed
with self.assertRaises(except_orm) as ex:
appt.write({'duration': 3})
# Only manager users can unlink
cr, uid = self.cr, self.uid
appt = self.appt_obj.browse(cr, uid, self.appt_id)
with self.assertRaises(except_orm) as ex:
appt.unlink()
def testScheduleDuplicate(self):
"""
Check if schedule creation is working and not allowing duplicates.
"""
cr, uid = self.cr, self.uid
sched_obj = self.sched_obj.browse(cr, uid, self.sched_id)
self.assertTrue(sched_obj.id)
with self.assertRaises(except_orm) as ex:
self.create_sched(cr, uid, self.sched_obj, sched_obj.date)
with self.assertRaises(except_orm) as ex:
sched_obj.write({'date': sched_obj.date})
def testScheduleLineAppointment(self):
"""
Validate schedule.line create/update.
Check if schedule.line creates an appointment assigned to it's employee_id,
if it does, don't allow modification (start/end hours or missing)
or unlinking, else allow it.
"""
# Create schedule and schedule.line
cr, uid = self.cr, self.uid
sched_obj = self.sched_obj.browse(cr, uid, self.sched_id)
# Validate creation
# Can't create schedule.line with hour_start less than schedule hour_start.
with self.assertRaises(except_orm) as ex:
self.create_sched_line(cr, uid, self.sched_line_obj,
self.sched_id, self.employee_id, hour_start=8, hour_end=17)
# Can't create schedule.line with hour_end greater than schedule hour_end.
with self.assertRaises(except_orm) as ex:
self.create_sched_line(cr, uid, self.sched_line_obj,
self.sched_id, self.employee_id, hour_start=9, hour_end=22)
# Can't create schedule.line with hour_end less than or equal to hour_start.
with self.assertRaises(except_orm) as ex:
self.create_sched_line(cr, uid, self.sched_line_obj,
self.sched_id, self.employee_id, hour_start=12, hour_end=10)
sched_line_obj = self.sched_line_obj.browse(cr, uid, sched_obj.schedule_line_ids[0].id)
self.assertTrue(sched_line_obj.id)
# Validate if lunch appointment was created.
date = datetime.strptime(sched_obj.date, "%Y-%m-%d")
day_start, day_end = self.appt_obj._day_start_end_time(str(date))
appt_ids = self.appt_obj.search(cr, uid, [('employee_id', '=', sched_line_obj.employee_id.id),
('start', '>=', day_start),
('start', '<=', day_end),
])
self.assertEqual(len(appt_ids), 2) # 1 For the lunch appt, and 1 for the appt in setUp.
appt_obj = self.appt_obj.browse(cr, uid, appt_ids[0])
self.assertEqual(appt_obj.start, '2000-01-01 17:00:00')
# Validate update
# Can't update if hour_start < schedule.hour_start.
with self.assertRaises(except_orm) as ex:
self.sched_line_obj.write(cr, uid, [sched_line_obj.id], {'hour_start': 8})
# Can't update if hour_end > schedule.hour_end.
with self.assertRaises(except_orm) as ex:
self.sched_line_obj.write(cr, uid, [sched_line_obj.id], {'hour_end': 22})
self.sched_line_obj.write(cr, uid, [sched_line_obj.id], {'hour_start': 9})
self.sched_line_obj.write(cr, uid, [sched_line_obj.id], {'hour_end': 19})
sched_line_obj = self.sched_line_obj.browse(cr, uid, sched_line_obj.id)
self.assertEqual(sched_line_obj.hour_start, 9)
self.assertEqual(sched_line_obj.hour_end, 19)
# Can't modifiy if schedule.line starting hour is after appt start.
with self.assertRaises(except_orm) as ex:
sched_line_obj.write({'hour_start': 17.25})
# Can't modify if schedule.line ending hour is before appt end.
with self.assertRaises(except_orm) as ex:
sched_line_obj.write({'hour_end': 17.75})
# Can't modify if schedule.line missing is true and has an appt.
with self.assertRaises(except_orm) as ex:
sched_line_obj.write({'missing': True})
# Can't delete if appt inside schedule period.
with self.assertRaises(except_orm) as ex:
sched_line_obj.unlink()
# Cancel existing lunch appointment
appt_obj.case_cancel()
# Cancel appointment created in setUp (not used in this scenario)
appt_cancel = self.appt_obj.browse(cr, uid, appt_ids[1])
appt_cancel.case_cancel()
# Can't update hour_end to less than or equal to hour_start.
with self.assertRaises(except_orm) as ex:
self.sched_line_obj.write(cr, uid, [sched_line_obj.id], {'hour_end': 9})
# Validate all is allowed after appt is removed/canceled.
sched_line_obj.write({'hour_start': 17.25})
sched_line_obj.write({'hour_end': 17.75})
sched_line_obj = self.sched_line_obj.browse(cr, uid, sched_line_obj.id)
self.assertEqual(sched_line_obj.hour_start, 17.25)
self.assertEqual(sched_line_obj.hour_end, 17.75)
sched_line_obj.unlink()
def testScheduleMissingEmployee(self):
"""
Check if when an employee is marked as missing, it won't allow
creating apppointments with that employee.
"""
# Create schedule and schedule line
cr, uid = self.cr, self.uid
sched_obj = self.sched_obj.browse(cr, uid, self.sched_id)
sched_line_id = self.create_sched_line(cr, uid, self.sched_line_obj,
self.sched_id, self.employee_id, hour_start=9, hour_end=17)
sched_line_obj = self.sched_line_obj.browse(cr, uid, sched_line_id)
# Cancel auto-created lunch break.
date = datetime.strptime(sched_obj.date, "%Y-%m-%d")
day_start, day_end = self.appt_obj._day_start_end_time(str(date))
appt_ids = self.appt_obj.search(cr, uid, [('employee_id', '=', sched_line_obj.employee_id.id),
('start', '>=', day_start),
('start', '<=', day_end),
])
appt_obj = self.appt_obj.browse(cr, uid, appt_ids[0])
appt_obj.case_cancel()
# Cancel appointment created in setUp (not used in this scenario)
appt_cancel = self.appt_obj.browse(cr, uid, appt_ids[1])
appt_cancel.case_cancel()
# Mark employee as missing
sched_line_obj.write({'missing': True})
# Attempt to create appointment
        # TODO: for some reason, updating missing to True is not being saved.
# TODO fix timezone problem (this time is actually 09:00)
start = '2000-01-01 13:00:00'
employee_id = sched_line_obj.employee_id.id
#with self.assertRaises(except_orm) as ex:
# self.appt_id = self.create_appt(cr, uid, self.appt_obj,
# self.client_id,
# start,
# self.service_id,
# employee_id=employee_id,
# context={'tz': 'America/Santo_Domingo',
# 'start_date': start})
# Mark employee as not missing
sched_line_obj.write({'missing': False})
# Create appointment
self.appt_id = self.create_appt(cr, uid, self.appt_obj,
self.client_id,
start,
self.service_id,
employee_id=employee_id,
context={'tz': 'America/Santo_Domingo',
'start_date': start})
appt_obj = self.appt_obj.browse(cr, uid, self.appt_id)
self.assertTrue(appt_obj.id)
| jeacaveo/planning_70 | tests/test_planning.py | Python | agpl-3.0 | 19,305 | 0.002176 |
from django.contrib.auth import authenticate,login
from django.contrib import messages
from django.http import HttpResponse, HttpResponseForbidden, HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response
from django.template import RequestContext
def user_login(request):
if request.POST:
username = request.POST['username']
password = request.POST['password']
nextURL = request.POST['nextURL']
user = authenticate(username=username, password=password)
if user is not None:
if user.is_active:
login(request, user)
                return HttpResponseRedirect(nextURL)  # redirect to the originally requested URL
else:
messages.add_message(request, messages.INFO,
'Your account is not active. Please contact an administrator')
return HttpResponseRedirect(reverse("login:user_login") + "?next=" + nextURL)
else:
messages.add_message(request, messages.INFO,
'Incorrect username and/or password.')
return HttpResponseRedirect(reverse("user_login:user_login") + "?next=" + nextURL)
else:
nextURL = request.GET.get("next")
return render_to_response('login.html',
{"nextURL": nextURL},
RequestContext(request))
| paleocore/paleocore | login/views.py | Python | gpl-2.0 | 1,470 | 0.005442 |
#!/usr/bin/env python
#
# Copyright (C) 2014-2017 Nextworks
# Author: Vincenzo Maffione <v.maffione@nextworks.it>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# A tool for live demonstrations and regression testing for the IRATI stack
import multiprocessing
import gen_templates
import subprocess
import argparse
import json
import copy
import re
import os
def which(program):
FNULL = open(os.devnull, 'w')
retcode = subprocess.call(['which', program], stdout = FNULL,
stderr = subprocess.STDOUT)
if retcode != 0:
print('Fatal error: Cannot find "%s" program' % program)
quit(1)
def dict_dump_json(file_name, dictionary, env_dict):
dictionary_str = json.dumps(dictionary, indent = 4,
sort_keys = True) % env_dict
fout = open(file_name, 'w')
    fout.write(dictionary_str)
fout.close()
def joincat(haystack, needle):
return ' '.join([needle, haystack])
def netem_validate(netem_args):
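    # Probe whether `netem_args` is a valid tc-netem argument string by
    # applying it to a scratch tap device; the device is removed afterwards.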
ret = True
try:
fdevnull = open(os.devnull, 'w')
subprocess.check_call('sudo ip tuntap add mode tap name tapiratiprobe'.split())
subprocess.check_call(('sudo tc qdisc add dev '\
'tapiratiprobe root netem %s'\
% netem_args).split(), stdout=fdevnull,
stderr=fdevnull)
fdevnull.close()
except:
ret = False
subprocess.call('sudo ip tuntap del mode tap name tapiratiprobe'.split())
return ret
description = "Python script to generate IRATI deployments for Virtual Machines"
epilog = "2016 Vincenzo Maffione <v.maffione@nextworks.it>"
argparser = argparse.ArgumentParser(description = description,
epilog = epilog)
argparser.add_argument('-c', '--conf',
help = "gen.conf configuration file", type = str,
default = 'gen.conf')
argparser.add_argument('-g', '--graphviz', action='store_true',
help = "Generate DIF graphs with graphviz")
argparser.add_argument('--legacy', action='store_true',
help = "Use qcow2 image rather than buildroot ramfs")
argparser.add_argument('-m', '--memory',
help = "Amount of memory in megabytes", type = int,
default = '164')
argparser.add_argument('-e', '--enrollment-strategy',
help = "Minimal uses a spanning tree of each DIF",
type = str, choices = ['minimal', 'full-mesh', 'manual'],
default = 'minimal')
argparser.add_argument('--ring',
help = "Use ring topology with variable number of nodes",
type = int)
argparser.add_argument('--kernel',
help = "custom kernel buildroot image", type = str,
default = 'buildroot/bzImage')
argparser.add_argument('--initramfs',
help = "custom initramfs buildroot image", type = str,
default = 'buildroot/rootfs.cpio')
argparser.add_argument('-f', '--frontend',
help = "Choose which emulated NIC the nodes will use",
type = str, choices = ['virtio-net-pci', 'e1000'],
default = 'virtio-net-pci')
argparser.add_argument('--vhost', action='store_true',
help = "Use vhost acceleration for virtio-net frontend")
argparser.add_argument('--manager', action='store_true',
help = "Add support for NMS manager and dedicated LAN")
argparser.add_argument('--manager-kernel',
help = "custom kernel buildroot image for the manager",
type = str, default = 'buildroot/bzImage')
argparser.add_argument('--manager-initramfs',
help = "custom initramfs buildroot image for the manager",
type = str, default = 'buildroot/rootfs.cpio')
argparser.add_argument('--overlay',
help = "Overlay the specified directory in the generated image",
type = str)
argparser.add_argument('--loglevel',
help = "Set verbosity level",
choices = ['DBG', 'INFO', 'NOTE', 'WARN', 'ERR', 'CRIT', 'ALERT', 'EMERG'],
default = 'DBG')
args = argparser.parse_args()
which('brctl')
which('qemu-system-x86_64')
subprocess.call(['chmod', '0400', 'buildroot/irati_rsa'])
if args.overlay:
args.overlay = os.path.abspath(args.overlay)
if not os.path.isdir(args.overlay):
args.overlay = None
if args.legacy:
sshopts = ''
sudo = 'sudo'
else:
sshopts = '-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null '\
'-o IdentityFile=buildroot/irati_rsa'
sudo = ''
if args.legacy:
######################## Compile mac2ifname program ########################
try:
subprocess.call(['cc', '-Wall', '-o', 'mac2ifname', 'mac2ifname.c'])
except:
print('Cannot find a C compiler to compile mac2ifname program')
quit(1)
env_dict = {}
keywords = ['vmimgpath', 'installpath', 'username', 'baseport']
############################## Parse gen.env ###############################
fin = open('gen.env', 'r')
while True:
line = fin.readline()
if line == '':
break
m = re.match(r'(\S+)\s*=\s*(\S+)', line)
    if m is None:
continue
key = m.group(1)
value = m.group(2)
if key not in keywords:
print('Unrecognized keyword %s' % (key))
continue
env_dict[key] = value
fin.close()
for key in keywords:
if key not in env_dict:
print("Configuration variables missing")
quit(1)
env_dict['baseport'] = int(env_dict['baseport'])
env_dict['varpath'] = env_dict['installpath']
if not args.legacy:
# overwrite vmimgpath, installpath, varpath, username
env_dict['vmimgpath'] = args.initramfs
env_dict['installpath'] = '/usr'
env_dict['varpath'] = ''
env_dict['username'] = 'root'
# Possibly autogenerate ring topology
if args.ring is not None and args.ring > 0:
print("Ignoring %s, generating ring topology" % (args.conf,))
fout = open('ring.conf', 'w')
for i in range(args.ring):
i_next = i + 1
if i_next == args.ring:
i_next = 0
fout.write('eth %(vlan)s 0Mbps m%(i)s m%(inext)s\n' % \
{'i': i+1, 'inext': i_next+1, 'vlan': i+1+100})
for i in range(args.ring):
i_prev = i - 1
if i_prev < 0:
i_prev = args.ring - 1
fout.write('dif n m%(i)s %(vlan)s %(vprev)s\n' % \
{'i': i+1, 'vlan': i+1+100, 'vprev': i_prev+1+100})
fout.close()
args.conf = 'ring.conf'
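# For example, "--ring 3" writes the following ring.conf:
#   eth 101 0Mbps m1 m2
#   eth 102 0Mbps m2 m3
#   eth 103 0Mbps m3 m1
#   dif n m1 101 103
#   dif n m2 102 101
#   dif n m3 103 102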
# Some constants related to the RINA management
injected_lines = []
mgmt_shim_dif_name = '3456'
mgmt_dif_name = 'NMS'
mgmt_node_name = 'mgr'
if not os.path.exists(args.conf):
print("Error: %s not found" % args.conf)
quit()
# Try to check that gen.conf is ASCII
try:
o = subprocess.check_output(['file', args.conf])
o = str(o).upper()
if o.find('ASCII') == -1:
print("Error: %s is not ASCII encoded" % args.conf)
quit()
except Exception as e:
print(e)
############################# Parse gen.conf ##############################
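# Directive formats accepted below (node/app names are illustrative):
#   eth 100 0Mbps a b       -> shim-eth DIF on VLAN 100 linking nodes a and b
#   dif n a 100             -> normal DIF "n" on node a, stacked over DIF 100
#   appmap n my.echoapp 1   -> map application name/instance onto DIF "n"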
fin = open(args.conf, 'r')
vms = dict()
shims = dict()
links = []
difs = dict()
enrollments = dict()
dif_policies = dict()
dif_graphs = dict()
app_mappings = []
overlays = dict()
netems = dict()
manual_enrollments = dict()
linecnt = 0
conf_injection = True
while True:
try:
line = fin.readline()
except UnicodeDecodeError:
print("Error: demo.conf must be ASCII encoded")
quit()
if line == '':
# EOF, try to pick from injected lines
if len(injected_lines) > 0:
line = injected_lines.pop(0)
if line == '':
if not conf_injection:
# Injection already done, let's stop now
break
# Inject new lines and continue
conf_injection = False
if args.manager:
            vm_list = sorted(vms)
vm_list.append(mgmt_node_name) # a VM for the manager
injected_lines.append('eth %s 0Mbps %s' % (mgmt_shim_dif_name, ' '.join(vm_list)))
for vmname in vm_list:
injected_lines.append('dif %s %s %s' % (mgmt_dif_name, vmname, mgmt_shim_dif_name))
continue
linecnt += 1
line = line.replace('\n', '')
if line.startswith('#'):
continue
if re.match(r'\s*$', line):
continue
m = re.match(r'\s*eth\s+(\d+)\s+(\d+)([GMK])bps\s+(\w.*)$', line)
if m:
vlan = m.group(1)
speed = int(m.group(2))
speed_unit = m.group(3).lower()
vm_list = m.group(4).split()
if vlan in shims:
print('Error: Line %d: shim %s already defined' \
% (linecnt, vlan))
continue
shims[vlan] = {'bridge': 'rbr' + vlan, 'vlan': vlan, 'speed': speed,
'speed_unit': speed_unit}
for vm in vm_list:
if vm not in vms:
vms[vm] = {'name': vm, 'ports': []}
links.append((vlan, vm))
#for i in range(len(vm_list)-1):
# for j in range(i + 1, len(vm_list)):
# print(vm_list[i], vm_list[j])
continue
m = re.match(r'\s*dif\s+([\w-]+)\s+([\w-]+)\s+([\w-].*)$', line)
if m:
dif = m.group(1)
vm = m.group(2)
dif_list = m.group(3).split()
if vm not in vms:
vms[vm] = {'name': vm, 'ports': []}
if dif not in difs:
difs[dif] = dict()
if vm in difs[dif]:
print('Error: Line %d: vm %s in dif %s already specified' \
% (linecnt, vm, dif))
continue
difs[dif][vm] = dif_list
continue
m = re.match(r'\s*policy\s+([\w-]+)\s+(\*|(?:(?:[\w-]+,)*[\w-]+))\s+([*\w.-]+)\s+([\w-]+)((?:\s+[\w.-]+\s*=\s*[/\w.-]+)*)\s*$', line)
if m:
dif = m.group(1)
nodes = m.group(2)
path = m.group(3)
ps = m.group(4)
parms = list()
        if m.group(5) is not None:
parms_str = m.group(5).strip()
if parms_str != '':
parms = parms_str.split(' ')
if dif not in dif_policies:
dif_policies[dif] = []
if nodes == '*':
nodes = []
else:
nodes = nodes.split(',')
dif_policies[dif].append({'path': path, 'nodes': nodes,
'ps': ps, 'parms' : parms})
if not gen_templates.policy_path_valid(path):
print('Unknown component path "%s"' % path)
quit(1)
continue
m = re.match(r'\s*appmap\s+([\w-]+)\s+([\w.]+)\s+(\d+)\s*$', line)
if m:
dif = m.group(1)
apname = m.group(2)
apinst = m.group(3)
app_mappings.append({'name': '%s-%s--' % (apname, apinst), 'dif' : dif})
continue
    m = re.match(r'\s*overlay\s+([\w-]+)\s+([\w./-]+)\s*$', line)
if m:
vmname = m.group(1)
opath = m.group(2)
opath = os.path.abspath(opath)
if not os.path.isdir(opath):
print("Error: line %d: no such overlay path" % linecnt)
continue
overlays[vmname] = opath
continue
m = re.match(r'\s*netem\s+(\d+)\s+([\w-]+)\s+(\w.*)$', line)
if m:
dif = m.group(1)
vmname = m.group(2)
netem_args = m.group(3)
if dif not in netems:
netems[dif] = dict()
netems[dif][vmname] = {'args': netem_args, 'linecnt': linecnt}
continue
m = re.match(r'\s*enroll\s+([\w.-]+)\s+([\w.-]+)\s+([\w.-]+)\s+([\w.-]+)\s*$', line)
if m:
if args.enrollment_strategy != 'manual':
print('Warning: ignoring enroll directive at line %d' % linecnt)
continue
dif_name = m.group(1)
enrollee = m.group(2)
enroller = m.group(3)
n_1_dif = m.group(4)
if dif_name not in manual_enrollments:
manual_enrollments[dif_name] = []
manual_enrollments[dif_name].append({
'enrollee': enrollee,
'enroller': enroller,
'lower_dif': n_1_dif,
'linecnt': linecnt})
continue
print("Error: line %d not recognized" % linecnt)
quit()
fin.close()
for dif in difs:
if dif not in dif_policies:
dif_policies[dif] = []
boot_batch_size = max(1, multiprocessing.cpu_count() // 2)
wait_for_boot = 12 # in seconds
if len(vms) > 8:
print("You want to run a lot of nodes, so it's better if I give "
"each node some time to boot (since the boot is CPU-intensive)")
############ Compute registration/enrollment order for DIFs ###############
# Compute DIFs dependency graph, as both adjacency and incidence list.
difsdeps_adj = dict()
difsdeps_inc = dict()
for dif in difs:
difsdeps_inc[dif] = set()
difsdeps_adj[dif] = set()
for shim in shims:
difsdeps_inc[shim] = set()
difsdeps_adj[shim] = set()
for dif in difs:
for vmname in difs[dif]:
for lower_dif in difs[dif][vmname]:
difsdeps_inc[dif].add(lower_dif)
difsdeps_adj[lower_dif].add(dif)
# Kahn's algorithm below only needs per-node count of
# incident edges, so we compute these counts from the
# incidence list and drop the latter.
difsdeps_inc_cnt = dict()
for dif in difsdeps_inc:
difsdeps_inc_cnt[dif] = len(difsdeps_inc[dif])
del difsdeps_inc
#print(difsdeps_adj)
#print(difsdeps_inc_cnt)
# Run Kahn's algorithm to compute topological ordering on the DIFs graph.
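# Example: given "eth 110 ..." and a single "dif n m1 110", the incidence
# counts start as {'110': 0, 'n': 1}; the shim enters the frontier first,
# so the resulting ordering is ['110', 'n'] -- lower DIFs always come
# before the DIFs stacked on top of them.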
frontier = set()
dif_ordering = []
for dif in difsdeps_inc_cnt:
if difsdeps_inc_cnt[dif] == 0:
frontier.add(dif)
while len(frontier):
cur = frontier.pop()
dif_ordering.append(cur)
for nxt in difsdeps_adj[cur]:
difsdeps_inc_cnt[nxt] -= 1
if difsdeps_inc_cnt[nxt] == 0:
frontier.add(nxt)
difsdeps_adj[cur] = set()
circular_set = [dif for dif in difsdeps_inc_cnt if difsdeps_inc_cnt[dif] != 0]
if len(circular_set):
print("Fatal error: The specified DIFs topology has one or more"\
"circular dependencies, involving the following"\
" DIFs: %s" % circular_set)
print(" DIFs dependency graph: %s" % difsdeps_adj);
quit(1)
####################### Compute DIF graphs #######################
for dif in difs:
neighsets = dict()
dif_graphs[dif] = dict()
first = None
# For each N-1-DIF supporting this DIF, compute the set of nodes that
# share such N-1-DIF. This set will be called the 'neighset' of
# the N-1-DIF for the current DIF.
for vmname in difs[dif]:
dif_graphs[dif][vmname] = [] # init for later use
        if first is None:  # pick any node for later use
first = vmname
for lower_dif in difs[dif][vmname]:
if lower_dif not in neighsets:
neighsets[lower_dif] = []
neighsets[lower_dif].append(vmname)
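    # e.g. if m1 and m2 both list shim 110 under this DIF, then
    # neighsets['110'] == ['m1', 'm2'] and the adjacency list below gets
    # the edges m1 -> (m2, '110') and m2 -> (m1, '110').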
# Build the graph, represented as adjacency list
for lower_dif in neighsets:
# Each neighset corresponds to a complete (sub)graph.
for vm1 in neighsets[lower_dif]:
for vm2 in neighsets[lower_dif]:
if vm1 != vm2:
dif_graphs[dif][vm1].append((vm2, lower_dif))
enrollments[dif] = []
if args.manager and dif == mgmt_dif_name:
# Enrollment in the NMS DIF is managed as a special case:
# each node is enrolled against the manager node
for vmname in vms:
if vmname != mgmt_node_name:
enrollments[dif].append({'enrollee': vmname,
'enroller': mgmt_node_name,
'lower_dif': mgmt_shim_dif_name})
elif args.enrollment_strategy == 'minimal':
        # To generate the list of enrollments, we simulate one,
        # using breadth-first traversal.
enrolled = set([first])
frontier = set([first])
while len(frontier):
cur = frontier.pop()
for edge in dif_graphs[dif][cur]:
if edge[0] not in enrolled:
enrolled.add(edge[0])
enrollments[dif].append({'enrollee': edge[0],
'enroller': cur,
'lower_dif': edge[1]})
frontier.add(edge[0])
elif args.enrollment_strategy == 'full-mesh':
for cur in dif_graphs[dif]:
for edge in dif_graphs[dif][cur]:
if cur < edge[0]:
enrollments[dif].append({'enrollee': cur,
'enroller': edge[0],
'lower_dif': edge[1]})
elif args.enrollment_strategy == 'manual':
if dif not in manual_enrollments:
continue
for e in manual_enrollments[dif]:
if e['enrollee'] not in difs[dif]:
print('Warning: ignoring line %d because VM %s does '\
'not belong to DIF %s' % (e['linecnt'],
e['enrollee'], dif))
continue
if e['enroller'] not in difs[dif]:
print('Warning: ignoring line %d because VM %s does '\
'not belong to DIF %s' % (e['linecnt'],
e['enroller'], dif))
continue
if e['lower_dif'] not in neighsets or \
e['enrollee'] not in neighsets[e['lower_dif']]:
print('Warning: ignoring line %d because VM %s cannot '\
'use N-1-DIF %s' % (e['linecnt'], e['enrollee'],
e['lower_dif']))
continue
if e['lower_dif'] not in neighsets or \
e['enroller'] not in neighsets[e['lower_dif']]:
print('Warning: ignoring line %d because VM %s cannot '\
'use N-1-DIF %s' % (e['linecnt'], e['enroller'],
e['lower_dif']))
continue
enrollments[dif].append(e)
else:
# This is a bug
assert(False)
#print(neighsets)
#print(dif_graphs[dif])
for shim in shims:
enrollments[shim] = dict()
###################### Generate UP script ########################
fout = open('up.sh', 'w')
outs = '#!/bin/bash\n' \
'\n' \
'set -x\n' \
'\n';
for shim in sorted(shims):
outs += 'sudo brctl addbr %(br)s\n' \
'sudo ip link set %(br)s up\n' \
'\n' % {'br': shims[shim]['bridge']}
for l in sorted(links):
shim, vm = l
b = shims[shim]['bridge']
idx = len(vms[vm]['ports']) + 1
tap = '%s.%02x' % (vm, idx)
outs += 'sudo ip tuntap add mode tap name %(tap)s\n' \
'sudo ip link set %(tap)s up\n' \
'sudo brctl addif %(br)s %(tap)s\n\n' \
% {'tap': tap, 'br': b}
if shims[shim]['speed'] > 0:
speed = '%d%sbit' % (shims[shim]['speed'], shims[shim]['speed_unit'])
# Rate limit the traffic transmitted on the TAP interface
outs += 'sudo tc qdisc add dev %(tap)s handle 1: root ' \
'htb default 11\n' \
'sudo tc class add dev %(tap)s parent 1: classid ' \
'1:1 htb rate 10gbit\n' \
'sudo tc class add dev %(tap)s parent 1:1 classid ' \
'1:11 htb rate %(speed)s\n' \
% {'tap': tap, 'speed': speed}
if shim in netems:
if vm in netems[shim]:
if not netem_validate(netems[shim][vm]['args']):
print('Warning: line %(linecnt)s is invalid and '\
'will be ignored' % netems[shim][vm])
continue
outs += 'sudo tc qdisc add dev %(tap)s root netem '\
'%(args)s\n'\
% {'tap': tap, 'args': netems[shim][vm]['args']}
vms[vm]['ports'].append({'tap': tap, 'br': b, 'idx': idx,
'vlan': shim})
vmid = 1
budget = boot_batch_size
for vmname in sorted(vms):
vm = vms[vmname]
vm['id'] = vmid
fwdp = env_dict['baseport'] + vmid
mac = '00:0a:0a:0a:%02x:%02x' % (vmid, 99)
vm['ssh'] = fwdp
vars_dict = {'fwdp': fwdp, 'id': vmid, 'mac': mac,
'vmimgpath': env_dict['vmimgpath'],
'memory': args.memory, 'kernel': args.kernel,
'frontend': args.frontend, 'vmname': vmname}
if vmname == mgmt_node_name:
vars_dict['vmimgpath'] = args.manager_initramfs
vars_dict['kernel'] = args.manager_kernel
outs += 'qemu-system-x86_64 '
if not args.legacy:
outs += '-kernel %(kernel)s ' \
'-append "console=ttyS0" ' \
'-initrd %(vmimgpath)s ' \
% vars_dict
else:
outs += '"%(vmimgpath)s" ' \
'-snapshot ' % vars_dict
outs += '-display none ' \
'--enable-kvm ' \
'-smp 2 ' \
'-m %(memory)sM ' \
'-device %(frontend)s,mac=%(mac)s,netdev=mgmt ' \
'-netdev user,id=mgmt,hostfwd=tcp::%(fwdp)s-:22 ' \
'-vga std ' \
'-pidfile rina-%(id)s.pid ' \
'-serial file:%(vmname)s.log ' \
% vars_dict
del vars_dict
for port in vm['ports']:
tap = port['tap']
mac = '00:0a:0a:0a:%02x:%02x' % (vmid, port['idx'])
port['mac'] = mac
outs += '' \
'-device %(frontend)s,mac=%(mac)s,netdev=data%(idx)s ' \
'-netdev tap,ifname=%(tap)s,id=data%(idx)s,script=no,downscript=no'\
'%(vhost)s '\
% {'mac': mac, 'tap': tap, 'idx': port['idx'],
'frontend': args.frontend,
'vhost': ',vhost=on' if args.vhost else ''}
outs += '&\n\n'
budget -= 1
if budget <= 0:
outs += 'sleep %s\n' % wait_for_boot
budget = boot_batch_size
vmid += 1
for vmname in sorted(vms):
vm = vms[vmname]
gen_files_conf = 'shimeth.%(name)s.*.dif da.map %(name)s.ipcm.conf' % {'name': vmname}
if any(vmname in difs[difname] for difname in difs):
gen_files_conf = joincat(gen_files_conf, 'normal.%(name)s.*.dif' % {'name': vmname})
gen_files_bin = 'enroll.py'
overlay = ''
per_vm_overlay = ''
if args.legacy:
gen_files_bin = joincat(gen_files_bin, 'mac2ifname')
if args.overlay:
overlay = args.overlay
if vmname in overlays:
per_vm_overlay = overlays[vmname]
ipcm_components = ['scripting', 'console']
if args.manager:
ipcm_components.append('mad')
ipcm_components = ', '.join(ipcm_components)
gen_files = ' '.join([gen_files_conf, gen_files_bin, overlay, per_vm_overlay])
outs += ''\
'DONE=255\n'\
'while [ $DONE != "0" ]; do\n'\
' scp %(sshopts)s -r -P %(ssh)s %(genfiles)s %(username)s@localhost: \n'\
' DONE=$?\n'\
' if [ $DONE != "0" ]; then\n'\
' sleep 1\n'\
' fi\n'\
'done\n\n'\
'ssh %(sshopts)s -p %(ssh)s %(username)s@localhost << \'ENDSSH\'\n'\
'set -x\n'\
'SUDO=%(sudo)s\n'\
'$SUDO hostname %(name)s\n'\
'$SUDO modprobe rina-irati-core\n'\
'$SUDO chmod a+rw /dev/irati\n'\
'\n'\
'$SUDO mv %(genfilesconf)s /etc\n'\
'$SUDO mv %(genfilesbin)s /usr/bin\n'\
'\n' % {'name': vm['name'], 'ssh': vm['ssh'], 'id': vm['id'],
'username': env_dict['username'],
'genfiles': gen_files, 'genfilesconf': gen_files_conf,
'genfilesbin': gen_files_bin, 'vmname': vm['name'],
'sshopts': sshopts, 'sudo': sudo}
for ov in [overlay, per_vm_overlay]:
if ov != '':
outs += '$SUDO cp -r %(ov)s/* /\n'\
'$SUDO rm -rf %(ov)s\n'\
% {'ov': os.path.basename(ov)}
for port in vm['ports']:
outs += 'PORT=$(mac2ifname %(mac)s)\n'\
'$SUDO ip link set $PORT up\n'\
'$SUDO ip link add link $PORT name $PORT.%(vlan)s type vlan id %(vlan)s\n'\
'$SUDO ip link set $PORT.%(vlan)s up\n'\
'$SUDO sed -i "s|ifc%(idx)s|$PORT|g" /etc/shimeth.%(vmname)s.%(vlan)s.dif\n'\
% {'mac': port['mac'], 'idx': port['idx'],
'id': vm['id'], 'vlan': port['vlan'],
'vmname': vm['name']}
outs += '$SUDO modprobe shim-eth-vlan\n'\
'$SUDO modprobe normal-ipcp\n'
outs += '$SUDO modprobe rina-default-plugin\n'\
'$SUDO %(installpath)s/bin/ipcm -a \"%(ipcmcomps)s\" '\
'-c /etc/%(vmname)s.ipcm.conf -l %(verb)s &> log &\n'\
'sleep 1\n'\
'true\n'\
'ENDSSH\n' % {'installpath': env_dict['installpath'],
'vmname': vm['name'], 'verb': args.loglevel,
'ipcmcomps': ipcm_components}
# Run the enrollment operations in an order which respect the dependencies
for dif in dif_ordering:
for enrollment in enrollments[dif]:
vm = vms[enrollment['enrollee']]
print('I am going to enroll %s to DIF %s against neighbor %s, through '\
'lower DIF %s' % (enrollment['enrollee'], dif,
enrollment['enroller'],
enrollment['lower_dif']))
        outs += 'sleep 2\n'  # brief settle delay before starting the enrollment (important!)
outs += ''\
'DONE=255\n'\
'while [ $DONE != "0" ]; do\n'\
' ssh %(sshopts)s -p %(ssh)s %(username)s@localhost << \'ENDSSH\'\n'\
'set -x\n'\
'SUDO=%(sudo)s\n'\
'$SUDO enroll.py --lower-dif %(ldif)s --dif %(dif)s '\
'--ipcm-conf /etc/%(vmname)s.ipcm.conf '\
'--enrollee-name %(vmname)s.%(dif)s '\
'--enroller-name %(enroller)s.%(dif)s\n'\
'sleep 1\n'\
'true\n'\
'ENDSSH\n'\
' DONE=$?\n'\
' if [ $DONE != "0" ]; then\n'\
' sleep 1\n'\
' fi\n'\
'done\n\n' % {'ssh': vm['ssh'], 'id': vm['id'],
'pvid': vms[enrollment['enroller']]['id'],
'username': env_dict['username'],
'vmname': vm['name'],
'enroller': enrollment['enroller'],
'dif': dif, 'ldif': enrollment['lower_dif'],
'sshopts': sshopts, 'sudo': sudo}
fout.write(outs)
fout.close()
subprocess.call(['chmod', '+x', 'up.sh'])
###################### Generate DOWN script ########################
fout = open('down.sh', 'w')
outs = '#!/bin/bash\n' \
'\n' \
'set -x\n' \
'\n' \
'kill_qemu() {\n' \
' PIDFILE=$1\n' \
' PID=$(cat $PIDFILE)\n' \
       '    if [ -n "$PID" ]; then\n' \
' kill $PID\n' \
' while [ -n "$(ps -p $PID -o comm=)" ]; do\n' \
' sleep 1\n' \
' done\n' \
' fi\n' \
'\n' \
' rm $PIDFILE\n' \
'}\n\n'
for vmname in sorted(vms):
vm = vms[vmname]
outs += 'kill_qemu rina-%(id)s.pid\n' % {'id': vm['id']}
outs += '\n'
for vmname in sorted(vms):
vm = vms[vmname]
for port in vm['ports']:
tap = port['tap']
b = port['br']
outs += 'sudo brctl delif %(br)s %(tap)s\n' \
'sudo ip link set %(tap)s down\n' \
'sudo ip tuntap del mode tap name %(tap)s\n\n' \
% {'tap': tap, 'br': b}
for shim in sorted(shims):
outs += 'sudo ip link set %(br)s down\n' \
'sudo brctl delbr %(br)s\n' \
'\n' % {'br': shims[shim]['bridge']}
fout.write(outs)
fout.close()
subprocess.call(['chmod', '+x', 'down.sh'])
################## Generate IPCM/DIF configuration files ##################
ipcmconfs = dict()
# If some app directives were specified, use those to build da.map.
# Otherwise, assume the standard applications are to be mapped in
# the DIF with the highest rank.
if len(app_mappings) == 0:
if len(dif_ordering) > 0:
for adm in gen_templates.da_map_base["applicationToDIFMappings"]:
adm["difName"] = "%s" % (dif_ordering[-1],)
else:
gen_templates.da_map_base["applicationToDIFMappings"] = []
for apm in app_mappings:
gen_templates.da_map_base["applicationToDIFMappings"].append({
"encodedAppName": apm['name'],
"difName": "%s" % (apm['dif'])
})
if args.manager:
# Add MAD/Manager configuration
gen_templates.ipcmconf_base["addons"] = {
"mad": {
"managerConnections" : [ {
"managerAppName" : "manager-1--",
"DIF": "%s" % (mgmt_dif_name)
}
]
}
}
for vmname in vms:
ipcmconfs[vmname] = copy.deepcopy(gen_templates.ipcmconf_base)
difconfs = dict()
for dif in difs:
difconfs[dif] = dict()
for vmname in difs[dif]:
difconfs[dif][vmname] = copy.deepcopy(gen_templates.normal_dif_base)
for vmname in sorted(vms):
vm = vms[vmname]
ipcmconf = ipcmconfs[vmname]
for port in vm['ports']:
ipcmconf["ipcProcessesToCreate"].append({
"difName": port['vlan']
})
template_file_name = 'shimeth.%s.%s.dif' % (vm['name'], port['vlan'])
ipcmconf["difConfigurations"].append({
"name": port['vlan'],
"template": template_file_name
})
fout = open(template_file_name, 'w')
fout.write(json.dumps({"difType": "shim-eth-vlan",
"configParameters": {
"interface-name": "ifc%d" % (port['idx'],)
}
},
indent=4, sort_keys=True))
fout.close()
# Run over dif_ordering array, to make sure each IPCM config has
# the correct ordering for the ipcProcessesToCreate list of operations.
# If we iterated over the difs map, the order would be arbitrary, and so
# some IPCP registrations in lower DIFs may fail. This would happen because
# at the moment of registration, it may be that the IPCP of the lower DIF
# has not been created yet.
for dif in dif_ordering:
if dif in shims:
# Shims are managed separately, in the previous loop
continue
for vmname in difs[dif]:
vm = vms[vmname]
ipcmconf = ipcmconfs[vmname]
normal_ipcp = { "difName": "%s" % (dif,) }
normal_ipcp["difsToRegisterAt"] = []
        for lower_dif in difs[dif][vmname]:
            normal_ipcp["difsToRegisterAt"].append(lower_dif)
ipcmconf["ipcProcessesToCreate"].append(normal_ipcp)
ipcmconf["difConfigurations"].append({
"name": "%s" % (dif),
"template": "normal.%s.%s.dif" % (vmname, dif)
})
        # Fill in the map of IPCP addresses. This could instead be done
        # when difconfs is deep-copied.
for ovm in difs[dif]:
difconfs[dif][ovm]["knownIPCProcessAddresses"].append({
"apName": "%s.%s" % (vmname, dif),
"apInstance": "1",
"address": 16 + vm['id']
})
for policy in dif_policies[dif]:
if policy['nodes'] == [] or vmname in policy['nodes']:
gen_templates.translate_policy(difconfs[dif][vmname], policy['path'],
policy['ps'], policy['parms'])
# Dump the DIF Allocator map
dict_dump_json('da.map', gen_templates.da_map_base, env_dict)
for vmname in vms:
# Dump the IPCM configuration files
env_dict['sysname'] = '%s' %(vmname)
dict_dump_json('%s.ipcm.conf' % (vmname), ipcmconfs[vmname], env_dict)
for dif in difs:
for vmname in difs[dif]:
# Dump the normal DIF configuration files
dict_dump_json('normal.%s.%s.dif' % (vmname, dif,),
difconfs[dif][vmname], env_dict)
# Dump the mapping from nodes to SSH ports
fout = open('gen.map', 'w')
for vmname in sorted(vms):
fout.write('%s %d\n' % (vmname, env_dict['baseport'] + vms[vmname]['id']))
fout.close()
if args.graphviz:
try:
import pydot
colors = ['red', 'green', 'blue', 'orange', 'yellow']
fcolors = ['black', 'black', 'white', 'black', 'black']
gvizg = pydot.Dot(graph_type = 'graph')
i = 0
for dif in difs:
for vmname in dif_graphs[dif]:
node = pydot.Node(dif + vmname,
label = "%s(%s)" % (vmname, dif),
style = "filled", fillcolor = colors[i],
fontcolor = fcolors[i])
gvizg.add_node(node)
for vmname in dif_graphs[dif]:
for (neigh, lower_dif) in dif_graphs[dif][vmname]:
if vmname > neigh:
# Use lexicographical filter to avoid duplicate edges
continue
color = 'black'
# If enrollment is going to happen on this edge, color
# it in red
for enrollment in enrollments[dif]:
ee = enrollment['enrollee']
er = enrollment['enroller']
lo = enrollment['lower_dif']
if lo.endswith(".DIF"):
lo = lo[:-4]
if lower_dif == lo and \
((vmname == ee and neigh == er) or \
(vmname == er and neigh == ee)):
color = 'red'
break
edge = pydot.Edge(dif + vmname, dif + neigh,
label = lower_dif, color = color)
gvizg.add_edge(edge)
i += 1
if i == len(colors):
i = 0
gvizg.write_png('difs.png')
except:
print("Warning: pydot module not installed, cannot produce DIF "\
"graphs images")
|
edugrasa/demonstrator
|
gen.py
|
Python
|
gpl-2.0
| 37,281 | 0.005526 |
"""Test config flow."""
from unittest.mock import Mock, patch
from homeassistant.components.hassio.handler import HassioAPIError
from homeassistant.const import EVENT_HOMEASSISTANT_START
from homeassistant.setup import async_setup_component
from tests.common import mock_coro
async def test_hassio_discovery_startup(hass, aioclient_mock, hassio_client):
"""Test startup and discovery after event."""
aioclient_mock.get(
"http://127.0.0.1/discovery",
json={
"result": "ok",
"data": {
"discovery": [
{
"service": "mqtt",
"uuid": "test",
"addon": "mosquitto",
"config": {
"broker": "mock-broker",
"port": 1883,
"username": "mock-user",
"password": "mock-pass",
"protocol": "3.1.1",
},
}
]
},
},
)
aioclient_mock.get(
"http://127.0.0.1/addons/mosquitto/info",
json={"result": "ok", "data": {"name": "Mosquitto Test"}},
)
assert aioclient_mock.call_count == 0
with patch(
"homeassistant.components.mqtt." "config_flow.FlowHandler.async_step_hassio",
Mock(return_value=mock_coro({"type": "abort"})),
) as mock_mqtt:
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
assert aioclient_mock.call_count == 2
assert mock_mqtt.called
mock_mqtt.assert_called_with(
{
"broker": "mock-broker",
"port": 1883,
"username": "mock-user",
"password": "mock-pass",
"protocol": "3.1.1",
"addon": "Mosquitto Test",
}
)
async def test_hassio_discovery_startup_done(hass, aioclient_mock, hassio_client):
"""Test startup and discovery with hass discovery."""
aioclient_mock.get(
"http://127.0.0.1/discovery",
json={
"result": "ok",
"data": {
"discovery": [
{
"service": "mqtt",
"uuid": "test",
"addon": "mosquitto",
"config": {
"broker": "mock-broker",
"port": 1883,
"username": "mock-user",
"password": "mock-pass",
"protocol": "3.1.1",
},
}
]
},
},
)
aioclient_mock.get(
"http://127.0.0.1/addons/mosquitto/info",
json={"result": "ok", "data": {"name": "Mosquitto Test"}},
)
with patch(
"homeassistant.components.hassio.HassIO.update_hass_api",
Mock(return_value=mock_coro({"result": "ok"})),
), patch(
"homeassistant.components.hassio.HassIO." "get_homeassistant_info",
Mock(side_effect=HassioAPIError()),
), patch(
"homeassistant.components.mqtt." "config_flow.FlowHandler.async_step_hassio",
Mock(return_value=mock_coro({"type": "abort"})),
) as mock_mqtt:
await hass.async_start()
await async_setup_component(hass, "hassio", {})
await hass.async_block_till_done()
assert aioclient_mock.call_count == 2
assert mock_mqtt.called
mock_mqtt.assert_called_with(
{
"broker": "mock-broker",
"port": 1883,
"username": "mock-user",
"password": "mock-pass",
"protocol": "3.1.1",
"addon": "Mosquitto Test",
}
)
async def test_hassio_discovery_webhook(hass, aioclient_mock, hassio_client):
"""Test discovery webhook."""
aioclient_mock.get(
"http://127.0.0.1/discovery/testuuid",
json={
"result": "ok",
"data": {
"service": "mqtt",
"uuid": "test",
"addon": "mosquitto",
"config": {
"broker": "mock-broker",
"port": 1883,
"username": "mock-user",
"password": "mock-pass",
"protocol": "3.1.1",
},
},
},
)
aioclient_mock.get(
"http://127.0.0.1/addons/mosquitto/info",
json={"result": "ok", "data": {"name": "Mosquitto Test"}},
)
with patch(
"homeassistant.components.mqtt." "config_flow.FlowHandler.async_step_hassio",
Mock(return_value=mock_coro({"type": "abort"})),
) as mock_mqtt:
resp = await hassio_client.post(
"/api/hassio_push/discovery/testuuid",
json={"addon": "mosquitto", "service": "mqtt", "uuid": "testuuid"},
)
await hass.async_block_till_done()
assert resp.status == 200
assert aioclient_mock.call_count == 2
assert mock_mqtt.called
mock_mqtt.assert_called_with(
{
"broker": "mock-broker",
"port": 1883,
"username": "mock-user",
"password": "mock-pass",
"protocol": "3.1.1",
"addon": "Mosquitto Test",
}
)
|
leppa/home-assistant
|
tests/components/hassio/test_discovery.py
|
Python
|
apache-2.0
| 5,530 | 0.000723 |
import posixpath
class UrlPackage:
""" Represents a package specified as a Url """
def __init__(self, url):
""" Initialize with the url """
if ':' in url:
self.url = url
else:
self.url = posixpath.join('git+git://github.com', url)
@property
def installAs(self):
""" Return the string to use to Install the package via pip """
return self.url
def forRequirements(self, versions):
""" Return the text to use for adding to the Requirements file """
return self.url
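# Illustrative usage (hypothetical package paths):
#   UrlPackage('user/repo').installAs  -> 'git+git://github.com/user/repo'
#   UrlPackage('git+https://example.com/p.git').installAs  -> unchanged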
|
cloew/tidypip
|
tidypip/packages/url_package.py
|
Python
|
mit
| 604 | 0.008278 |
class Solution:
def minDeletionSize(self, A):
"""
:type A: List[str]
:rtype: int
"""
rows, cols = len(A), len(A[0])
if rows == 1: return 0
ans = 0
for j in range(cols):
sort = True
for i in range(1, rows):
if A[i][j] < A[i-1][j]:
sort = False
break
if not sort: ans += 1
return ans
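# Example: Solution().minDeletionSize(["cba", "daf", "ghi"]) returns 1,
# since only the middle column ('b', 'a', 'h') is out of order.
# Runs in O(rows * cols) time.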
|
YiqunPeng/Leetcode-pyq
|
solutions/944DeleteColumnsToMakeSorted.py
|
Python
|
gpl-3.0
| 467 | 0.010707 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "buildops.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
|
lukewink/buildops
|
manage.py
|
Python
|
gpl-3.0
| 806 | 0 |
''' Module containing tests for the network data structure
'''
import unittest
from power_grid import network
class TestNetwork(unittest.TestCase):
''' Class containing all unit tests related to
the network data structure.
'''
def test_randomly_generated_network(self):
        ''' Check that the generated network's mean node degree matches
        the requested average connectivity.
        '''
number_of_nodes = 100
average_connectivity = 2
my_net = network.RandomNetworkGenerator(
num_nodes=number_of_nodes,
average_connectivity=average_connectivity)
average_connectivity_computed = sum(
len(node.edges) for node in my_net.nodes)/number_of_nodes
self.assertAlmostEqual(
average_connectivity,
average_connectivity_computed,
delta=10e-6)
|
ABM-project/power-grid
|
test/test_network.py
|
Python
|
mit
| 814 | 0.001229 |
input = """
:- not b.
b :- a, not a.
a v c.
"""
output = """
"""
|
Yarrick13/hwasp
|
tests/wasp1/AllAnswerSets/choice_30.test.py
|
Python
|
apache-2.0
| 66 | 0 |
"""Add archived field to custom form
Revision ID: 391b3fa1471
Revises: 473f91b5874
Create Date: 2016-01-23 15:51:40.304025
"""
# revision identifiers, used by Alembic.
revision = '391b3fa1471'
down_revision = '473f91b5874'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('custom_form', sa.Column('archived', sa.Boolean(), nullable=True))
op.alter_column('custom_form', 'price',
existing_type=mysql.FLOAT(),
nullable=True,
existing_server_default=sa.text("'0'"))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.alter_column('custom_form', 'price',
existing_type=mysql.FLOAT(),
nullable=False,
existing_server_default=sa.text("'0'"))
op.drop_column('custom_form', 'archived')
### end Alembic commands ###
|
viaict/viaduct
|
migrations/versions/2016_01_23_391b3fa1471_add_archived_field_to_custom_form.py
|
Python
|
mit
| 1,019 | 0.01472 |
import numpy as np
from Interpolation import *
class HermiteInterpolation(Interpolation):
def __init__(self, x, y, yDrv):
Interpolation.__init__(self, x, y)
sizeyDrv = yDrv.shape
if not (len(sizeyDrv) == 1 or (len(sizeyDrv) == 2 and sizeyDrv[0] == 1)):
raise ValueError("Size of Parameter should be vector or one dimension matrix based on np.ndarray")
if sizeyDrv[0] != self.size:
raise ValueError("Size of Parameters should be same")
self.yDrv = yDrv.reshape(1, self.size)
def predict(self, x):
        if not isinstance(x, np.ndarray):
            raise TypeError("x should be a numpy.ndarray")
        else:
            if len(x.shape) != 1:
                raise TypeError("x should be a one-dimensional numpy.ndarray")
y = []
l = np.empty(self.size)
alpha = np.empty(self.size)
alpha = alpha.reshape(self.size, 1)
beta = np.empty(self.size)
beta = beta.reshape(self.size, 1)
li = np.empty(self.size - 1)
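        # Hermite interpolation formula (for reference):
        #   H(x) = sum_i y_i * alpha_i(x) + sum_i y'_i * beta_i(x)
        # where l_i are the Lagrange basis polynomials,
        #   alpha_i(x) = [1 - 2 * l_i'(x_i) * (x - x_i)] * l_i(x)^2
        #   beta_i(x)  = (x - x_i) * l_i(x)^2
        # and l_i'(x_i) = sum_{j != i} 1 / (x_i - x_j).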
for xi in x:
lNume = np.linspace(xi, xi, self.size) - self.x[0]
for i in range(self.size):
lDeno = np.linspace(self.x[0][i], self.x[0][i], self.size) - self.x[0]
li[ : i] = lNume[ : i] / lDeno[ : i]
li[i : ] = lNume[i + 1 : ] / lDeno[i + 1 : ]
l[i] = li.cumprod()[-1]
alpha[i][0] = (1 - 2 * (xi - self.x[0][i]) * ((1 / lDeno[ : i]).sum() + (1 / lDeno[i + 1 : ]).sum())) * l[i] * l[i]
beta[i][0] = (xi - self.x[0][i]) * l[i] * l[i]
y.append(np.dot(self.y, alpha)[0][0] + np.dot(self.yDrv, beta)[0][0])
return np.array(y)
|
LanceVan/SciCycle
|
HermiteInterpolation.py
|
Python
|
mit
| 1,740 | 0.011494 |
import copy
from django import http
from django.core import serializers
from django.core.exceptions import ImproperlyConfigured
from django.template import RequestContext
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
class View(object):
"""
Parent class for all views.
"""
def __init__(self, *args, **kwargs):
# TODO: Check if request is in *args and raise warning
self._load_config_values(kwargs,
context_processors = None,
mimetype = 'text/html',
template_loader = None,
template_name = None,
decorators = [],
allowed_methods = ['GET',],
strict_allowed_methods = False,
allowed_formats = ['html',],
default_format = 'html',
format_mimetypes = {
'html': 'text/html'
},
extra_context = {},
)
if kwargs:
raise TypeError("__init__() got an unexpected keyword argument '%s'" % iter(kwargs).next())
def __call__(self, request, *args, **kwargs):
view = copy.copy(self)
view.request = request
view.parse_params(*args, **kwargs)
callback = view.get_callback()
if callback:
# The request is passed around with args and kwargs like this so
# they appear as views for decorators
return callback(*args, **kwargs)
allowed_methods = [m for m in view.allowed_methods if hasattr(view, m)]
return http.HttpResponseNotAllowed(allowed_methods)
def parse_params(self, *args, **kwargs):
"""
this method is used to parse the parameters from the url
and store them on self.
"""
for key in kwargs:
setattr(self, key, kwargs[key])
def get_callback(self):
"""
Based on the request's HTTP method, get the callback on this class that
returns a response. If the method isn't allowed, None is returned.
"""
method = self.request.method.upper()
if method not in self.allowed_methods:
if self.strict_allowed_methods:
return None
else:
method = 'GET'
callback = getattr(self, method, getattr(self, 'GET', None))
return callback
def GET(self, *args, **kwargs):
content = self.get_content(*args, **kwargs)
mimetype = self.get_mimetype()
return self.get_response(content, mimetype=mimetype)
def get_response(self, content, **httpresponse_kwargs):
"""
Construct an `HttpResponse` object.
"""
return http.HttpResponse(content, **httpresponse_kwargs)
def get_content(self, *args, **kwargs):
"""
Get the content to go in the response.
"""
format = self.get_format()
return getattr(self, 'render_%s' % format)(*args, **kwargs)
def get_resource(self, *args, **kwargs):
"""
Get a dictionary representing the resource for this view.
"""
return {}
def get_mimetype(self):
"""
Get the mimetype to be used for the response.
"""
return self.format_mimetypes[self.get_format()]
def get_format(self):
"""
Get the format for the content, defaulting to ``default_format``.
The format is usually a short string to identify the format of the
content in the response. For example, 'html' or 'json'.
"""
format = self.request.GET.get('format', self.default_format)
if format not in self.allowed_formats:
format = self.default_format
return format
def render_html(self, *args, **kwargs):
"""
Render a template with a given resource
"""
context = self.get_context()
return self.get_template().render(context)
def get_template(self):
"""
Get a ``Template`` object for the given request.
"""
names = self.get_template_names()
if not names:
raise ImproperlyConfigured("'%s' must provide template_name."
% self.__class__.__name__)
return self.load_template(names)
def get_template_names(self):
"""
Return a list of template names to be used for the request. Must return
a list. May not be called if get_template is overridden.
"""
if self.template_name is None:
return []
elif isinstance(self.template_name, basestring):
return [self.template_name]
else:
return self.template_name
def load_template(self, names=[]):
"""
Load a template, using self.template_loader or the default.
"""
return self.get_template_loader().select_template(names)
def get_template_loader(self):
"""
Get the template loader to be used for this request. Defaults to
``django.template.loader``.
"""
import django.template.loader
return self.template_loader or django.template.loader
def get_context(self):
"""
Get the template context. Must return a Context (or subclass) instance.
"""
dictionary = self.get_resource()
for key, value in self.extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
context_processors = self.get_context_processors()
return RequestContext(self.request, dictionary, context_processors)
def get_context_processors(self):
"""
Get the template context processors to be used.
"""
return self.context_processors
def _load_config_values(self, initkwargs, **defaults):
"""
Set on self some config values possibly taken from __init__, or
attributes on self.__class__, or some default.
"""
for k in defaults:
default = getattr(self.__class__, k, defaults[k])
value = initkwargs.pop(k, default)
setattr(self, k, value)
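# A minimal subclass might look like this (illustrative, not part of the
# original module):
#
# class HelloView(View):
#     template_name = 'hello.html'
#
#     def get_resource(self, *args, **kwargs):
#         return {'greeting': 'hello'}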
|
livni/old-OK
|
src/knesset/hashnav/base.py
|
Python
|
bsd-3-clause
| 6,270 | 0.008931 |
from __future__ import division
def bound(c,(bndH,method),d1,f,g,d2,D,A=None,b=None,E=None,d=None,U=None,optimality=None,initialRadius=None):
# Norm function
from numpy import ones, sqrt
from numpy.linalg import norm
# Our own functions
from trsq import trsq
from mest import mest
from domain_reduction import RR
lr, ur = RR(c,bndH,f,g,D,A,b,E,d,U,optimality)
if optimality is not None:
        # bounding box containing the original ball
l1 = c.xc - c.r * ones(D)
u1 = c.xc + c.r * ones(D)
# Radius and centre ball containing the reduced box
r = norm(ur-lr)/2 # Radius
xc = (ur+lr)/2 # Centre
# bounds box containing ball around domain reduced
l2 = xc - r * ones(D)
u2 = xc + r * ones(D)
check = 1
for i in range(0,D):
if l2[i]<l1[i] or u2[i]>u1[i]:
check = 0
break
        # if the reduced box is not completely contained in the original box
if check == 0:
            # Revert to the original ball depending on the level of depth (= initialRadius/const)
#if c.r >= initialRadius/8:
# l=l1
# u=u1
# xc = c.xc
# r = c.r
#else:
r = r/sqrt(D)
l = xc - r * ones(D)
u = xc + r * ones(D)
else:
l = l2
u = u2
else:
l = lr
u = ur
xc = c.xc
r = c.r
# Get bounds on RBF Hessian over [l,u]
LH, UH = bndH(l, u)
# Estimate smallest eigenvalue on [l,u]
k = mest(LH,UH,method)
# Evaluate function and gradient at xc
fxc = f(xc)
gxc = g(xc)
# If gradient nonzero use trsq
if(norm(gxc) > 1e-10):
lbound,_ = trsq(k, r, xc, fxc, gxc)
else: # Calculate lower bound when gradient zero
if(k < 0):
lbound = fxc + k*(r**2)
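            # (Second-order Taylor bound with a vanishing gradient:
            # f >= f(xc) + (k/2)*r^2 for k a lower bound on the Hessian
            # eigenvalues; fxc + k*r^2 is the slightly more conservative
            # variant used here for k < 0.)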
else:
lbound = fxc
return lbound,lr,ur
|
coin-or/oBB
|
obb/lboundme_loc_rr.py
|
Python
|
lgpl-3.0
| 2,004 | 0.021956 |
from pysnmp.entity.rfc3413.oneliner import cmdgen
from pysnmp.proto import rfc1902
import time
def add_menu_entry(button, func, user, host, appnum,
duration=360, brid=''):
global cmdGen, authData, transportTarget
cmd = 'TYPE=BR;BUTTON=%s;USER="%s";FUNC=%s;HOST=%s;APPNUM=%s;DURATION=%s;BRID=%s;'%(button, user, func, host, appnum, duration, brid)
#print('Registering:', cmd)
errorIndication, errorStatus, errorIndex, varBinds = cmdGen.setCmd(
authData, transportTarget,
('1.3.6.1.4.1.2435.2.3.9.2.11.1.1.0', rfc1902.OctetString(cmd))
)
# See http://www.oidview.com/mibs/2435/BROTHER-MIB.html
# Check for errors and print out results
if errorIndication:
print(errorIndication)
else:
if errorStatus:
print('%s at %s' % (
errorStatus.prettyPrint(),
errorIndex and varBinds[int(errorIndex)-1] or '?'))
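# Illustrative call (hypothetical host and function names; valid FUNC
# values depend on the scanner's firmware):
#   add_menu_entry('SCAN', 'IMAGE', 'alice', '192.168.1.10:54925', 1)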
def launch(args, config):
global cmdGen, authData, transportTarget
cmdGen = cmdgen.CommandGenerator()
authData = cmdgen.CommunityData('internal', mpModel=0)
transportTarget = cmdgen.UdpTransportTarget((args.scanner_addr, 161))
addr = (args.advertise_addr, args.advertise_port)
print('Advertising %s:%d to %s' % (addr + (args.scanner_addr,)))
for func, users in config['menu'].items():
for user, entry in users.items():
print('Entry:', func.upper(), user, entry)
    while True:
print('Advertising to scanner')
appnum = 1
for func, users in config['menu'].items():
for user, entry in users.items():
add_menu_entry('SCAN', func.upper(), user,
'%s:%d'%(addr), appnum)
appnum += 1
time.sleep(60)
|
esben/brother-scan
|
brscan/snmp.py
|
Python
|
gpl-3.0
| 1,777 | 0.003376 |
from . import Model, CollectionModel
class Contact(Model):
"""
A Contact object model
.. attribute:: id
.. attribute:: phone
.. attribute:: email
.. attribute:: firstName
.. attribute:: lastName
.. attribute:: companyName
.. attribute:: country
Dictionary like this:
::
{
"id": "US",
"name": "United States"
}
.. attribute:: customFields
List of dictionaries, each looks like this:
::
{
"value": "30",
"id": "1044",
"name": "Age",
"createdAt": "2015-04-27T09:29:46+0000"
}
"""
class Contacts(CollectionModel):
name = "contacts"
instance = Contact
def list(self, search=False, **kwargs):
"""
Returns a list of :class:`Contact` objects and a pager dict.
:Example:
contacts, pager = client.contacts.list()
:param bool search: If True then search contacts using `query`, `ids` and/or `listId`. Default=False
:param int page: Fetch specified results page. Default=1
:param int limit: How many results on page. Default=10
:param int shared: Should shared contacts to be included. Default=0
:param str ids: Find contact by ID(s). Using with `search`=True.
:param int listId: Find contact by List ID. Using with `search`=True.
:param str query: Find contact by specified search query. Using with `search`=True.
"""
kwargs["search"] = search
return self.get_instances(kwargs)
def create(self, **kwargs):
"""
Create a new contact.
Returns :class:`Contact` object contains id and link to Contact.
:Example:
c = client.contacts.create(firstName="John", lastName="Doe", phone="19025555555", lists="1901901")
:param str firstName:
:param str lastName:
:param str phone: Contact's phone number. Required.
:param str email:
:param str companyName:
:param str country: 2-letter ISO country code.
:param str lists: String of Lists separated by commas to assign contact. Required.
"""
return self.create_instance(kwargs)
def update(self, uid, **kwargs):
"""
Updates the existing Contact for the given unique id.
Returns :class:`Contact` object contains id and link to Contact.
:Example:
client.contacts.update(uid=7981278, firstName="John", lastName="Doe", phone="19025555555", lists="1901901")
:param int uid: The unique id of the Contact to update. Required.
:param str firstName:
:param str lastName:
:param str phone: Contact's phone number. Required.
:param str email:
:param str companyName:
:param str lists: String of Lists separated by commas to assign contact. Required.
"""
return self.update_instance(uid, kwargs)
def delete(self, uid):
"""
Delete the specified Contact from TextMagic.
Returns True if success.
:Example:
client.contacts.delete(1901010)
:param int uid: The unique id of the Contact to delete.
"""
return self.delete_instance(uid)
def lists(self, uid=0, **kwargs):
"""
Returns a list of :class:`List` objects (lists which Contact belongs to) and a pager dict.
:Example:
lists, pager = client.contacts.lists(uid=1901010)
:param int uid: The unique id of the Contact to update. Required.
:param int page: Fetch specified results page. Default=1
:param int limit: How many results on page. Default=10
"""
lists = Lists(self.base_uri, self.auth)
return self.get_subresource_instances(uid, instance=lists,
resource="lists", params=kwargs)
class List(Model):
"""
A List object model
.. attribute:: id
.. attribute:: name
.. attribute:: description
.. attribute:: membersCount
.. attribute:: shared
"""
class Lists(CollectionModel):
name = "lists"
instance = List
def list(self, search=False, **kwargs):
"""
Returns a list of :class:`List` objects and a pager dict.
:Example:
lists, pager = client.lists.list()
:param bool search: If True then search lists using `ids` and/or `query`. Default=False
:param int page: Fetch specified results page. Default=1
:param int limit: How many results on page. Default=10
:param str ids: Find lists by ID(s). Using with `search`=True.
:param str query: Find lists by specified search query. Using with `search`=True.
"""
kwargs["search"] = search
return self.get_instances(kwargs)
def create(self, **kwargs):
"""
Create a new list.
Returns :class:`List` object contains id and link to List.
:Example:
list = client.lists.create(name="My List")
:param str name: List name. Required.
:param str description: List description.
:param int shared: Should this list be shared with sub-accounts. Can be 1 or 0. Default=0.
"""
return self.create_instance(kwargs)
def update(self, uid, **kwargs):
"""
Updates the List for the given unique id.
Returns :class:`List` object contains id and link to List.
:Example:
list = client.lists.update(uid=1901010, name="My List")
:param int uid: The unique id of the List to update. Required.
:param str name: List name. Required.
:param str description: List description.
:param int shared: Should this list be shared with sub-accounts. Can be 1 or 0. Default=0.
"""
return self.update_instance(uid, kwargs)
def delete(self, uid):
"""
Delete the specified List from TextMagic.
Returns True if success.
:Example:
client.lists.delete(1901010)
:param int uid: The unique id of the List to delete. Required.
"""
return self.delete_instance(uid)
def contacts(self, uid=0, **kwargs):
"""
Fetch user contacts by given group id.
A useful synonym for "contacts/search" command with provided `groupId` parameter.
:Example:
lists = client.lists.contacts(1901010)
:param int uid: The unique id of the List. Required.
:param int page: Fetch specified results page. Default=1
:param int limit: How many results on page. Default=10
"""
contacts = Contacts(self.base_uri, self.auth)
return self.get_subresource_instances(uid, instance=contacts,
resource="contacts", params=kwargs)
def put_contacts(self, uid, **kwargs):
"""
Assign contacts to the specified list.
:Example:
client.lists.put_contacts(uid=1901010, contacts="1723812,1239912")
:param int uid: The unique id of the List. Required.
:param str contacts: Contact ID(s), separated by comma. Required.
"""
return self.update_subresource_instance(uid,
body=kwargs,
subresource=None,
slug="contacts")
def delete_contacts(self, uid, **kwargs):
"""
Unassign contacts from the specified list.
If contacts assign only to the specified list, then delete permanently.
Returns True if success.
:Example:
client.lists.delete_contacts(uid=1901010, contacts="1723812,1239912")
:param int uid: The unique id of the List. Required.
:param str contacts: Contact ID(s), separated by comma. Required.
"""
uri = "%s/%s/contacts" % (self.uri, uid)
response, instance = self.request("DELETE", uri, data=kwargs)
return response.status == 204
|
textmagic/textmagic-rest-python
|
textmagic/rest/models/contacts.py
|
Python
|
mit
| 8,232 | 0.001944 |
# /bin/sh
def generate(cmd='/bin/sh'):
"""Executes cmd
Args:
cmd(str): executes cmd (default: ``/bin/sh``)
"""
sc = """
adr r0, bin_sh_1
mov r2, #0
push {r0, r2}
mov r1, sp
movw r7, #11
svc 1
bin_sh_1:
.asciz "%s"
""" % (cmd) # sometimes we have to change to specific things like id
return sc
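# Register usage (for reference): r0 points at the command string;
# push {r0, r2} builds argv = {cmd, NULL} on the stack (r2 is 0); r1 = sp;
# r2 stays 0 as the NULL envp; r7 = 11 selects execve; "svc 1" traps into
# the kernel.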
|
alexpark07/ARMSCGen
|
shellcodes/arm/sh.py
|
Python
|
gpl-2.0
| 355 | 0.005634 |
#!/usr/bin/env python
import rospy
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
import cv2
from flexbe_core import EventState, Logger
from flexbe_core.proxy import ProxySubscriberCached
from sensor_msgs.msg import PointCloud2
class TakePictureState(EventState):
'''
Stores the picture of the given topic.
    #> Image Image The received camera image.
<= done The picture has been received and stored.
'''
def __init__(self):
super(TakePictureState, self).__init__(outcomes = ['done'], output_keys = ['Image'])
self._topic = '/head_xtion/rgb/image_rect_color'
self._sub = ProxySubscriberCached({self._topic:Image})
def execute(self, userdata):
if self._sub.has_msg(self._topic):
userdata.Image = self._sub.get_last_msg(self._topic)
print 'take_picture_state: took a picture'
return 'done'
|
pschillinger/lamor15
|
lamor_flexbe_states/src/lamor_flexbe_states/take_picture_state.py
|
Python
|
mit
| 877 | 0.028506 |
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.camino.convert import ProcStreamlines
def test_ProcStreamlines_inputs():
input_map = dict(allowmultitargets=dict(argstr='-allowmultitargets',
),
args=dict(argstr='%s',
),
datadims=dict(argstr='-datadims %s',
units='voxels',
),
directional=dict(argstr='-directional %s',
units='NA',
),
discardloops=dict(argstr='-discardloops',
),
endpointfile=dict(argstr='-endpointfile %s',
),
environ=dict(nohash=True,
usedefault=True,
),
exclusionfile=dict(argstr='-exclusionfile %s',
),
gzip=dict(argstr='-gzip',
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
in_file=dict(argstr='-inputfile %s',
mandatory=True,
position=1,
),
inputmodel=dict(argstr='-inputmodel %s',
usedefault=True,
),
iterations=dict(argstr='-iterations %d',
units='NA',
),
maxtractlength=dict(argstr='-maxtractlength %d',
units='mm',
),
maxtractpoints=dict(argstr='-maxtractpoints %d',
units='NA',
),
mintractlength=dict(argstr='-mintractlength %d',
units='mm',
),
mintractpoints=dict(argstr='-mintractpoints %d',
units='NA',
),
noresample=dict(argstr='-noresample',
),
out_file=dict(argstr='> %s',
genfile=True,
position=-1,
),
outputacm=dict(argstr='-outputacm',
requires=['outputroot', 'seedfile'],
),
outputcbs=dict(argstr='-outputcbs',
requires=['outputroot', 'targetfile', 'seedfile'],
),
outputcp=dict(argstr='-outputcp',
requires=['outputroot', 'seedfile'],
),
outputroot=dict(argstr='-outputroot %s',
),
outputsc=dict(argstr='-outputsc',
requires=['outputroot', 'seedfile'],
),
outputtracts=dict(argstr='-outputtracts',
),
regionindex=dict(argstr='-regionindex %d',
units='mm',
),
resamplestepsize=dict(argstr='-resamplestepsize %d',
units='NA',
),
seedfile=dict(argstr='-seedfile %s',
),
seedpointmm=dict(argstr='-seedpointmm %s',
units='mm',
),
seedpointvox=dict(argstr='-seedpointvox %s',
units='voxels',
),
targetfile=dict(argstr='-targetfile %s',
),
terminal_output=dict(nohash=True,
),
truncateinexclusion=dict(argstr='-truncateinexclusion',
),
truncateloops=dict(argstr='-truncateloops',
),
voxeldims=dict(argstr='-voxeldims %s',
units='mm',
),
waypointfile=dict(argstr='-waypointfile %s',
),
)
inputs = ProcStreamlines.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_ProcStreamlines_outputs():
output_map = dict(outputroot_files=dict(),
proc=dict(),
)
outputs = ProcStreamlines.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
|
JohnGriffiths/nipype
|
nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py
|
Python
|
bsd-3-clause
| 3,121 | 0.030119 |
#!/usr/bin/env python
import traceback
import os
import re
import sys
if __name__ != "__main__": # we're imported as a module
_registered = []
_tests = 0
_fails = 0
def wvtest(func):
""" Use this decorator (@wvtest) in front of any function you want to run
as part of the unit test suite. Then run:
python wvtest.py path/to/yourtest.py
to run all the @wvtest functions in that file.
"""
_registered.append(func)
return func
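    # Illustrative usage in a test file:
    #
    #   @wvtest
    #   def test_arith():
    #       WVPASSEQ(1 + 1, 2)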
def _result(msg, tb, code):
global _tests, _fails
_tests += 1
if code != 'ok':
_fails += 1
(filename, line, func, text) = tb
filename = os.path.basename(filename)
msg = re.sub(r'\s+', ' ', str(msg))
sys.stderr.flush()
print '! %-70s %s' % ('%s:%-4d %s' % (filename, line, msg),
code)
sys.stdout.flush()
def _check(cond, msg = 'unknown', tb = None):
        if tb is None: tb = traceback.extract_stack()[-3]
if cond:
_result(msg, tb, 'ok')
else:
_result(msg, tb, 'FAILED')
return cond
def _code():
(filename, line, func, text) = traceback.extract_stack()[-3]
        text = re.sub(r'^\w+\((.*)\)(\s*#.*)?$', r'\1', text)
return text
def WVPASS(cond = True):
''' Counts a test failure unless cond is true. '''
return _check(cond, _code())
def WVFAIL(cond = True):
''' Counts a test failure unless cond is false. '''
return _check(not cond, 'NOT(%s)' % _code())
def WVPASSEQ(a, b):
''' Counts a test failure unless a == b. '''
return _check(a == b, '%s == %s' % (repr(a), repr(b)))
def WVPASSNE(a, b):
''' Counts a test failure unless a != b. '''
return _check(a != b, '%s != %s' % (repr(a), repr(b)))
def WVPASSLT(a, b):
''' Counts a test failure unless a < b. '''
return _check(a < b, '%s < %s' % (repr(a), repr(b)))
def WVPASSLE(a, b):
''' Counts a test failure unless a <= b. '''
return _check(a <= b, '%s <= %s' % (repr(a), repr(b)))
def WVPASSGT(a, b):
''' Counts a test failure unless a > b. '''
return _check(a > b, '%s > %s' % (repr(a), repr(b)))
def WVPASSGE(a, b):
''' Counts a test failure unless a >= b. '''
return _check(a >= b, '%s >= %s' % (repr(a), repr(b)))
def WVEXCEPT(etype, func, *args, **kwargs):
''' Counts a test failure unless func throws an 'etype' exception.
You have to spell out the function name and arguments, rather than
calling the function yourself, so that WVEXCEPT can run before
your test code throws an exception.
'''
try:
func(*args, **kwargs)
except etype, e:
return _check(True, 'EXCEPT(%s)' % _code())
except:
_check(False, 'EXCEPT(%s)' % _code())
raise
else:
return _check(False, 'EXCEPT(%s)' % _code())
else: # we're the main program
# NOTE
# Why do we do this in such a convoluted way? Because if you run
# wvtest.py as a main program and it imports your test files, then
# those test files will try to import the wvtest module recursively.
# That actually *works* fine, because we don't run this main program
# when we're imported as a module. But you end up with two separate
# wvtest modules, the one that gets imported, and the one that's the
# main program. Each of them would have duplicated global variables
# (most importantly, wvtest._registered), and so screwy things could
# happen. Thus, we make the main program module *totally* different
# from the imported module. Then we import wvtest (the module) into
# wvtest (the main program) here and make sure to refer to the right
# versions of global variables.
#
# All this is done just so that wvtest.py can be a single file that's
# easy to import into your own applications.
import wvtest
def _runtest(modname, fname, f):
print
print 'Testing "%s" in %s.py:' % (fname, modname)
sys.stdout.flush()
try:
f()
except Exception, e:
print
print traceback.format_exc()
tb = sys.exc_info()[2]
wvtest._result(e, traceback.extract_tb(tb)[1], 'EXCEPTION')
# main code
for modname in sys.argv[1:]:
if not os.path.exists(modname):
print 'Skipping: %s' % modname
continue
if modname.endswith('.py'):
modname = modname[:-3]
print 'Importing: %s' % modname
wvtest._registered = []
oldwd = os.getcwd()
oldpath = sys.path
try:
path, mod = os.path.split(os.path.abspath(modname))
os.chdir(path)
sys.path += [path, os.path.split(path)[0]]
mod = __import__(modname.replace(os.path.sep, '.'), None, None, [])
for t in wvtest._registered:
_runtest(modname, t.func_name, t)
print
finally:
os.chdir(oldwd)
sys.path = oldpath
print
print 'WvTest: %d tests, %d failures.' % (wvtest._tests, wvtest._fails)
|
Zearin/git-bup
|
wvtest.py
|
Python
|
lgpl-2.1
| 5,331 | 0.003189 |
# -*- coding: utf-8 -*-
# _______ _______ _______ _______ _
# |\ /|( ___ )( ____ )( )( ___ )( ( /||\ /|
# | ) ( || ( ) || ( )|| () () || ( ) || \ ( |( \ / )
# | (___) || (___) || (____)|| || || || | | || \ | | \ (_) /
# | ___ || ___ || __)| |(_)| || | | || (\ \) | \ /
# | ( ) || ( ) || (\ ( | | | || | | || | \ | ) (
# | ) ( || ) ( || ) \ \__| ) ( || (___) || ) \ | | |
# |/ \||/ \||/ \__/|/ \|(_______)|/ )_) \_/
#
# Copyright (C) 2016 Laurynas Riliskis
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Created on 3/9/16.
class SqlBuilder:
CONCAT = "res.tablesWithJoins += "
HAS_COLUMNS = ".hasColumns(projection)"
OPEN_BRACE = ") {"
CLOSE_BRACE = "}"
IF = "if ("
OR = " || "
INDENT1 = " "
INDENT2 = " "
PLUS = " + "
COLUMNS = "Columns"
TABLE_NAME = ".TABLE_NAME"
LEFT_OUTER_JOIN = "\" LEFT OUTER JOIN \""
ON = "\" ON \""
EQUALS = "\"=\""
DOT = "\".\""
AS = "\" AS \""
PREFIX = ".PREFIX_"
NEW_LINE = "\n"
CHAR_SEMICOLON = ";"
CHAR_DOT = "."
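    # The class methods below concatenate these fragments into Java source.
    # For a model Foo with a foreign key to Bar, the emitted snippet looks
    # roughly like the following (illustrative only; the exact identifiers
    # depend on the model definition):
    #   if (BarColumns.hasColumns(projection)) {
    #       res.tablesWithJoins += " LEFT OUTER JOIN " + BarColumns.TABLE_NAME
    #               + " AS " + FooColumns.PREFIX_BAR + " ON " + ...;
    #   }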
@classmethod
def add_all_joined_clauses(cls, model, alias):
ret = ""
for field in model.fields:
foreign_key = field.foreign_key
            if not foreign_key:
continue
ret += SqlBuilder.NEW_LINE
ret += SqlBuilder.INDENT1
ret += SqlBuilder.IF
ret += SqlBuilder.column_clauses(foreign_key.model)
ret += SqlBuilder.OPEN_BRACE
ret += SqlBuilder.NEW_LINE
ret += SqlBuilder.INDENT2
ret += SqlBuilder.CONCAT
ret += SqlBuilder.LEFT_OUTER_JOIN
ret += SqlBuilder.PLUS
ret += field.foreign_key.model.name_camel_case
ret += SqlBuilder.COLUMNS
ret += SqlBuilder.TABLE_NAME
ret += SqlBuilder.PLUS
ret += SqlBuilder.AS
ret += SqlBuilder.PLUS
            ret += SqlBuilder.table_prefix(model, foreign_key)
ret += SqlBuilder.PLUS
ret += SqlBuilder.ON
ret += SqlBuilder.PLUS
ret += SqlBuilder.column_name(model, field, alias)
ret += SqlBuilder.PLUS
ret += SqlBuilder.EQUALS
ret += SqlBuilder.PLUS
            ret += SqlBuilder.table_prefix(model, foreign_key)
ret += SqlBuilder.PLUS
ret += SqlBuilder.DOT
ret += SqlBuilder.PLUS
ret += foreign_key.model.name_camel_case
ret += SqlBuilder.COLUMNS
ret += SqlBuilder.CHAR_DOT
ret += foreign_key.model.name_camel_case
            ret += SqlBuilder.CHAR_SEMICOLON
ret += SqlBuilder.NEW_LINE
ret += SqlBuilder.INDENT1
ret += SqlBuilder.CLOSE_BRACE
ret += SqlBuilder.add_all_joined_clauses(foreign_key.model,
cls.table_prefix(model,
foreign_key))
return ret
@classmethod
def table_prefix(cls, model, field):
ret = model.name_camel_case
ret += SqlBuilder.COLUMNS
ret += SqlBuilder.PREFIX
ret += field.model.name_upper_case
return ret
@classmethod
def column_clauses(cls, model):
ret = model.name_camel_case
ret += SqlBuilder.COLUMNS
ret += SqlBuilder.HAS_COLUMNS
for field in model.fields:
foreign_key = field.foreign_key
            if not foreign_key:
continue
ret += SqlBuilder.OR
ret += cls.column_clauses(foreign_key.model)
return ret
@classmethod
def column_name(cls, model, field, alias):
ret = ""
if alias:
ret += alias
ret += SqlBuilder.PLUS
else:
ret += model.name_camel_case
ret += SqlBuilder.COLUMNS
ret += SqlBuilder.TABLE_NAME
ret += SqlBuilder.PLUS
ret += SqlBuilder.DOT
ret += SqlBuilder.PLUS
ret += model.name_camel_case
ret += SqlBuilder.COLUMNS
ret += SqlBuilder.CHAR_DOT
ret += field.name_upper_case
return ret
|
Northshoot/AndroidContentProvider
|
acp/model/table.py
|
Python
|
apache-2.0
| 4,807 | 0.000208 |
# encoding: UTF-8
# Copyright Krzysztof Sopyła (krzysztofsopyla@gmail.com)
#
#
# Licensed under the MIT
# Network architecture:
# Five layer neural network, input layer 28*28= 784, output 10 (10 digits)
# Output labels uses one-hot encoding
# input layer - X[batch, 784]
# 1 layer - W1[784, 200] + b1[200]
# Y1[batch, 200]
# 2 layer - W2[200, 100] + b2[100]
# Y2[batch, 100]
# 3 layer - W3[100, 60] + b3[60]
# Y3[batch, 60]
# 4 layer - W4[60, 30] + b4[30]
# Y4[batch, 30]
# 5 layer - W5[30, 10] + b5[10]
# Y5[batch, 10], compared against the one-hot encoded labels
# model
# Y = softmax(X*W+b)
# Matrix mul: X*W - [batch,784]x[784,10] -> [batch,10]
# Training consists of finding good W elements. This will be handled automatically by
# Tensorflow optimizer
import visualizations as vis
import tensorflow as tf
from tensorflow.contrib.learn.python.learn.datasets.mnist import read_data_sets
NUM_ITERS=5000
DISPLAY_STEP=100
BATCH=100
tf.set_random_seed(0)
# Download images and labels
mnist = read_data_sets("MNISTdata", one_hot=True, reshape=False, validation_size=0)
# mnist.test (10K images+labels) -> mnist.test.images, mnist.test.labels
# mnist.train (60K images+labels) -> mnist.train.images, mnist.train.labels
# Placeholder for input images, each data sample is 28x28 grayscale images
# All the data will be stored in X - tensor, 4 dimensional matrix
# The first dimension (None) will index the images in the mini-batch
X = tf.placeholder(tf.float32, [None, 28, 28, 1])
# correct answers will go here
Y_ = tf.placeholder(tf.float32, [None, 10])
# layers sizes
L1 = 200
L2 = 100
L3 = 60
L4 = 30
L5 = 10
# weights - initialized with random values from normal distribution mean=0, stddev=0.1
# output of one layer is input for the next
W1 = tf.Variable(tf.truncated_normal([784, L1], stddev=0.1))
b1 = tf.Variable(tf.zeros([L1]))
W2 = tf.Variable(tf.truncated_normal([L1, L2], stddev=0.1))
b2 = tf.Variable(tf.zeros([L2]))
W3 = tf.Variable(tf.truncated_normal([L2, L3], stddev=0.1))
b3 = tf.Variable(tf.zeros([L3]))
W4 = tf.Variable(tf.truncated_normal([L3, L4], stddev=0.1))
b4 = tf.Variable(tf.zeros([L4]))
W5 = tf.Variable(tf.truncated_normal([L4, L5], stddev=0.1))
b5 = tf.Variable(tf.zeros([L5]))
# flatten the images, unroll each image row by row to create a vector[784]
# -1 in the shape definition means compute automatically the size of this dimension
XX = tf.reshape(X, [-1, 784])
# Define model
Y1 = tf.nn.sigmoid(tf.matmul(XX, W1) + b1)
Y2 = tf.nn.sigmoid(tf.matmul(Y1, W2) + b2)
Y3 = tf.nn.sigmoid(tf.matmul(Y2, W3) + b3)
Y4 = tf.nn.sigmoid(tf.matmul(Y3, W4) + b4)
Ylogits = tf.matmul(Y4, W5) + b5
Y = tf.nn.softmax(Ylogits)
# loss function: cross-entropy = - sum( Y_i * log(Yi) )
# Y: the computed output vector
# Y_: the desired output vector
# cross-entropy
# log takes the log of each element, * multiplies the tensors element by element
# reduce_mean will add all the components in the tensor
# so here we end up with the total cross-entropy for all images in the batch
#cross_entropy = -tf.reduce_mean(Y_ * tf.log(Y)) * 100.0 # normalized for batches of 100 images,
# we can also use tensorflow function for softmax
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=Ylogits, labels=Y_)
cross_entropy = tf.reduce_mean(cross_entropy)*100
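# Note: computing the loss from raw logits with the tf.nn helper is
# numerically safer than the hand-written -reduce_mean(Y_ * log(Y)) variant
# above, because it avoids log(0) when the softmax saturates; the *100
# factor only rescales the reported loss to the batch-of-100 convention.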
# accuracy of the trained model, between 0 (worst) and 1 (best)
correct_prediction = tf.equal(tf.argmax(Y, 1), tf.argmax(Y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# training, learning rate = 0.005
train_step = tf.train.GradientDescentOptimizer(0.005).minimize(cross_entropy)
# matplotlib visualisation
allweights = tf.concat([tf.reshape(W1, [-1]), tf.reshape(W2, [-1]), tf.reshape(W3, [-1]), tf.reshape(W4, [-1]), tf.reshape(W5, [-1])], 0)
allbiases = tf.concat([tf.reshape(b1, [-1]), tf.reshape(b2, [-1]), tf.reshape(b3, [-1]), tf.reshape(b4, [-1]), tf.reshape(b5, [-1])], 0)
# Initializing the variables
init = tf.global_variables_initializer()
train_losses = list()
train_acc = list()
test_losses = list()
test_acc = list()
saver = tf.train.Saver()
# Launch the graph
with tf.Session() as sess:
sess.run(init)
for i in range(NUM_ITERS+1):
# training on batches of 100 images with 100 labels
batch_X, batch_Y = mnist.train.next_batch(BATCH)
if i%DISPLAY_STEP ==0:
# compute training values for visualisation
acc_trn, loss_trn, w, b = sess.run([accuracy, cross_entropy, allweights, allbiases], feed_dict={X: batch_X, Y_: batch_Y})
acc_tst, loss_tst = sess.run([accuracy, cross_entropy], feed_dict={X: mnist.test.images, Y_: mnist.test.labels})
print("#{} Trn acc={} , Trn loss={} Tst acc={} , Tst loss={}".format(i,acc_trn,loss_trn,acc_tst,loss_tst))
train_losses.append(loss_trn)
train_acc.append(acc_trn)
test_losses.append(loss_tst)
test_acc.append(acc_tst)
# the backpropagationn training step
sess.run(train_step, feed_dict={X: batch_X, Y_: batch_Y})
title = "MNIST 2.0 5 layers sigmoid"
vis.losses_accuracies_plots(train_losses,train_acc,test_losses, test_acc,title,DISPLAY_STEP)
# Results
# mnist_single_layer_nn.py acc= 0.9237
# mnist_2.0_5_layer_nn.py TST acc = 0.9534
# sample output for 5k iterations
#0 Trn acc=0.10999999940395355 , Trn loss=230.5011444091797 Tst acc=0.0957999974489212 , Tst loss=232.8909912109375
#100 Trn acc=0.10000000149011612 , Trn loss=229.38812255859375 Tst acc=0.09799999743700027 , Tst loss=230.8378448486328
#200 Trn acc=0.07000000029802322 , Trn loss=231.29209899902344 Tst acc=0.09799999743700027 , Tst loss=230.82485961914062
#300 Trn acc=0.09000000357627869 , Trn loss=232.11734008789062 Tst acc=0.10090000182390213 , Tst loss=230.51341247558594
# ...
#4800 Trn acc=0.949999988079071 , Trn loss=11.355264663696289 Tst acc=0.948199987411499 , Tst loss=17.340219497680664
#4900 Trn acc=0.9399999976158142 , Trn loss=22.300941467285156 Tst acc=0.9466999769210815 , Tst loss=17.51348876953125
#5000 Trn acc=0.9200000166893005 , Trn loss=20.947153091430664 Tst acc=0.953499972820282 , Tst loss=15.77566909790039
|
ksopyla/tensorflow-mnist-convnets
|
mnist_2.0_5_layer_nn.py
|
Python
|
mit
| 6,492 | 0.00909 |
"""
tests.test_manager
~~~~~~~~~~~~~~~~~~
Provides unit tests for the :mod:`flask_restless.manager` module.
:copyright: 2012 Jeffrey Finkelstein <jeffrey.finkelstein@gmail.com>
:license: GNU AGPLv3+ or BSD
"""
import datetime
from flask import json
try:
from flask.ext.sqlalchemy import SQLAlchemy
except:
has_flask_sqlalchemy = False
else:
has_flask_sqlalchemy = True
from flask.ext.restless import APIManager
from flask.ext.restless.helpers import get_columns
from .helpers import FlaskTestBase
from .helpers import skip_unless
from .helpers import TestSupport
from .helpers import unregister_fsa_session_signals
dumps = json.dumps
loads = json.loads
class TestAPIManager(TestSupport):
"""Unit tests for the :class:`flask_restless.manager.APIManager` class.
"""
def test_constructor(self):
"""Tests that no error occurs on instantiation without any arguments to
the constructor.
"""
APIManager()
def test_init_app(self):
"""Tests for initializing the Flask application after instantiating the
:class:`flask.ext.restless.APIManager` object.
"""
# initialize the Flask application
self.manager.init_app(self.flaskapp, self.session)
# create an API
self.manager.create_api(self.Person)
# make a request on the API
#client = app.test_client()
response = self.app.get('/api/person')
assert response.status_code == 200
def test_create_api(self):
"""Tests that the :meth:`flask_restless.manager.APIManager.create_api`
method creates endpoints which are accessible by the client, only allow
specified HTTP methods, and which provide a correct API to a database.
"""
# create three different APIs for the same model
self.manager.create_api(self.Person, methods=['GET', 'POST'])
self.manager.create_api(self.Person, methods=['PATCH'],
url_prefix='/api2')
self.manager.create_api(self.Person, methods=['GET'],
url_prefix='/readonly')
# test that specified endpoints exist
response = self.app.post('/api/person', data=dumps(dict(name='foo')))
assert response.status_code == 201
assert loads(response.data)['id'] == 1
response = self.app.get('/api/person')
assert response.status_code == 200
assert len(loads(response.data)['objects']) == 1
assert loads(response.data)['objects'][0]['id'] == 1
# test that non-specified methods are not allowed
response = self.app.delete('/api/person/1')
assert response.status_code == 405
response = self.app.patch('/api/person/1',
data=dumps(dict(name='bar')))
assert response.status_code == 405
# test that specified endpoints exist
response = self.app.patch('/api2/person/1',
data=dumps(dict(name='bar')))
assert response.status_code == 200
assert loads(response.data)['id'] == 1
assert loads(response.data)['name'] == 'bar'
# test that non-specified methods are not allowed
response = self.app.get('/api2/person/1')
assert response.status_code == 405
response = self.app.delete('/api2/person/1')
assert response.status_code == 405
response = self.app.post('/api2/person',
data=dumps(dict(name='baz')))
assert response.status_code == 405
# test that the model is the same as before
response = self.app.get('/readonly/person')
assert response.status_code == 200
assert len(loads(response.data)['objects']) == 1
assert loads(response.data)['objects'][0]['id'] == 1
assert loads(response.data)['objects'][0]['name'] == 'bar'
def test_multi_pk(self):
"""Test for specifying a primary key from a set of primary keys to use
when registering routes.
"""
self.manager.create_api(self.User, methods=['GET', 'POST'],
primary_key='email')
data = dict(id=1, email='foo')
response = self.app.post('/api/user', data=dumps(data))
assert response.status_code == 201
data = loads(response.data)
assert data['email'] == 'foo'
response = self.app.get('/api/user/foo')
assert response.status_code == 200
data = loads(response.data)
assert data['email'] == 'foo'
assert data['id'] == 1
# user should not be accessible at this URL
response = self.app.get('/api/user/1')
assert response.status_code == 404
def test_different_collection_name(self):
"""Tests that providing a different collection name exposes the API at
the corresponding URL.
"""
self.manager.create_api(self.Person, methods=['POST', 'GET'],
collection_name='people')
response = self.app.post('/api/people', data=dumps(dict(name='foo')))
assert response.status_code == 201
assert loads(response.data)['id'] == 1
response = self.app.get('/api/people')
assert response.status_code == 200
assert len(loads(response.data)['objects']) == 1
assert loads(response.data)['objects'][0]['id'] == 1
response = self.app.get('/api/people/1')
assert response.status_code == 200
assert loads(response.data)['id'] == 1
def test_allow_functions(self):
"""Tests that the ``allow_functions`` keyword argument makes a
:http:get:`/api/eval/...` endpoint available.
"""
self.manager.create_api(self.Person, allow_functions=True)
response = self.app.get('/api/eval/person?q={}')
assert response.status_code != 400
assert response.status_code == 204
def test_disallow_functions(self):
"""Tests that if the ``allow_functions`` keyword argument if ``False``,
no endpoint will be made available at :http:get:`/api/eval/...`.
"""
self.manager.create_api(self.Person, allow_functions=False)
response = self.app.get('/api/eval/person')
assert response.status_code != 200
assert response.status_code == 404
def test_include_related(self):
"""Test for specifying included columns on related models."""
date = datetime.date(1999, 12, 31)
person = self.Person(name=u'Test', age=10, other=20, birth_date=date)
computer = self.Computer(name=u'foo', vendor=u'bar', buy_date=date)
self.session.add(person)
person.computers.append(computer)
self.session.commit()
include = frozenset(['name', 'age', 'computers', 'computers.id',
'computers.name'])
self.manager.create_api(self.Person, include_columns=include)
include = frozenset(['name', 'age', 'computers.id', 'computers.name'])
self.manager.create_api(self.Person, url_prefix='/api2',
include_columns=include)
response = self.app.get('/api/person/{0}'.format(person.id))
person_dict = loads(response.data)
for column in 'name', 'age', 'computers':
assert column in person_dict
for column in 'id', 'other', 'birth_date':
assert column not in person_dict
for column in 'id', 'name':
assert column in person_dict['computers'][0]
for column in 'vendor', 'owner_id', 'buy_date':
assert column not in person_dict['computers'][0]
response = self.app.get('/api2/person/{0}'.format(person.id))
assert 'computers' not in loads(response.data)
def test_include_column_attributes(self):
"""Test for specifying included columns as SQLAlchemy column attributes.
"""
date = datetime.date(1999, 12, 31)
person = self.Person(name=u'Test', age=10, other=20, birth_date=date)
self.session.add(person)
self.session.commit()
include = frozenset([self.Person.name, self.Person.age])
self.manager.create_api(self.Person, include_columns=include)
response = self.app.get('/api/person/{0}'.format(person.id))
person_dict = loads(response.data)
for column in 'name', 'age':
assert column in person_dict
for column in 'id', 'other', 'birth_date':
assert column not in person_dict
def test_exclude_related(self):
"""Test for specifying excluded columns on related models."""
date = datetime.date(1999, 12, 31)
person = self.Person(name=u'Test', age=10, other=20, birth_date=date)
computer = self.Computer(name=u'foo', vendor=u'bar', buy_date=date)
self.session.add(person)
person.computers.append(computer)
self.session.commit()
exclude = frozenset(['name', 'age', 'computers', 'computers.id',
'computers.name'])
self.manager.create_api(self.Person, exclude_columns=exclude)
exclude = frozenset(['name', 'age', 'computers.id', 'computers.name'])
self.manager.create_api(self.Person, url_prefix='/api2',
exclude_columns=exclude)
response = self.app.get('/api/person/{0}'.format(person.id))
person_dict = loads(response.data)
for column in 'name', 'age', 'computers':
assert column not in person_dict
for column in 'id', 'other', 'birth_date':
assert column in person_dict
response = self.app.get('/api2/person/{0}'.format(person.id))
person_dict = loads(response.data)
assert 'computers' in person_dict
for column in 'id', 'name':
assert column not in person_dict['computers'][0]
for column in 'vendor', 'owner_id', 'buy_date':
assert column in person_dict['computers'][0]
def test_exclude_column_attributes(self):
"""Test for specifying excluded columns as SQLAlchemy column attributes.
"""
date = datetime.date(1999, 12, 31)
person = self.Person(name=u'Test', age=10, other=20, birth_date=date)
self.session.add(person)
self.session.commit()
exclude = frozenset([self.Person.name, self.Person.age])
self.manager.create_api(self.Person, exclude_columns=exclude)
response = self.app.get('/api/person/{0}'.format(person.id))
person_dict = loads(response.data)
for column in 'name', 'age':
assert column not in person_dict
for column in 'id', 'other', 'birth_date':
assert column in person_dict
def test_include_columns(self):
"""Tests that the `include_columns` argument specifies which columns to
return in the JSON representation of instances of the model.
"""
all_columns = get_columns(self.Person)
# allow all
self.manager.create_api(self.Person, include_columns=None,
url_prefix='/all')
self.manager.create_api(self.Person, include_columns=all_columns,
url_prefix='/all2')
# allow some
self.manager.create_api(self.Person, include_columns=('name', 'age'),
url_prefix='/some')
# allow none
self.manager.create_api(self.Person, include_columns=(),
url_prefix='/none')
# create a test person
self.manager.create_api(self.Person, methods=['POST'],
url_prefix='/add')
d = dict(name=u'Test', age=10, other=20,
birth_date=datetime.date(1999, 12, 31).isoformat())
response = self.app.post('/add/person', data=dumps(d))
assert response.status_code == 201
personid = loads(response.data)['id']
# get all
response = self.app.get('/all/person/{0}'.format(personid))
for column in 'name', 'age', 'other', 'birth_date', 'computers':
assert column in loads(response.data)
response = self.app.get('/all2/person/{0}'.format(personid))
for column in 'name', 'age', 'other', 'birth_date', 'computers':
assert column in loads(response.data)
# get some
response = self.app.get('/some/person/{0}'.format(personid))
for column in 'name', 'age':
assert column in loads(response.data)
for column in 'other', 'birth_date', 'computers':
assert column not in loads(response.data)
# get none
response = self.app.get('/none/person/{0}'.format(personid))
for column in 'name', 'age', 'other', 'birth_date', 'computers':
assert column not in loads(response.data)
def test_include_methods(self):
"""Tests that the `include_methods` argument specifies which methods to
return in the JSON representation of instances of the model.
"""
# included
self.manager.create_api(self.Person, url_prefix='/included',
include_methods=['name_and_age',
'computers.speed'])
# not included
self.manager.create_api(self.Person, url_prefix='/not_included')
# related object
self.manager.create_api(self.Computer, url_prefix='/included',
include_methods=['owner.name_and_age'])
# included non-callable property
self.manager.create_api(self.Computer, url_prefix='/included_property',
include_methods=['speed_property'])
# create a test person
date = datetime.date(1999, 12, 31)
person = self.Person(name=u'Test', age=10, other=20, birth_date=date)
computer = self.Computer(name=u'foo', vendor=u'bar', buy_date=date)
self.session.add(person)
person.computers.append(computer)
self.session.commit()
# get one with included method
response = self.app.get('/included/person/{0}'.format(person.id))
assert loads(response.data)['name_and_age'] == 'Test (aged 10)'
# get one without included method
response = self.app.get('/not_included/person/{0}'.format(person.id))
assert 'name_and_age' not in loads(response.data)
# get many with included method
response = self.app.get('/included/person')
response_data = loads(response.data)
assert response_data['objects'][0]['name_and_age'] == 'Test (aged 10)'
# get one through a related object
response = self.app.get('/included/computer')
response_data = loads(response.data)
assert 'name_and_age' in response_data['objects'][0]['owner']
# get many through a related object
response = self.app.get('/included/person')
response_data = loads(response.data)
assert response_data['objects'][0]['computers'][0]['speed'] == 42
# get one with included property
response = self.app.get('/included_property/computer/{0}'.format(computer.id))
response_data = loads(response.data)
assert response_data['speed_property'] == 42
def test_included_method_returns_object(self):
"""Tests that objects are serialized when returned from a method listed
in the `include_methods` argument.
"""
date = datetime.date(1999, 12, 31)
person = self.Person(name=u'Test', age=10, other=20, birth_date=date)
computer = self.Computer(name=u'foo', vendor=u'bar', buy_date=date)
self.session.add(person)
person.computers.append(computer)
self.session.commit()
self.manager.create_api(self.Person,
include_methods=['first_computer'])
response = self.app.get('/api/person/1')
assert 200 == response.status_code
data = loads(response.data)
assert 'first_computer' in data
assert 'foo' == data['first_computer']['name']
def test_exclude_columns(self):
"""Tests that the ``exclude_columns`` argument specifies which columns
to exclude in the JSON representation of instances of the model.
"""
all_columns = get_columns(self.Person)
# allow all
self.manager.create_api(self.Person, exclude_columns=None,
url_prefix='/all')
self.manager.create_api(self.Person, exclude_columns=(),
url_prefix='/all2')
# allow some
exclude = ('other', 'birth_date', 'computers')
self.manager.create_api(self.Person, exclude_columns=exclude,
url_prefix='/some')
# allow none
self.manager.create_api(self.Person, exclude_columns=all_columns,
url_prefix='/none')
# create a test person
self.manager.create_api(self.Person, methods=['POST'],
url_prefix='/add')
d = dict(name=u'Test', age=10, other=20,
birth_date=datetime.date(1999, 12, 31).isoformat())
response = self.app.post('/add/person', data=dumps(d))
assert response.status_code == 201
personid = loads(response.data)['id']
# get all
response = self.app.get('/all/person/{0}'.format(personid))
for column in 'name', 'age', 'other', 'birth_date', 'computers':
assert column in loads(response.data)
response = self.app.get('/all2/person/{0}'.format(personid))
for column in 'name', 'age', 'other', 'birth_date', 'computers':
assert column in loads(response.data)
# get some
response = self.app.get('/some/person/{0}'.format(personid))
for column in 'name', 'age':
assert column in loads(response.data)
for column in 'other', 'birth_date', 'computers':
assert column not in loads(response.data)
# get none
response = self.app.get('/none/person/{0}'.format(personid))
for column in 'name', 'age', 'other', 'birth_date', 'computers':
assert column not in loads(response.data)
def test_different_urls(self):
"""Tests that establishing different URL endpoints for the same model
affect the same database table.
"""
methods = frozenset(('get', 'patch', 'post', 'delete'))
# create a separate endpoint for each HTTP method
for method in methods:
url = '/{0}'.format(method)
self.manager.create_api(self.Person, methods=[method.upper()],
url_prefix=url)
# test for correct requests
response = self.app.get('/get/person')
assert response.status_code == 200
response = self.app.post('/post/person', data=dumps(dict(name='Test')))
assert response.status_code == 201
response = self.app.patch('/patch/person/1',
data=dumps(dict(name='foo')))
assert response.status_code == 200
response = self.app.delete('/delete/person/1')
assert response.status_code == 204
# test for incorrect requests
response = self.app.get('/post/person')
assert response.status_code == 405
response = self.app.get('/patch/person/1')
assert response.status_code == 405
response = self.app.get('/delete/person/1')
assert response.status_code == 405
response = self.app.post('/get/person')
assert response.status_code == 405
response = self.app.post('/patch/person/1')
assert response.status_code == 405
response = self.app.post('/delete/person/1')
assert response.status_code == 405
response = self.app.patch('/get/person')
assert response.status_code == 405
response = self.app.patch('/post/person')
assert response.status_code == 405
response = self.app.patch('/delete/person/1')
assert response.status_code == 405
response = self.app.delete('/get/person')
assert response.status_code == 405
response = self.app.delete('/post/person')
assert response.status_code == 405
response = self.app.delete('/patch/person/1')
assert response.status_code == 405
# test that the same model is updated on all URLs
response = self.app.post('/post/person', data=dumps(dict(name='Test')))
assert response.status_code == 201
response = self.app.get('/get/person/1')
assert response.status_code == 200
assert loads(response.data)['name'] == 'Test'
response = self.app.patch('/patch/person/1',
data=dumps(dict(name='Foo')))
assert response.status_code == 200
response = self.app.get('/get/person/1')
assert response.status_code == 200
assert loads(response.data)['name'] == 'Foo'
response = self.app.delete('/delete/person/1')
assert response.status_code == 204
response = self.app.get('/get/person/1')
assert response.status_code == 404
def test_max_results_per_page(self):
"""Test for specifying the ``max_results_per_page`` keyword argument.
"""
self.manager.create_api(self.Person, methods=['GET', 'POST'],
max_results_per_page=15)
for n in range(20):
response = self.app.post('/api/person', data=dumps({}))
assert 201 == response.status_code
response = self.app.get('/api/person?results_per_page=20')
assert 200 == response.status_code
data = loads(response.data)
assert 15 == len(data['objects'])
def test_expose_relations(self):
"""Tests that relations are exposed at a URL which is a child of the
instance URL.
"""
date = datetime.date(1999, 12, 31)
person = self.Person(name=u'Test', age=10, other=20, birth_date=date)
computer = self.Computer(name=u'foo', vendor=u'bar', buy_date=date)
self.session.add(person)
person.computers.append(computer)
self.session.commit()
self.manager.create_api(self.Person)
response = self.app.get('/api/person/1/computers')
assert 200 == response.status_code
data = loads(response.data)
assert 'objects' in data
assert 1 == len(data['objects'])
assert 'foo' == data['objects'][0]['name']
def test_expose_lazy_relations(self):
"""Tests that lazy relations are exposed at a URL which is a child of
the instance URL.
"""
person = self.LazyPerson(name=u'Test')
computer = self.LazyComputer(name=u'foo')
self.session.add(person)
person.computers.append(computer)
self.session.commit()
self.manager.create_api(self.LazyPerson)
response = self.app.get('/api/lazyperson/1/computers')
assert 200 == response.status_code
data = loads(response.data)
assert 'objects' in data
assert 1 == len(data['objects'])
assert 'foo' == data['objects'][0]['name']
def test_universal_preprocessor(self):
"""Tests universal preprocessor and postprocessor applied to all
methods created with the API manager.
"""
class Counter(object):
def __init__(s):
s.count = 0
def increment(s):
s.count += 1
def __eq__(s, o):
return s.count == o.count if isinstance(o, Counter) \
else s.count == o
precount = Counter()
postcount = Counter()
def preget(**kw):
precount.increment()
def postget(**kw):
postcount.increment()
manager = APIManager(self.flaskapp, self.session,
preprocessors=dict(GET_MANY=[preget]),
postprocessors=dict(GET_MANY=[postget]))
manager.create_api(self.Person)
manager.create_api(self.Computer)
self.app.get('/api/person')
self.app.get('/api/computer')
self.app.get('/api/person')
assert precount == postcount == 3
@skip_unless(has_flask_sqlalchemy, 'Flask-SQLAlchemy not found.')
class TestFSA(FlaskTestBase):
"""Tests which use models defined using Flask-SQLAlchemy instead of pure
SQLAlchemy.
"""
def setUp(self):
"""Creates the Flask application, the APIManager, the database, and the
Flask-SQLAlchemy models.
"""
super(TestFSA, self).setUp()
# initialize SQLAlchemy and Flask-Restless
self.db = SQLAlchemy(self.flaskapp)
self.manager = APIManager(self.flaskapp, flask_sqlalchemy_db=self.db)
# for the sake of brevity...
db = self.db
# declare the models
class Computer(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Unicode, unique=True)
vendor = db.Column(db.Unicode)
buy_date = db.Column(db.DateTime)
owner_id = db.Column(db.Integer, db.ForeignKey('person.id'))
owner = db.relationship('Person',
backref=db.backref('computers',
lazy='dynamic'))
class Person(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Unicode, unique=True)
age = db.Column(db.Float)
other = db.Column(db.Float)
birth_date = db.Column(db.Date)
self.Person = Person
self.Computer = Computer
# create all the tables required for the models
self.db.create_all()
def tearDown(self):
"""Drops all tables from the temporary database."""
self.db.drop_all()
unregister_fsa_session_signals()
def test_flask_sqlalchemy(self):
"""Tests that :class:`flask.ext.restless.APIManager` correctly exposes
models defined using Flask-SQLAlchemy.
"""
# create three different APIs for the same model
self.manager.create_api(self.Person, methods=['GET', 'POST'])
self.manager.create_api(self.Person, methods=['PATCH'],
url_prefix='/api2')
self.manager.create_api(self.Person, methods=['GET'],
url_prefix='/readonly')
# test that specified endpoints exist
response = self.app.post('/api/person', data=dumps(dict(name='foo')))
assert response.status_code == 201
assert loads(response.data)['id'] == 1
response = self.app.get('/api/person')
assert response.status_code == 200
assert len(loads(response.data)['objects']) == 1
assert loads(response.data)['objects'][0]['id'] == 1
response = self.app.patch('/api2/person/1',
data=dumps(dict(name='bar')))
assert response.status_code == 200
assert loads(response.data)['id'] == 1
assert loads(response.data)['name'] == 'bar'
# test that the model is the same as before
response = self.app.get('/readonly/person')
assert response.status_code == 200
assert len(loads(response.data)['objects']) == 1
assert loads(response.data)['objects'][0]['id'] == 1
assert loads(response.data)['objects'][0]['name'] == 'bar'
|
CommonsCloud/CommonsRestless
|
tests/test_manager.py
|
Python
|
agpl-3.0
| 27,696 | 0.000217 |
import numpy as np
import theano
import theano.tensor as T
theano.config.exception_verbosity = 'high'
theano.config.compute_test_value = 'warn'
floatX = theano.config.floatX
def iscalar(name=None):
Av = 1
A = T.iscalar(name=name)
A.tag.test_value = Av
return A, Av
def fscalar(name=None):
Av = 1.23
A = T.fscalar(name=name)
A.tag.test_value = Av
return A, Av
def ivector(size, name=None):
Av = np.zeros(size, dtype=np.int)
    A = T.ivector(name=name)
A.tag.test_value = Av
return A, Av
def fvector(size, name=None):
Av = np.zeros(size, dtype=floatX)
    A = T.fvector(name=name)
A.tag.test_value = Av
return A, Av
def imatrix(shape, name=None):
Av = np.zeros(shape, dtype=np.int)
A = T.imatrix(name=name)
A.tag.test_value = Av
return A, Av
def fmatrix(shape, name=None):
Av = np.zeros(shape, dtype=floatX)
A = T.fmatrix(name=name)
A.tag.test_value = Av
return A, Av
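# Example (illustrative): with compute_test_value enabled above, shape errors
# surface while the graph is being built rather than at run time:
#   A, Av = fmatrix((3, 4), name='A')
#   B, Bv = fmatrix((4, 2), name='B')
#   C = T.dot(A, B)   # test values propagate; a shape mismatch raises here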
|
jbornschein/y2k
|
learning/tests/testing.py
|
Python
|
agpl-3.0
| 974 | 0.013347 |
# encoding: utf-8
import os
HERE = os.path.dirname(__file__)
# Hosted config files
BUILDOUT_USER = 'pythonpackages'
BUILDOUT_INDEX = open(os.path.join(HERE, 'templates', 'buildout.html')).read()
BUILDOUT_REPOS = (
('buildout-apache-modwsgi', 'apache-modwsgi'),
('buildout-bluebream', 'bluebream'),
('buildout-django', 'django'),
('buildout-jenkins', 'jenkins'),
('buildout-plone', 'plone'),
('buildout-plone-getpaid', 'plone-getpaid'),
('buildout-wordpress', 'wordpress'),
('buildout-zope2', 'zope2'),
)
# Redis
REDIS_EXPIRE_ONE_DAY = 86400
REDIS_EXPIRE_ONE_HOUR = 3600
# Assign redis key names to variable names (and in some cases, poorly
# chosen key names to better chosen variable names)
REDIS_KEY_BETA_USER = 'users:%s.%s' # Hash (full_name, email_address,
# github_username, classifiers)
REDIS_KEY_BETA_USERS = 'beta_users' # Set
REDIS_KEY_NUM_PACKAGES_PYPI = 'num_packages_pypi'
REDIS_KEY_PACKAGE_DOWNLOADS = 'package:%s:downloads'
REDIS_KEY_PACKAGE_FEATURED_BY = 'package:%s:featured_by' # List
REDIS_KEY_PACKAGE_FEATURED_TIME = 'package:%s:featured_time' # List
REDIS_KEY_PACKAGE_METADATA = 'package:%s:metadata'
REDIS_KEY_PACKAGE_RELEASED_TIME = 'release:%s:featured_time' # List
REDIS_KEY_PACKAGE_TIMER = 'package:%s:timer'
REDIS_KEY_PACKAGE_TRASH = 'package:%s:trash'
REDIS_KEY_PACKAGE_VERSION = 'package:%s:latest_version'
REDIS_KEY_PACKAGES_DOWNLOADED = 'recent_entries' # Sorted set
REDIS_KEY_PACKAGES_ENTERED = 'entries' # List
REDIS_KEY_PACKAGES_ENTERED_USER = 'user:%s:package_list' # List
REDIS_KEY_PACKAGES_FEATURED = 'most_vain' # Sorted set
REDIS_KEY_PACKAGES_FEATURED_COUNT = 'featured_count'
REDIS_KEY_PACKAGES_FEATURED_USER = 'user:%s:package_set' # Sorted set
REDIS_KEY_PACKAGES_RELEASED_COUNT = 'released_count'
REDIS_KEY_PACKAGES_RELEASED_USER = 'user:%s:release_set' # Sorted set
REDIS_KEY_RELEASES_ENTERED = 'releases' # List
REDIS_KEY_RELEASES_ENTERED_USER = 'user:%s:release_list' # List
REDIS_KEY_RELEASES_FEATURED = 'most_released' # Sorted set
REDIS_KEY_TROVE_CLASSIFIER = 'trove:%s'
REDIS_KEY_TROVE_CLASSIFIERS_ENTERED = 'trove:list'
REDIS_KEY_TROVE_CLASSIFIERS_FEATURED = 'trove:set'
REDIS_KEY_USERS_ENTERED = 'users' # List
REDIS_KEY_USERS_FEATURED = 'recent_users' # Sorted set
REDIS_KEY_USERS_WHITELIST = 'users_whitelist' # Set
REDIS_KEY_USER_AVATAR = 'user:%s:avatar'
REDIS_KEY_USER_CUSTOMER = 'user:%s:customer'
REDIS_KEY_USER_EMAIL = 'user:%s:email'
REDIS_KEY_USER_GITHUB_OAUTH_TOKEN = 'user:%s:token'
REDIS_KEY_USER_NAME = 'user:%s:name'
REDIS_KEY_USER_ORG_SELECTED = 'user:%s:org_selected'
REDIS_KEY_USER_ORGS_SELECTED = 'user:%s:orgs_selected' # Hash
REDIS_KEY_USER_PACKAGE_SELECTED = 'user:%s:package_selected'
REDIS_KEY_USER_PACKAGES_SELECTED = 'user:%s:packages_selected' # Hash
REDIS_KEY_USER_PACKAGE_COUNT = 'user:%s:package:%s'
REDIS_KEY_USER_PLAN = 'user:%s:plan'
REDIS_KEY_USER_RELEASE_COUNT = 'user:%s:release:%s'
REDIS_KEY_USER_PYPI_OAUTH_SECRET = 'user:%s:pypi_oauth_secret' # PyPI oauth1
REDIS_KEY_USER_PYPI_OAUTH_TOKEN = 'user:%s:pypi_oauth_token' # PyPI oauth1
REDIS_KEY_USER_SLOTMAX_ORG = 'user:%s:slotnum_org' # For stripe
REDIS_KEY_USER_SLOTMAX_PACKAGE = 'user:%s:slotnum_package' # For stripe
REDIS_KEY_USER_SLOTNUM_ORG = 'user:%s:org_slots'
REDIS_KEY_USER_SLOTNUM_PACKAGE = 'user:%s:package_slots'
# Github
if 'STAGING' in os.environ:
GITHUB_SCOPES = 'delete_repo,repo'
else:
GITHUB_SCOPES = 'repo' # http://developer.github.com/v3/oauth/#scopes
if 'STAGING' in os.environ:
GITHUB_CLIENT_ID = ''
GITHUB_CLIENT_SECRET = ''
else:
GITHUB_CLIENT_ID = ''
GITHUB_CLIENT_SECRET = ''
GITHUB_URL = 'https://github.com'
GITHUB_URL_API = 'https://api.github.com'
GITHUB_URL_AUTH = (GITHUB_URL +
'/login/oauth/authorize?client_id=%s&scope=%s' % (
GITHUB_CLIENT_ID, GITHUB_SCOPES))
GITHUB_URL_ORG_INFO = GITHUB_URL_API + '/orgs/%s'
GITHUB_URL_ORG_REPO = GITHUB_URL_API + '/repos/%s/%s'
GITHUB_URL_ORG_REPOS = GITHUB_URL_API + '/orgs/%s/repos?per_page=100&page=%s'
GITHUB_URL_ORG_REPOS_NEW = GITHUB_URL_API + '/orgs/%s/repos?%s'
GITHUB_URL_REPOS_NEW = GITHUB_URL_API + '/user/repos?%s'
GITHUB_URL_REPOS_BLOB = (GITHUB_URL_API +
'/repos/%s/%s/git/blobs/%s?%s')
GITHUB_URL_REPOS_BLOB_ANON = (GITHUB_URL_API +
'/repos/%s/%s/git/blobs/%s') # No qs needed
GITHUB_URL_REPOS_COMMITS = GITHUB_URL_API + '/repos/%s/%s/commits?%s'
GITHUB_URL_REPOS_DELETE = GITHUB_URL_API + '/repos/%s/%s?%s'
GITHUB_URL_REPOS_REFS = (GITHUB_URL_API +
'/repos/%s/%s/git/refs?%s')
GITHUB_URL_REPOS_TAGS = (GITHUB_URL_API +
'/repos/%s/%s/git/tags?%s')
GITHUB_URL_REPOS_TREE = (GITHUB_URL_API +
'/repos/%s/%s/git/trees/%s?recursive=1?%s')
GITHUB_URL_REPOS_TREE_CREATE = (GITHUB_URL_API +
'/repos/%s/%s/git/trees?%s')
GITHUB_URL_TOKEN = GITHUB_URL + '/login/oauth/access_token'
GITHUB_URL_USER = GITHUB_URL_API + '/user?%s'
GITHUB_URL_USER_ORGS = GITHUB_URL_API + '/user/orgs?%s'
GITHUB_URL_USER_REPO = GITHUB_URL_API + '/repos/%s/%s?%s'
GITHUB_URL_USER_REPOS = GITHUB_URL_API + '/user/repos?%s&per_page=100'
# GMail
GMAIL_HOST = 'smtp.gmail.com'
GMAIL_PASS = ''
GMAIL_USER = 'aclark@pythonpackages.com'
# Mail
MESSAGE = """
Hi %s,
%s
---
pythonpackages.com
"""
# Menu
MENU = """\
<div class="btn-group pull-right">
<a class="btn dropdown-toggle login" data-toggle="dropdown" href="#">
<img src="%s"> %s
<span class="caret"></span>
</a>
<ul class="dropdown-menu">
<li><a href="/dashboard"><i
class="icon-user"></i> Dashboard</a></li>
<li class="divider"></li>
<li><a href="/logout"><i class="icon-off"></i> Sign out</a></li>
</ul>
</div>
"""
MENU_SIGNIN = """\
<div class="btn-group pull-right">
<a class="btn" href="%s">Sign in with GitHub</a>
</div>
"""
# Misc
ADMIN_EMAIL = 'pythonpackages.com <info@pythonpackages.com>'
COOKIE_PYPI = '_pp000'
COOKIE_GITHUB = '_pp001'
COOKIE_ENCRYPT = ''
COOKIE_VALIDATE = ''
CRATE_DOWNLOAD_URL = 'http://crate.io/api/v1/release/%s-%s/?format=json'
DEFAULT_ACTIVITY_LIMIT = 4
DEFAULT_LIMIT = 5
DEFAULT_TAB = 'downloaded'
DOCS = {
'python.org': 'http://packages.python.org',
'readthedocs.org': 'http://readthedocs.org/docs',
'zope.org': 'http://docs.zope.org',
}
DOCUTILS_SETTINGS = {
# Via https://svn.python.org/packages/trunk/pypi/description_utils.py
'raw_enabled': 0, # no raw HTML code
'file_insertion_enabled': 0, # no file/URL access
'halt_level': 2, # at warnings or errors, raise an exception
'report_level': 5, # never report problems with the reST code
}
FORTUNE_FILE = os.path.join(HERE, 'fortunes', 'fortunes')
MANAGE_PACKAGE_ACTIONS = (
'add-slot-here',
'run-test-suite',
'tag-and-release',
'test-installation',
'upload-to-test-index',
)
METADATA_IGNORE = {
'fields': (
'_pypi_hidden',
'_pypi_ordering',
'classifiers',
'name',
'summary',
'version'
),
'values': (
list(),
None,
str(),
'UNKNOWN',
)
}
PERMS = (
'manage_dashboard',
'manage_package',
'manage_site',
)
PYTHON_PACKAGES_URL = 'http://pythonpackages.com'
TEST_USER_MAIL = 'aclark@aclark.net'
TEST_USER_NAME = 'Alex Clark'
TIMESTAMP = 'at %l:%M %p on %a %b %d %Y UTC'
USER_SLOTMAX_ORG = 0
USER_SLOTMAX_PACKAGE = 1
# Opencomparison
OPENCOMP_FRAMEWORKS = ('django', 'pyramid', 'plone')
OPENCOMP_SERVICE_URLS = (
'http://%s.opencomparison.org',
'http://%s.opencomparison.org/api/v1/package/%s',
)
# Packaging
MANIFEST_IN = """\
include *
recursive-include docs *
recursive-include %s *
"""
PASTER_CONFIG = """\
[pastescript]
author = %s
author_email = %s
url = %s
"""
# Paster
PASTER_TEMPLATE_CHOICES = (
('basic_package', 'Basic package for a Python library'),
('django_app', 'Basic package for a Django app'),
('alchemy',
'Basic package for Pyramid SQLAlchemy project using url dispatch'),
('starter', 'Basic package for Pyramid starter project'),
('zodb', 'Basic package for Pyramid ZODB project using traversal'),
('basic_namespace', 'Namespace package for a Python library'),
('basic_zope', 'Namespace package for a Zope 2 product'),
('plone', 'Namespace package for a Plone add-on'),
('plone2_theme', 'Namespace package for a Plone 2.1 theme'),
('plone2.5_theme', 'Namespace package for a Plone 2.5 theme'),
('plone3_theme', 'Namespace package for a Plone 3 theme'),
('plone_theme', 'Namespace package for a Plone 4 theme'),
('plone_pas', 'Namespace package for a Plone PAS plugin'),
('kss_plugin', 'Namespace package for a Plone KSS plugin'),
('archetype', 'Namespace package for Plone Archetypes content'),
('dexterity', 'Namespace package for Plone Dexterity content'),
('plone3_portlet', 'Nested namespace package for a Plone 3 portlet'),
('recipe', 'Nested namespace package for a Buildout recipe'),
('plone_app', 'Nested namespace package for a Plone add-on'),
('nested_namespace', 'Nested namespace package for a Python library'),
)
# Plans
PLANS_CHOICES = (
('free', u'Free Plan — $0 USD/Month — 1 Package Slot'),
('hobbyist', u'Hobbyist Plan — $7 USD/Month — 1 Organization Slot and \
3 Package Slots'),
('semi-pro', u'Semi-pro Plan — $15 USD/Month — 2 Organization Slots \
and 6 Package Slots'),
('professional', u'Professional Plan — $31 USD/Month — 4 Organization \
Slots and 12 Package Slots'),
('corporate', u'Corporate Plan — $184 USD/Month — 8 Organization slots \
and 144 Package Slots'),
('game-changer', u'Game Changer Plan — $400 USD/Month — Unlimited \
Organization and Package Slots'),
)
PLANS_DATA = {
# name id cents orgslots packageslots
'free': (0, '0', 0, 1),
'hobbyist': (1, '700', 1, 3),
'semi-pro': (2, '1500', 2, 6),
'professional': (3, '3100', 4, 12),
'corporate': (4, '18400', 8, 144),
'game-changer': (5, '40000', 100, 1000),
}
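# Example (illustrative): unpack a plan's limits from the tuple layout above
#   plan_id, cents, org_slots, package_slots = PLANS_DATA['hobbyist']
#   # -> (1, '700', 1, 3)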
# .pypirc
PYPIRC = """\
[distutils]
index-servers =
pypi
[pypi]
username:%s
password:%s
"""
PYPIRC_TEST = """\
[distutils]
index-servers =
other
pypi
[other]
repository: http://index.pythonpackages.com
username:%s
password:%s
[pypi]
username:%s
password:%s
"""
# PyPI
PYPI_URL = 'http://pypi.python.org/pypi'
PYPI_OAUTH_CONSUMER_KEY = u''
PYPI_OAUTH_CONSUMER_SECRET = u''
PYPI_URL_OAUTH_ACCESS_TOKEN = 'https://pypi.python.org/oauth/access_token'
PYPI_URL_OAUTH_ADD_RELEASE = 'https://pypi.python.org/oauth/add_release'
PYPI_URL_OAUTH_AUTHORIZE = ('https://pypi.python.org/oauth/authorise'
'?oauth_token=%s&oauth_callback=%s')
if 'STAGING' in os.environ:
PYPI_URL_OAUTH_CALLBACK = 'https://test.pythonpackages.com/login'
else:
PYPI_URL_OAUTH_CALLBACK = 'https://pythonpackages.com/login'
PYPI_URL_OAUTH_REQUEST_TOKEN = (
'https://pypi.python.org/oauth/request_token')
PYPI_URL_OAUTH_TEST = 'https://pypi.python.org/oauth/test'
PYPI_URL_OAUTH_UPLOAD = 'https://pypi.python.org/oauth/upload'
# Stripe
if 'STAGING' in os.environ:
STRIPE_API_KEY = "" # Testing
else:
STRIPE_API_KEY = "" # Live
# Twitter
TWITTER_ACCESS_TOKEN = ''
TWITTER_ACCESS_SECRET = ''
TWITTER_CONSUMER_KEY = ''
TWITTER_CONSUMER_SECRET = ''
|
python-packages/vanity_app
|
src/vanity_app/vanity_app/config.py
|
Python
|
mit
| 11,226 | 0.001161 |
#! /usr/bin/env python
import socket
print "creating socket..."
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #ipv4, tcp
print "done."
print "looking up port number..."
port = socket.getservbyname('http', 'tcp')
print "done."
print "connecting to remote host on port %d..." % port
s.connect(("www.google.com", port))
print "done."
print "connected from", s.getsockname() #port randomly set
print "connected to", s.getpeername()
|
qzxx-syzz/study
|
python/Net/connect.py
|
Python
|
gpl-3.0
| 446 | 0.006726 |
from __future__ import absolute_import
from jsonschema import Draft4Validator
SELECT_OPTIONS_SCHEMA = {
"type": "array",
"definitions": {
"select-option": {
"type": "object",
"properties": {"label": {"type": "string"}, "value": {"type": "string"}},
"required": ["label", "value"],
}
},
"properties": {"type": "array", "items": {"$ref": "#definitions/select-option"}},
}
ISSUE_LINKER_SCHEMA = {
"type": "object",
"properties": {
"webUrl": {"type": "string"},
"identifier": {"type": "string"},
"project": {"type": "string"},
},
"required": ["webUrl", "identifier", "project"],
}
SCHEMA_LIST = {"select": SELECT_OPTIONS_SCHEMA, "issue_link": ISSUE_LINKER_SCHEMA}
def validate(instance, schema_type):
schema = SCHEMA_LIST[schema_type]
v = Draft4Validator(schema)
    return v.is_valid(instance)
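# Example (illustrative):
#   validate([{"label": "Open", "value": "open"}], "select")   # -> True
#   validate({"identifier": "X-1"}, "issue_link")              # -> False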
|
mvaled/sentry
|
src/sentry/mediators/external_requests/util.py
|
Python
|
bsd-3-clause
| 953 | 0.003148 |
# Hihi is the grandfather of all geeks in IIITA. He and his crazy ideas.....Huh..... Currently, hihi is working on his most famous project
# named 21 Lane, but he is stuck at a tricky segment of his code.
# Hihi wants to assign some random IP addresses to users, but he won't use rand(). He wants to change the current IP of the user's computer
# to the IP such that its hash is next hash greater than the hash of original IP and differs by only 1 bit from the hash of original IP.
# Smart Hihi already hashed the IP to some integer using his personal hash function. What he wants from you is to convert the given hashed
# IP to the required IP X as mentioned above.
# OK, just find the smallest number greater than n with exactly 1 bit different from n in binary form
# Input :
# First line contains single integer T ( 1 <= T <= 10^6)- number of test cases. Second line contains hashed IP N ( 1 <= N <= 10^18)
# Output :
# Print T lines, each containing an integer X, the required IP.(don't worry Hihi will decode X to obtain final IP address)
# SAMPLE INPUT
# 5
# 6
# 4
# 10
# 12
# 14
# SAMPLE OUTPUT
# 7
# 5
# 11
# 13
# 15
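# Why a | (a + 1) works: adding 1 to a carries through its trailing 1-bits
# and sets the lowest 0-bit; OR-ing with a restores the trailing 1-bits, so
# the result differs from a in exactly that one bit and is the smallest
# number greater than a at Hamming distance 1.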
for _ in range(int(input())):
a=int(input())
print(a|a+1)
|
OmkarPathak/Python-Programs
|
CompetitiveProgramming/HackerEarth/Bit_Manipulation/P05_HihiAndCrazyBits.py
|
Python
|
gpl-3.0
| 1,216 | 0.011513 |
"""Tests to ensure that the html5lib tree builder generates good trees."""
import warnings
try:
from bs4.builder import HTML5TreeBuilder
HTML5LIB_PRESENT = True
except ImportError, e:
HTML5LIB_PRESENT = False
from bs4.element import SoupStrainer
from bs4.testing import (
HTML5TreeBuilderSmokeTest,
SoupTest,
skipIf,
)
@skipIf(
not HTML5LIB_PRESENT,
"html5lib seems not to be present, not testing its tree builder.")
class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest):
"""See ``HTML5TreeBuilderSmokeTest``."""
@property
def default_builder(self):
return HTML5TreeBuilder
def test_soupstrainer(self):
# The html5lib tree builder does not support SoupStrainers.
strainer = SoupStrainer("b")
markup = "<p>A <b>bold</b> statement.</p>"
with warnings.catch_warnings(record=True) as w:
soup = self.soup(markup, parse_only=strainer)
self.assertEqual(
soup.decode(), self.document_for(markup))
self.assertTrue(
"the html5lib tree builder doesn't support parse_only" in
str(w[0].message))
def test_correctly_nested_tables(self):
"""html5lib inserts <tbody> tags where other parsers don't."""
markup = ('<table id="1">'
'<tr>'
"<td>Here's another table:"
'<table id="2">'
'<tr><td>foo</td></tr>'
'</table></td>')
self.assertSoupEquals(
markup,
'<table id="1"><tbody><tr><td>Here\'s another table:'
'<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>'
'</td></tr></tbody></table>')
self.assertSoupEquals(
"<table><thead><tr><td>Foo</td></tr></thead>"
"<tbody><tr><td>Bar</td></tr></tbody>"
"<tfoot><tr><td>Baz</td></tr></tfoot></table>")
def test_xml_declaration_followed_by_doctype(self):
markup = '''<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<p>foo</p>
</body>
</html>'''
soup = self.soup(markup)
# Verify that we can reach the <p> tag; this means the tree is connected.
self.assertEqual(b"<p>foo</p>", soup.p.encode())
def test_reparented_markup(self):
markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>'
soup = self.soup(markup)
self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode())
self.assertEqual(2, len(soup.find_all('p')))
def test_reparented_markup_ends_with_whitespace(self):
markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n'
soup = self.soup(markup)
self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode())
self.assertEqual(2, len(soup.find_all('p')))
def test_reparented_markup_containing_identical_whitespace_nodes(self):
"""Verify that we keep the two whitespace nodes in this
document distinct when reparenting the adjacent <tbody> tags.
"""
markup = '<table> <tbody><tbody><ims></tbody> </table>'
soup = self.soup(markup)
space1, space2 = soup.find_all(string=' ')
tbody1, tbody2 = soup.find_all('tbody')
assert space1.next_element is tbody1
assert tbody2.next_element is space2
def test_reparented_markup_containing_children(self):
markup = '<div><a>aftermath<p><noscript>target</noscript>aftermath</a></p></div>'
soup = self.soup(markup)
noscript = soup.noscript
self.assertEqual("target", noscript.next_element)
target = soup.find(string='target')
# The 'aftermath' string was duplicated; we want the second one.
final_aftermath = soup.find_all(string='aftermath')[-1]
# The <noscript> tag was moved beneath a copy of the <a> tag,
# but the 'target' string within is still connected to the
# (second) 'aftermath' string.
self.assertEqual(final_aftermath, target.next_element)
self.assertEqual(target, final_aftermath.previous_element)
def test_processing_instruction(self):
"""Processing instructions become comments."""
markup = b"""<?PITarget PIContent?>"""
soup = self.soup(markup)
assert str(soup).startswith("<!--?PITarget PIContent?-->")
def test_cloned_multivalue_node(self):
markup = b"""<a class="my_class"><p></a>"""
soup = self.soup(markup)
a1, a2 = soup.find_all('a')
self.assertEqual(a1, a2)
assert a1 is not a2
def test_foster_parenting(self):
markup = b"""<table><td></tbody>A"""
soup = self.soup(markup)
self.assertEqual(u"<body>A<table><tbody><tr><td></td></tr></tbody></table></body>", soup.body.decode())
def test_extraction(self):
"""
Test that extraction does not destroy the tree.
https://bugs.launchpad.net/beautifulsoup/+bug/1782928
"""
markup = """
<html><head></head>
<style>
</style><script></script><body><p>hello</p></body></html>
"""
soup = self.soup(markup)
[s.extract() for s in soup('script')]
[s.extract() for s in soup('style')]
self.assertEqual(len(soup.find_all("p")), 1)
def test_empty_comment(self):
"""
Test that empty comment does not break structure.
https://bugs.launchpad.net/beautifulsoup/+bug/1806598
"""
markup = """
<html>
<body>
<form>
<!----><input type="text">
</form>
</body>
</html>
"""
soup = self.soup(markup)
inputs = []
for form in soup.find_all('form'):
inputs.extend(form.find_all('input'))
self.assertEqual(len(inputs), 1)
def test_tracking_line_numbers(self):
        # The html5lib TreeBuilder keeps track of line number and
        # position of each element.
markup = "\n <p>\n\n<sourceline>\n<b>text</b></sourceline><sourcepos></p>"
soup = self.soup(markup)
self.assertEqual(2, soup.p.sourceline)
self.assertEqual(5, soup.p.sourcepos)
self.assertEqual("sourceline", soup.p.find('sourceline').name)
# You can deactivate this behavior.
soup = self.soup(markup, store_line_numbers=False)
self.assertEqual("sourceline", soup.p.sourceline.name)
self.assertEqual("sourcepos", soup.p.sourcepos.name)
|
listyque/TACTIC-Handler
|
thlib/side/bs42/tests/test_html5lib.py
|
Python
|
epl-1.0
| 6,494 | 0.001386 |
"""add post media
Revision ID: 04da9abf37e2
Revises: 2e3a2882e5a4
Create Date: 2017-08-08 15:15:50.911420
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '04da9abf37e2'
down_revision = '2e3a2882e5a4'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('accounts', sa.Column('policy_keep_media', sa.Boolean(), server_default='FALSE', nullable=False))
op.add_column('posts', sa.Column('has_media', sa.Boolean(), server_default='FALSE', nullable=False))
# ### end Alembic commands ###
def downgrade():
op.drop_column('posts', 'has_media')
op.drop_column('accounts', 'policy_keep_media')
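# Applied with the usual Alembic workflow (illustrative):
#   alembic upgrade head      # adds the two columns
#   alembic downgrade -1      # removes them again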
|
codl/forget
|
migrations/versions/04da9abf37e2_add_post_media.py
|
Python
|
isc
| 683 | 0.002928 |
#
# QAPI event generator
#
# Copyright (c) 2014 Wenchao Xia
# Copyright (c) 2015-2016 Red Hat Inc.
#
# Authors:
# Wenchao Xia <wenchaoqemu@gmail.com>
# Markus Armbruster <armbru@redhat.com>
#
# This work is licensed under the terms of the GNU GPL, version 2.
# See the COPYING file in the top-level directory.
from qapi import *
def gen_event_send_proto(name, arg_type):
return 'void qapi_event_send_%(c_name)s(%(param)s)' % {
'c_name': c_name(name.lower()),
'param': gen_params(arg_type, 'Error **errp')}
def gen_event_send_decl(name, arg_type):
return mcgen('''
%(proto)s;
''',
proto=gen_event_send_proto(name, arg_type))
# Declare and initialize an object 'param' using parameters from gen_params()
def gen_param_var(typ):
assert not typ.variants
ret = mcgen('''
%(c_name)s param = {
''',
c_name=typ.c_name())
sep = ' '
for memb in typ.members:
ret += sep
sep = ', '
if memb.optional:
ret += 'has_' + c_name(memb.name) + sep
if memb.type.name == 'str':
# Cast away const added in gen_params()
ret += '(char *)'
ret += c_name(memb.name)
ret += mcgen('''
};
''')
return ret
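# For an argument type with members 'offset' (int) and an optional string
# 'node-name', gen_param_var() produces roughly this C fragment (illustrative;
# the exact struct name comes from the QAPI schema):
#     EventArgType param = {
#         offset, has_node_name, (char *)node_name
#     };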
def gen_event_send(name, arg_type):
# FIXME: Our declaration of local variables (and of 'errp' in the
# parameter list) can collide with exploded members of the event's
# data type passed in as parameters. If this collision ever hits in
# practice, we can rename our local variables with a leading _ prefix,
# or split the code into a wrapper function that creates a boxed
# 'param' object then calls another to do the real work.
ret = mcgen('''
%(proto)s
{
QDict *qmp;
Error *err = NULL;
QMPEventFuncEmit emit;
''',
proto=gen_event_send_proto(name, arg_type))
if arg_type and arg_type.members:
ret += mcgen('''
QmpOutputVisitor *qov;
Visitor *v;
''')
ret += gen_param_var(arg_type)
ret += mcgen('''
emit = qmp_event_get_func_emit();
if (!emit) {
return;
}
qmp = qmp_event_build_dict("%(name)s");
''',
name=name)
if arg_type and arg_type.members:
ret += mcgen('''
qov = qmp_output_visitor_new();
v = qmp_output_get_visitor(qov);
visit_start_struct(v, "%(name)s", NULL, 0, &err);
if (err) {
goto out;
}
visit_type_%(c_name)s_members(v, &param, &err);
if (!err) {
visit_check_struct(v, &err);
}
visit_end_struct(v);
if (err) {
goto out;
}
qdict_put_obj(qmp, "data", qmp_output_get_qobject(qov));
''',
name=name, c_name=arg_type.c_name())
ret += mcgen('''
emit(%(c_enum)s, qmp, &err);
''',
c_enum=c_enum_const(event_enum_name, name))
if arg_type and arg_type.members:
ret += mcgen('''
out:
qmp_output_visitor_cleanup(qov);
''')
ret += mcgen('''
error_propagate(errp, err);
QDECREF(qmp);
}
''')
return ret
class QAPISchemaGenEventVisitor(QAPISchemaVisitor):
def __init__(self):
self.decl = None
self.defn = None
self._event_names = None
def visit_begin(self, schema):
self.decl = ''
self.defn = ''
self._event_names = []
def visit_end(self):
self.decl += gen_enum(event_enum_name, self._event_names)
self.defn += gen_enum_lookup(event_enum_name, self._event_names)
self._event_names = None
def visit_event(self, name, info, arg_type):
self.decl += gen_event_send_decl(name, arg_type)
self.defn += gen_event_send(name, arg_type)
self._event_names.append(name)
(input_file, output_dir, do_c, do_h, prefix, dummy) = parse_command_line()
c_comment = '''
/*
* schema-defined QAPI event functions
*
* Copyright (c) 2014 Wenchao Xia
*
* Authors:
* Wenchao Xia <wenchaoqemu@gmail.com>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
'''
h_comment = '''
/*
* schema-defined QAPI event functions
*
* Copyright (c) 2014 Wenchao Xia
*
* Authors:
* Wenchao Xia <wenchaoqemu@gmail.com>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
'''
(fdef, fdecl) = open_output(output_dir, do_c, do_h, prefix,
'qapi-event.c', 'qapi-event.h',
c_comment, h_comment)
fdef.write(mcgen('''
#include "qemu/osdep.h"
#include "qemu-common.h"
#include "%(prefix)sqapi-event.h"
#include "%(prefix)sqapi-visit.h"
#include "qapi/qmp-output-visitor.h"
#include "qapi/qmp-event.h"
''',
prefix=prefix))
fdecl.write(mcgen('''
#include "qapi/error.h"
#include "qapi/qmp/qdict.h"
#include "%(prefix)sqapi-types.h"
''',
prefix=prefix))
event_enum_name = c_name(prefix + "QAPIEvent", protect=False)
schema = QAPISchema(input_file)
gen = QAPISchemaGenEventVisitor()
schema.visit(gen)
fdef.write(gen.defn)
fdecl.write(gen.decl)
close_output(fdef, fdecl)
|
afaerber/qemu-cpu
|
scripts/qapi-event.py
|
Python
|
gpl-2.0
| 5,197 | 0 |
from Cython.Compiler import Options
from Cython.Compiler import PyrexTypes
from Cython.Compiler.Visitor import CythonTransform
from Cython.Compiler.StringEncoding import EncodedString
from Cython.Compiler.AutoDocTransforms import (
ExpressionWriter as BaseExpressionWriter,
AnnotationWriter as BaseAnnotationWriter,
)
class ExpressionWriter(BaseExpressionWriter):
def visit_UnicodeNode(self, node):
self.emit_string(node)
class AnnotationWriter(ExpressionWriter, BaseAnnotationWriter):
pass
class EmbedSignature(CythonTransform):
def __init__(self, context):
super(EmbedSignature, self).__init__(context)
self.class_name = None
self.class_node = None
def _fmt_expr(self, node):
writer = ExpressionWriter()
result = writer.write(node)
# print(type(node).__name__, '-->', result)
return result
def _fmt_annotation(self, node):
writer = AnnotationWriter()
result = writer.write(node)
# print(type(node).__name__, '-->', result)
return result
def _fmt_arg(self, arg):
annotation = None
if arg.is_self_arg:
doc = arg.name # clinic: '$self'
elif arg.is_type_arg:
doc = arg.name # clinic: '$type'
else:
doc = arg.name
if arg.type is PyrexTypes.py_object_type:
annotation = None # XXX use 'Any' ?
else:
annotation = arg.type.declaration_code('', for_display=1)
#if arg.default and arg.default.is_none:
# annotation = 'Optional[%s]' % annotation
if arg.annotation:
annotation = self._fmt_annotation(arg.annotation)
if annotation:
doc = doc + (': %s' % annotation)
if arg.default:
default = self._fmt_expr(arg.default)
doc = doc + (' = %s' % default)
elif arg.default:
default = self._fmt_expr(arg.default)
doc = doc + ('=%s' % default)
return doc
def _fmt_star_arg(self, arg):
arg_doc = arg.name
if arg.annotation:
annotation = self._fmt_annotation(arg.annotation)
arg_doc = arg_doc + (': %s' % annotation)
return arg_doc
def _fmt_arglist(self, args,
npoargs=0, npargs=0, pargs=None,
nkargs=0, kargs=None,
hide_self=False):
arglist = []
for arg in args:
if not hide_self or not arg.entry.is_self_arg:
arg_doc = self._fmt_arg(arg)
arglist.append(arg_doc)
if pargs:
arg_doc = self._fmt_star_arg(pargs)
arglist.insert(npargs + npoargs, '*%s' % arg_doc)
elif nkargs:
arglist.insert(npargs + npoargs, '*')
if npoargs:
arglist.insert(npoargs, '/')
if kargs:
arg_doc = self._fmt_star_arg(kargs)
arglist.append('**%s' % arg_doc)
return arglist
def _fmt_ret_type(self, ret):
if ret is PyrexTypes.py_object_type:
return None
else:
return ret.declaration_code("", for_display=1)
def _fmt_signature(self, cls_name, func_name, args,
npoargs=0, npargs=0, pargs=None,
nkargs=0, kargs=None,
return_expr=None,
return_type=None, hide_self=False):
arglist = self._fmt_arglist(args,
npoargs, npargs, pargs,
nkargs, kargs,
hide_self=hide_self)
arglist_doc = ', '.join(arglist)
func_doc = '%s(%s)' % (func_name, arglist_doc)
if cls_name:
func_doc = '%s.%s' % (cls_name, func_doc)
ret_doc = None
if return_expr:
ret_doc = self._fmt_annotation(return_expr)
elif return_type:
ret_doc = self._fmt_ret_type(return_type)
if ret_doc:
docfmt = '%s -> %s' # clinic: '%s -> (%s)'
func_doc = docfmt % (func_doc, ret_doc)
return func_doc
def _embed_signature(self, signature, node_doc):
if node_doc:
docfmt = "%s\n%s" # clinic: "%s\n--\n\n%s
return docfmt % (signature, node_doc)
else:
return signature
def __call__(self, node):
if not Options.docstrings:
return node
else:
return super(EmbedSignature, self).__call__(node)
def visit_ClassDefNode(self, node):
oldname = self.class_name
oldclass = self.class_node
self.class_node = node
try:
# PyClassDefNode
self.class_name = node.name
except AttributeError:
# CClassDefNode
self.class_name = node.class_name
self.visitchildren(node)
self.class_name = oldname
self.class_node = oldclass
return node
def visit_LambdaNode(self, node):
        # lambda expressions do not have a signature or inner functions
return node
def visit_DefNode(self, node):
if not self.current_directives['embedsignature']:
return node
is_constructor = False
hide_self = False
if node.entry.is_special:
is_constructor = self.class_node and node.name == '__init__'
if not is_constructor:
return node
class_name, func_name = None, self.class_name
hide_self = True
else:
class_name, func_name = self.class_name, node.name
npoargs = getattr(node, 'num_posonly_args', 0)
nkargs = getattr(node, 'num_kwonly_args', 0)
npargs = len(node.args) - nkargs - npoargs
signature = self._fmt_signature(
class_name, func_name, node.args,
npoargs, npargs, node.star_arg,
nkargs, node.starstar_arg,
return_expr=node.return_type_annotation,
return_type=None, hide_self=hide_self)
if signature:
if is_constructor:
doc_holder = self.class_node.entry.type.scope
else:
doc_holder = node.entry
if doc_holder.doc is not None:
old_doc = doc_holder.doc
elif not is_constructor and getattr(node, 'py_func', None) is not None:
old_doc = node.py_func.entry.doc
else:
old_doc = None
new_doc = self._embed_signature(signature, old_doc)
doc_holder.doc = EncodedString(new_doc)
if not is_constructor and getattr(node, 'py_func', None) is not None:
node.py_func.entry.doc = EncodedString(new_doc)
return node
def visit_CFuncDefNode(self, node):
if not self.current_directives['embedsignature']:
return node
if not node.overridable: # not cpdef FOO(...):
return node
signature = self._fmt_signature(
self.class_name, node.declarator.base.name,
node.declarator.args,
return_type=node.return_type)
if signature:
if node.entry.doc is not None:
old_doc = node.entry.doc
elif getattr(node, 'py_func', None) is not None:
old_doc = node.py_func.entry.doc
else:
old_doc = None
new_doc = self._embed_signature(signature, old_doc)
node.entry.doc = EncodedString(new_doc)
py_func = getattr(node, 'py_func', None)
if py_func is not None:
py_func.entry.doc = EncodedString(new_doc)
return node
def visit_PropertyNode(self, node):
if not self.current_directives['embedsignature']:
return node
entry = node.entry
body = node.body
prop_name = entry.name
type_name = None
if entry.visibility == 'public':
# property synthesised from a cdef public attribute
type_name = entry.type.declaration_code("", for_display=1)
if not entry.type.is_pyobject:
type_name = "'%s'" % type_name
elif entry.type.is_extension_type:
type_name = entry.type.module_name + '.' + type_name
if type_name is None:
for stat in body.stats:
if stat.name != '__get__':
continue
cls_name = self.class_name
if cls_name:
prop_name = '%s.%s' % (cls_name, prop_name)
ret_annotation = stat.return_type_annotation
if ret_annotation:
type_name = self._fmt_annotation(ret_annotation)
if type_name is not None:
signature = '%s: %s' % (prop_name, type_name)
new_doc = self._embed_signature(signature, entry.doc)
entry.doc = EncodedString(new_doc)
return node
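# Hedged illustration (editor addition, not part of the original transform):
# for a function definition such as
#     def clip(x: float, lo: float = 0.0, hi: float = 1.0) -> float: ...
# the docstring produced by _embed_signature() starts with a line like
#     clip(x: float, lo: float = 0.0, hi: float = 1.0) -> float
# followed, after a newline, by the original docstring if one exists.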
# Monkeypatch EmbedSignature transform
from Cython.Compiler import AutoDocTransforms
AutoDocTransforms.EmbedSignature = EmbedSignature
|
mpi4py/mpi4py
|
conf/cyautodoc.py
|
Python
|
bsd-2-clause
| 9,170 | 0.000545 |
# -*- coding: utf-8 -*-
from warnings import warn
import numpy as np
import matplotlib.pyplot as plt
from ....lib.utilities import isSignal, isContainer
from ....lib.globals import FdpWarning
from ...fft import Fft
def fft(obj, *args, **kwargs):
"""
Calculate FFT(s) for signal or container.
Return Fft instance from classes/fft.py
"""
# default to offsetminimum=True for BES ffts
offsetminimum = kwargs.pop('offsetminimum', True)
normalizetodc = kwargs.pop('normalizetodc', True)
if isSignal(obj):
return Fft(obj,
offsetminimum=offsetminimum,
normalizetodc=normalizetodc,
*args, **kwargs)
elif isContainer(obj):
signalnames = obj.listSignals()
ffts = []
for sname in signalnames:
signal = getattr(obj, sname)
ffts.append(Fft(signal,
offsetminimum=offsetminimum,
normalizetodc=normalizetodc,
*args, **kwargs))
return ffts
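def _fft_example(bes_signal):
    # Hedged usage sketch (editor addition): `bes_signal` is a signal-level
    # fdp object; assumes Fft accepts tmin/tmax keywords, as powerspectrum()
    # below implies. Returns a single Fft instance.
    return fft(bes_signal, tmin=0.25, tmax=0.26)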
def plotfft(signal, fmax=None, *args, **kwargs):
"""
Plot spectrogram
"""
if not isSignal(signal):
warn("Method valid only at signal-level", FdpWarning)
return
sigfft = fft(signal, *args, **kwargs)
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
pcm = ax.pcolormesh(sigfft.time,
sigfft.freq,
sigfft.logpsd.transpose(),
cmap=plt.cm.YlGnBu)
pcm.set_clim([sigfft.logpsd.max() - 100, sigfft.logpsd.max() - 20])
cb = plt.colorbar(pcm, ax=ax)
cb.set_label(r'$10\,\log_{10}(|FFT|^2)$ $(V^2/Hz)$')
ax.set_xlabel('Time (s)')
ax.set_ylabel('Frequency (kHz)')
tmin = kwargs.get('tmin', 0)
tmax = kwargs.get('tmax', 1.5)
ax.set_xlim([tmin, tmax])
if fmax:
ax.set_ylim([0, fmax])
ax.set_title('{} | {} | {}'.format(
sigfft.shot,
sigfft.parentname.upper(),
sigfft.signalname.upper()))
return sigfft
def powerspectrum(signal, fmax=None, *args, **kwargs):
"""
    Calculate bin-averaged power spectrum
"""
msg = "powerspectrum() will be depricated. Use Fft.binavg_psd and \
Fft.binavg_logpsd instead."
warn(msg, FdpWarning)
if not isSignal(signal):
warn("Method valid only at signal-level", FdpWarning)
return
if 'tmin' not in kwargs:
kwargs['tmin'] = 0.25
if 'tmax' not in kwargs:
kwargs['tmax'] = 0.26
if not fmax:
fmax = 250
sigfft = fft(signal, *args, **kwargs)
psd = np.square(np.absolute(sigfft.fft))
# bin-averaged PSD, in dB
sigfft.bapsd = 10 * np.log10(np.mean(psd, axis=0))
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.plot(sigfft.freq, sigfft.bapsd)
ax.set_ylabel(r'$10\,\log_{10}(|FFT|^2)$ $(V^2/Hz)$')
ax.set_xlim([0, fmax])
ax.set_xlabel('Frequency (kHz)')
ax.set_title('{} | {} | {} | {}-{} s'.format(
sigfft.shot,
sigfft.parentname.upper(),
sigfft.signalname.upper(),
kwargs['tmin'],
kwargs['tmax']))
return sigfft
|
drsmith48/fdp
|
fdp/methods/nstxu/bes/fft.py
|
Python
|
mit
| 3,236 | 0 |
# coding: utf-8
from collections import namedtuple
from pandas.io.msgpack.exceptions import * # noqa
from pandas.io.msgpack._version import version # noqa
class ExtType(namedtuple("ExtType", "code data")):
"""ExtType represents ext type in msgpack."""
def __new__(cls, code, data):
if not isinstance(code, int):
raise TypeError("code must be int")
if not isinstance(data, bytes):
raise TypeError("data must be bytes")
if not 0 <= code <= 127:
raise ValueError("code must be 0~127")
return super().__new__(cls, code, data)
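# Hedged usage sketch (editor addition): wrapping application-defined binary
# data in an extension type; the type code 42 and payload are illustrative.
def _ext_type_example():
    return ExtType(42, b"\x00\x01")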
import os # noqa
from pandas.io.msgpack._packer import Packer # noqa
from pandas.io.msgpack._unpacker import unpack, unpackb, Unpacker # noqa
def pack(o, stream, **kwargs):
"""
Pack object `o` and write it to `stream`
See :class:`Packer` for options.
"""
packer = Packer(**kwargs)
stream.write(packer.pack(o))
def packb(o, **kwargs):
"""
Pack object `o` and return packed bytes
See :class:`Packer` for options.
"""
return Packer(**kwargs).pack(o)
# aliases for compatibility with simplejson/marshal/pickle.
load = unpack
loads = unpackb
dump = pack
dumps = packb
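# Hedged usage sketch (editor addition): a packb/unpackb round trip, assuming
# the compiled _packer/_unpacker extension modules are importable.
def _roundtrip_example():
    payload = [1, 2.5, b"raw bytes"]
    assert unpackb(packb(payload)) == payload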
|
toobaz/pandas
|
pandas/io/msgpack/__init__.py
|
Python
|
bsd-3-clause
| 1,223 | 0 |
# The following has been generated automatically from src/core/qgsmaplayer.h
QgsMapLayer.LayerType = QgsMapLayerType
# monkey patching scoped based enum
QgsMapLayer.VectorLayer = QgsMapLayerType.VectorLayer
QgsMapLayer.VectorLayer.__doc__ = ""
QgsMapLayer.RasterLayer = QgsMapLayerType.RasterLayer
QgsMapLayer.RasterLayer.__doc__ = ""
QgsMapLayer.PluginLayer = QgsMapLayerType.PluginLayer
QgsMapLayer.PluginLayer.__doc__ = ""
QgsMapLayer.MeshLayer = QgsMapLayerType.MeshLayer
QgsMapLayer.MeshLayer.__doc__ = "Added in 3.2"
QgsMapLayer.VectorTileLayer = QgsMapLayerType.VectorTileLayer
QgsMapLayer.VectorTileLayer.__doc__ = "Added in 3.14"
QgsMapLayer.AnnotationLayer = QgsMapLayerType.AnnotationLayer
QgsMapLayer.AnnotationLayer.__doc__ = "Contains freeform, georeferenced annotations. Added in QGIS 3.16"
QgsMapLayerType.__doc__ = 'Types of layers that can be added to a map\n\n.. versionadded:: 3.8\n\n' + '* ``VectorLayer``: ' + QgsMapLayerType.VectorLayer.__doc__ + '\n' + '* ``RasterLayer``: ' + QgsMapLayerType.RasterLayer.__doc__ + '\n' + '* ``PluginLayer``: ' + QgsMapLayerType.PluginLayer.__doc__ + '\n' + '* ``MeshLayer``: ' + QgsMapLayerType.MeshLayer.__doc__ + '\n' + '* ``VectorTileLayer``: ' + QgsMapLayerType.VectorTileLayer.__doc__ + '\n' + '* ``AnnotationLayer``: ' + QgsMapLayerType.AnnotationLayer.__doc__
# --
QgsMapLayer.LayerFlag.baseClass = QgsMapLayer
QgsMapLayer.LayerFlags.baseClass = QgsMapLayer
LayerFlags = QgsMapLayer # dirty hack since SIP seems to introduce the flags in module
QgsMapLayer.StyleCategory.baseClass = QgsMapLayer
QgsMapLayer.StyleCategories.baseClass = QgsMapLayer
StyleCategories = QgsMapLayer # dirty hack since SIP seems to introduce the flags in module
|
rldhont/Quantum-GIS
|
python/core/auto_additions/qgsmaplayer.py
|
Python
|
gpl-2.0
| 1,706 | 0.002345 |
import matplotlib.pyplot as plt
import numpy as np
from image_funcs import *
from scipy.misc import imread, imsave
def grid(image, threshold):
    # Body not present in the original source; stubbed so the module parses.
    pass
###############################################################################
import scipy.misc
nuclei = imread('3.jpg')
nuclei = scipy.misc.imresize(nuclei, 0.05)
nuclei = np.max(nuclei, 2)
plt.imshow(nuclei)
plt.gray()
imsave('nuclei.jpg', nuclei)
################################################################################
binary = fill_holes(nuclei)
#imsave('nuclei.jpg', binary)
from skimage.exposure import rescale_intensity
rescaled_nuclei = rescale_intensity(nuclei, in_range=(np.min(nuclei),np.max(nuclei)))
new_range = tuple(np.percentile(nuclei,(2,98)))
rescaled_nuclei = rescale_intensity(nuclei, in_range=new_range)
from skimage.filter import gaussian_filter
blured = gaussian_filter(nuclei,8)
plt.imshow(blured)
highpass = nuclei - 0.8*blured
sharp = highpass + nuclei
sharp = np.floor(sharp).astype(np.uint8)
from skimage.filter import threshold_otsu
thres = threshold_otsu(rescaled_nuclei)
binary = rescaled_nuclei > thres
from skimage.filter import canny
edges = canny(sharp, sigma = 1, high_threshold = 35., low_threshold = 14.)
from scipy.ndimage.morphology import binary_dilation, binary_erosion
diamond = np.array([0,1,0,1,1,1,0,1,0], dtype=bool).reshape((3,3))
edges = double_dilation(edges, diamond)
binary = fill_holes(edges)
binary = double_erosion(binary, diamond)
imsave('bin.jpg', binary)
|
varnivey/hakoton_images
|
image_preparation/cells_search.py
|
Python
|
gpl-2.0
| 1,484 | 0.020889 |
from soppa.contrib import *
class Celery(Soppa):
def setup(self):
self.action('up', 'celery_supervisor.conf',
'{supervisor_conf_dir}celery_supervisor_{project}.conf',
handler=['supervisor.restart'],
when=lambda x: x.soppa_proc_daemon=='supervisor',)
|
futurice/fabric-deployment-helper
|
soppa/celery/__init__.py
|
Python
|
bsd-3-clause
| 311 | 0.016077 |
# Copyright (C) 2011-2015 Claas Abert
#
# This file is part of magnum.fe.
#
# magnum.fe is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# magnum.fe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with magnum.fe. If not, see <http://www.gnu.org/licenses/>.
#
# Last modified by Claas Abert, 2015-06-29
from __future__ import absolute_import
from dolfin import Function, MeshFunction, Mesh, SubMesh, CellFunction, FunctionSpaceBase, interpolate
import numpy as np
__all__ = ["WrappedMesh"]
class WrappedMesh(Mesh):
def __init__(self, *args, **kwargs):
"""
This class represents a mesh with a certain submesh and defines
methods for the fast interpolation between the two meshes.
"""
super(WrappedMesh, self).__init__(*args, **kwargs)
def _init(self, mesh_with_shell):
"""
Initialize the mesh instance.
*Arguments*
mesh_with_shell (:class:`dolfin.Mesh`)
The super mesh
"""
self.with_shell = mesh_with_shell
# Cache DOF mappings
# The signature of the element is used as key
self._mappings = {}
def cut(self, f, **kwargs):
"""
Takes a function defined on the super mesh and returns a truncated
function defined on the sub mesh.
*Arguments*
f (:class:`dolfin.Function`)
The function on the super mesh.
*Returns*
:class:`dolfin.Function`
The function on the sub mesh.
"""
mapping = self._get_mapping(f.function_space())
result = Function(mapping['Vsub'])
result.vector()[:] = f.vector().array()[mapping['map']]
result.rename(f.name(), f.label())
return result
def expand(self, f, target = None):
"""
Takes a function defined on the sub mesh and returns a function
defined on the super mesh with unknown values set to zero.
*Arguments*
f (:class:`dolfin.Function`)
The function on the sub mesh.
*Returns*
The function on the super mesh.
"""
mapping = self._get_mapping(f.function_space())
if target is None:
target = Function(mapping['Vsuper'])
target.rename(f.name(), f.label())
target.vector()[mapping['map']] = f.vector().array()
return target
def _get_mapping(self, V):
element = V.ufl_element()
key = V.element().signature()
if not self._mappings.has_key(key):
Vsub = FunctionSpaceBase(self, element)
Vsuper = FunctionSpaceBase(self.with_shell, element)
fsuper = Function(Vsuper)
fsuper.vector()[:] = np.arange(fsuper.vector().size(), dtype=float)
fsub = interpolate(fsuper, Vsub)
self._mappings[key] = {
'map': np.round(fsub.vector().array()).astype(np.uint64),
'Vsub': Vsub,
'Vsuper': Vsuper
}
return self._mappings[key]
@staticmethod
def create(mesh, domain_ids, invert=False):
"""
Creates a wrapped mesh from a super mesh for a given collection
of domain IDs.
*Arguments*
mesh (:class:`dolfin.Mesh`)
The mesh.
domain_ids (:class:`[int]`)
List of domain IDs
invert (:class:`bool`)
Invert list of domain IDs
*Returns*
:class:`WrappedMesh`
The wrapped mesh
"""
if invert or isinstance(domain_ids, list) or isinstance(domain_ids, tuple):
if isinstance(domain_ids, int): domain_ids = (domain_ids,)
subdomains = MeshFunction('size_t', mesh, 3, mesh.domains())
combined_subdomains = CellFunction("size_t", mesh, 0)
for domain_id in domain_ids:
combined_subdomains.array()[subdomains.array() == domain_id] = 1
submesh = SubMesh(mesh, combined_subdomains, 0 if invert else 1)
else:
submesh = SubMesh(mesh, domain_ids)
submesh.__class__ = WrappedMesh
submesh._init(mesh)
return submesh
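# Hedged usage sketch (editor addition): `mesh_with_domains` is a dolfin Mesh
# whose cell domains carry IDs and `f_super` a Function defined on it. Note
# that cut()/expand() share an index map that _get_mapping() builds by
# interpolating a function whose values are the super-space DOF indices.
def _wrapped_mesh_example(mesh_with_domains, f_super):
    wrapped = WrappedMesh.create(mesh_with_domains, 1)  # domain ID 1
    f_sub = wrapped.cut(f_super)      # truncate onto the submesh
    return wrapped.expand(f_sub)      # zero-padded back onto the super mesh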
|
micromagnetics/magnum.fe
|
site-packages/magnumfe/common/wrapped_mesh.py
|
Python
|
lgpl-3.0
| 4,232 | 0.007798 |
import unittest
from katas.kyu_7.bug_fixing_unfinished_loop import create_array
class CreateArrayTestCase(unittest.TestCase):
def test_equals(self):
self.assertEqual(create_array(1), [1])
def test_equals_2(self):
self.assertEqual(create_array(2), [1, 2])
def test_equals_3(self):
self.assertEqual(create_array(3), [1, 2, 3])
def test_equals_4(self):
self.assertEqual(create_array(4), [1, 2, 3, 4])
def test_equals_5(self):
self.assertEqual(create_array(5), [1, 2, 3, 4, 5])
|
the-zebulan/CodeWars
|
tests/kyu_7_tests/test_bug_fixing_unfinished_loop.py
|
Python
|
mit
| 541 | 0 |
import os
import sys
def getRootDir():
cwd = os.getcwd()
print(cwd)
pos = cwd.find("hm65Vault")
print(pos)
pos += len("hm65Vault")
cwd = cwd[0:pos]
    print(cwd)
    return cwd
getRootDir()
sys.exit(0)
|
humanist1965/hm65Vault
|
test.py
|
Python
|
gpl-3.0
| 212 | 0.04717 |
from datetime import datetime as dt
from functools import reduce
import transmissionrpc
from config import config
TRANSMISSION_ENABLED = config['TRANSMISSION_ENABLED']
TRANS_HOST = config['TRANS_HOST']
TRANS_PORT = config['TRANS_PORT']
TRANS_USER = config['TRANS_USER']
TRANS_PASS = config['TRANS_PASS']
TRANS_PUBLIC_RATIO_LIMIT = config['TRANS_PUBLIC_RATIO_LIMIT']
TRANS_ANIME_RATIO_LIMIT = config['TRANS_ANIME_RATIO_LIMIT']
def update_nyaa_torrents(host, port, user, password, ratio=TRANS_ANIME_RATIO_LIMIT):
tc = transmissionrpc.Client(host, port=port, user=user, password=password)
# All torrents
torrents = tc.get_torrents()
# Only public torrents
torrents = filter(lambda t: not t.isPrivate, torrents)
# Only torrents with matching trackers
trackers = ['nyaa', 'wakku']
torrents = list(filter(lambda t: reduce(lambda result, x: result or any(s in x['announce'] for s in trackers), t.trackers, False) is True, torrents))
# Torrent ids
ids = list(map(lambda t: t.id, torrents))
# Update torrents seed ratio limit and mode
if ids:
tc.change_torrent(ids, seedRatioLimit=ratio, seedRatioMode=1)
return ids
def update_global_ratio_public_torrents(host, port, user, password, ratio):
tc = transmissionrpc.Client(host, port=port, user=user, password=password)
# All torrents
torrents = tc.get_torrents()
# Only public torrents with a global seed ratio mode
torrents = filter(lambda t: not t.isPrivate and t.seed_ratio_mode == 'global', torrents)
# Torrent ids
ids = list(map(lambda t: t.id, torrents))
# Update torrents seed ratio limit and mode
if ids:
tc.change_torrent(ids, seedRatioLimit=ratio, seedRatioMode=1)
return ids
def stop_completed_public_seeding_torrents(host, port, user, password):
tc = transmissionrpc.Client(host, port=port, user=user, password=password)
# All torrents
torrents = tc.get_torrents()
# Only public, seeding torrents
torrents = filter(lambda t: not t.isPrivate and t.status == 'seeding' and t.seed_ratio_mode == 'global', torrents)
# Torrent ids
ids = list(map(lambda t: t.id, torrents))
# Stop torrents
if ids:
tc.stop_torrent(ids)
return ids
def delete_completed_public_stopped_torrents(host, port, user, password):
tc = transmissionrpc.Client(host, port=port, user=user, password=password)
# All torrents
torrents = tc.get_torrents()
    # Only public, stopped torrents
torrents = filter(lambda t: not t.isPrivate and t.status == 'stopped', torrents)
# Torrents that are at least 2 hours complete
torrents = filter(lambda t: (dt.now() - t.date_done).seconds > 7200, torrents)
# Torrent ids
ids = list(map(lambda t: t.id, torrents))
    # Remove torrents and delete their data
if ids:
tc.remove_torrent(ids, delete_data=True)
return ids
num_changed = len(update_global_ratio_public_torrents(TRANS_HOST, TRANS_PORT, TRANS_USER, TRANS_PASS, TRANS_PUBLIC_RATIO_LIMIT))
num_changed += len(update_nyaa_torrents(TRANS_HOST, TRANS_PORT, TRANS_USER, TRANS_PASS, TRANS_ANIME_RATIO_LIMIT))
num_stopped = len(stop_completed_public_seeding_torrents(TRANS_HOST, TRANS_PORT, TRANS_USER, TRANS_PASS))
num_deleted = len(delete_completed_public_stopped_torrents(TRANS_HOST, TRANS_PORT, TRANS_USER, TRANS_PASS))
print("[%s] Torrents changed: %d; stopped: %d; deleted: %d" % (dt.now().strftime('%Y-%m-%d %H:%M:%S'), num_changed, num_stopped, num_deleted))
|
sgtsquiggs/PlexThrottle
|
TransmissionCleanUp.py
|
Python
|
unlicense
| 3,486 | 0.003155 |
#-------------------------------------------------------------------------------
# Copyright 2017 Cognizant Technology Solutions
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#-------------------------------------------------------------------------------
'''
Created on Jun 23, 2016
@author: 463188
'''
from time import mktime
from dateutil import parser
from ....core.BaseAgent import BaseAgent
class RundeckAgent(BaseAgent):
@BaseAgent.timed
def process(self):
self.baseLogger.info('Inside process')
getProjects = self.config.get("baseEndPoint", '')
authtoken = self.getCredential("authtoken")
ExecutionsBaseEndPoint = self.config.get("executionsBaseEndPoint", '')
startFrom = self.config.get("startFrom", '')
startFrom = parser.parse(startFrom)
startFrom = mktime(startFrom.timetuple()) + startFrom.microsecond/1000000.0
startFrom = long(startFrom * 1000)
startFrom = str(startFrom)
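        # Editor note: the configured start date is converted to an epoch
        # timestamp in milliseconds, the format passed to the executions
        # API's `begin` query parameter below.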
getProjectsUrl = getProjects+"?authtoken="+authtoken
projects = self.getResponse(getProjectsUrl, 'GET', None, None, None)
responseTemplate = self.getResponseTemplate()
data = []
for project in range(len(projects)):
ProjName = projects[project]["name"]
if not self.tracking.get(ProjName, ''):
getProjectDetailsUrl = ExecutionsBaseEndPoint+"/"+ProjName+"/executions?authtoken="+authtoken+"&begin="+startFrom
else:
TimeStamp = self.tracking.get(ProjName, '')
TimeStamp = str(TimeStamp)
getProjectDetailsUrl = ExecutionsBaseEndPoint+"/"+ProjName+"/executions?authtoken="+authtoken+"&begin="+TimeStamp
rundeckProjectDetails = self.getResponse(getProjectDetailsUrl, 'GET', None, None, None)
for executions in rundeckProjectDetails["executions"]:
data += self.parseResponse(responseTemplate, executions)
self.tracking[ProjName] = rundeckProjectDetails["executions"][0]["date-ended"]["unixtime"] + 1
self.publishToolsData(data)
self.updateTrackingJson(self.tracking)
if __name__ == "__main__":
RundeckAgent()
|
CognizantOneDevOps/Insights
|
PlatformAgents/com/cognizant/devops/platformagents/agents/deployment/rundeck/RundeckAgent.py
|
Python
|
apache-2.0
| 2,690 | 0.005204 |
from itertools import groupby
from lxml import etree
def xml2d(e):
"""Convert an etree into a dict structure
@type e: etree.Element
@param e: the root of the tree
@return: The dictionary representation of the XML tree
"""
def _xml2d(e):
kids = dict(e.attrib)
# if e.text:
# kids['__text__'] = e.text
# if e.tail:
# kids['__tail__'] = e.tail
for k, g in groupby(e, lambda x: x.tag):
g = [ _xml2d(x) for x in g ]
kids[k]= g
return kids
return { e.tag : _xml2d(e) }
def d2xml(d):
"""convert dict to xml
1. The top level d must contain a single entry i.e. the root element
    2. Keys of the dictionary become subelements or attributes
    3. If a value is a simple string, then the key is an attribute
    4. If a value is a dict, then the key is a subelement
    5. If a value is a list, then the key is a set of subelements
a = { 'module' : {'tag' : [ { 'name': 'a', 'value': 'b'},
{ 'name': 'c', 'value': 'd'},
],
'gobject' : { 'name': 'g', 'type':'xx' },
'uri' : 'test',
}
}
>>> d2xml(a)
<module uri="test">
<gobject type="xx" name="g"/>
<tag name="a" value="b"/>
<tag name="c" value="d"/>
</module>
@type d: dict
@param d: A dictionary formatted as an XML document
@return: A etree Root element
"""
def _d2xml(d, p):
for k,v in d.items():
if isinstance(v,dict):
node = etree.SubElement(p, k)
_d2xml(v, node)
elif isinstance(v,list):
for item in v:
node = etree.SubElement(p, k)
_d2xml(item, node)
elif k == "__text__":
p.text = v
elif k == "__tail__":
p.tail = v
else:
p.set(k, v)
k,v = d.items()[0]
node = etree.Element(k)
_d2xml(v, node)
return node
|
IndiciumSRL/wirecurly
|
tests/test_serialization/utils.py
|
Python
|
mpl-2.0
| 2,146 | 0.007922 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-08 20:49
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cooperativa', '0002_auto_20171005_1404'),
]
operations = [
migrations.AlterField(
model_name='cooperativa',
name='direccion',
field=models.CharField(max_length=255),
),
]
|
adbetin/organico-cooperativas
|
cooperativa/migrations/0003_auto_20171008_1549.py
|
Python
|
gpl-3.0
| 464 | 0 |
# encoding= utf-8
##
# VisibilityCheck.
# <p>
# Description of the test.
#
# @data INSTANCE_ID [String] instance id
##
from qtaste import *
import time
# update in order to cope with the javaGUI extension declared in your testbed configuration.
javaguiMI = testAPI.getJavaGUI(INSTANCE_ID=testData.getValue("JAVAGUI_INSTANCE_NAME"))
subtitler = testAPI.getSubtitler()
importTestScript("TabbedPaneSelection")
def step1():
"""
@step Description of the actions done for this step
@expected Description of the expected result
"""
doSubSteps(TabbedPaneSelection.changeTabByTitle)
subtitler.setSubtitle("Click on the button to make the component invisible")
time.sleep(1)
javaguiMI.clickOnButton("VISIBILITY_BUTTON")
time.sleep(1)
if javaguiMI.isVisible("VISIBILITY_TEXT") != False:
testAPI.stop(Status.FAIL, "The component should not be visible")
try:
subtitler.setSubtitle("Try to insert a value in the invible text field", 10)
javaguiMI.setText("VISIBILITY_TEXT", "pas bien")
testAPI.stop(Status.FAIL, "The component should not be visible and the setText() should failed")
except :
javaguiMI.clickOnButton("VISIBILITY_BUTTON")
doStep(step1)
|
remybaranx/qtaste
|
demo/TestSuites/PlayBack/VisibilityCheck/TestScript.py
|
Python
|
gpl-3.0
| 1,239 | 0.008071 |
#-*- coding: utf-8 -*-
__author__ = 'tsbc'
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
|
radiateboy/Demo126mail
|
testcase/public/__init__.py
|
Python
|
gpl-2.0
| 98 | 0.020408 |
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2010, 2012-2013 Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""A base class for debugger commands.
This file is the one module in this directory that isn't a real command
and commands.py needs to take care to avoid instantiating this class
or storing it in its list of known debugger commands.
"""
NotImplementedMessage = "This method must be overridden in a subclass"
from import_relative import import_relative
__all__ = ['DebuggerCommand']
class DebuggerCommand:
"""Base Class for Debugger commands. We pull in some helper
functions for command from module cmdfns."""
category = 'misc'
def __init__(self, proc):
"""proc contains the command processor object that this
command is invoked through. A debugger field gives access to
the stack frame and I/O."""
self.proc = proc
# Convenience class access. We don't expect that either core
# or debugger will change over the course of the program
# execution like errmsg(), msg(), and msg_nocr() might. (See
# the note below on these latter 3 methods.)
#
self.core = proc.core
self.debugger = proc.debugger
self.settings = self.debugger.settings
return
aliases = ('alias1', 'alias2..',)
name = 'YourCommandName'
# Note for errmsg, msg, and msg_nocr we don't want to simply make
# an assignment of method names like self.msg = self.debugger.intf.msg,
# because we want to allow the interface (intf) to change
# dynamically. That is, the value of self.debugger may change
    # in the course of the program and if we made such a method assignment
# we wouldn't pick up that change in our self.msg
def errmsg(self, msg, opts={}):
""" Convenience short-hand for self.debugger.intf.errmsg """
try:
return(self.debugger.intf[-1].errmsg(msg))
except EOFError:
# FIXME: what do we do here?
pass
return None
def msg(self, msg, opts={}):
""" Convenience short-hand for self.debugger.intf.msg """
try:
return(self.debugger.intf[-1].msg(msg))
except EOFError:
# FIXME: what do we do here?
pass
return None
def msg_nocr(self, msg, opts={}):
""" Convenience short-hand for self.debugger.intf.msg_nocr """
try:
return(self.debugger.intf[-1].msg_nocr(msg))
except EOFError:
# FIXME: what do we do here?
pass
return None
def run(self, args):
""" The method that implements the debugger command.
Help on the command comes from the docstring of this method.
"""
raise NotImplementedError(NotImplementedMessage)
pass
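# Hedged illustration (editor addition): a minimal concrete command subclass.
# The name, alias, and behavior are illustrative only; real commands live in
# sibling modules, which commands.py collects.
class ExampleCommand(DebuggerCommand):
    name = 'example'
    aliases = ('ex',)
    def run(self, args):
        """**example** *args...* - echo the arguments (illustrative)."""
        self.msg("example ran with %r" % (args,))
    pass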
if __name__ == '__main__':
from import_relative import import_relative
mock = import_relative('mock')
d, cp = mock.dbg_setup()
dd = DebuggerCommand(cp)
dd.msg("hi")
dd.errmsg("Don't do that")
pass
|
kamawanu/pydbgr
|
trepan/bwprocessor/command/base_cmd.py
|
Python
|
gpl-3.0
| 3,660 | 0.001366 |
from rlib import jit
from som.primitives.integer_primitives import IntegerPrimitivesBase as _Base
from som.vmobjects.double import Double
from som.vmobjects.integer import Integer
from som.vmobjects.primitive import Primitive, TernaryPrimitive
def get_printable_location_up(block_method):
from som.vmobjects.method_bc import BcAbstractMethod
assert isinstance(block_method, BcAbstractMethod)
return "to:do: " + block_method.merge_point_string()
jitdriver_int = jit.JitDriver(
name="to:do: with int",
greens=["block_method"],
reds="auto",
# virtualizables=['frame'],
is_recursive=True,
get_printable_location=get_printable_location_up,
)
jitdriver_double = jit.JitDriver(
name="to:do: with double",
greens=["block_method"],
reds="auto",
# virtualizables=['frame'],
is_recursive=True,
get_printable_location=get_printable_location_up,
)
def get_printable_location_down(block_method):
from som.vmobjects.method_bc import BcAbstractMethod
assert isinstance(block_method, BcAbstractMethod)
return "downToto:do: " + block_method.merge_point_string()
jitdriver_int_down = jit.JitDriver(
name="downTo:do: with int",
greens=["block_method"],
reds="auto",
# virtualizables=['frame'],
is_recursive=True,
get_printable_location=get_printable_location_down,
)
jitdriver_double_down = jit.JitDriver(
name="downTo:do: with double",
greens=["block_method"],
reds="auto",
# virtualizables=['frame'],
is_recursive=True,
get_printable_location=get_printable_location_down,
)
def _to_do_int(i, by_increment, top, block, block_method):
assert isinstance(i, int)
assert isinstance(top, int)
while i <= top:
jitdriver_int.jit_merge_point(block_method=block_method)
block_method.invoke_2(block, Integer(i))
i += by_increment
def _to_do_double(i, by_increment, top, block, block_method):
assert isinstance(i, int)
assert isinstance(top, float)
while i <= top:
jitdriver_double.jit_merge_point(block_method=block_method)
block_method.invoke_2(block, Integer(i))
i += by_increment
def _to_do(rcvr, limit, block):
block_method = block.get_method()
i = rcvr.get_embedded_integer()
if isinstance(limit, Double):
_to_do_double(i, 1, limit.get_embedded_double(), block, block_method)
else:
_to_do_int(i, 1, limit.get_embedded_integer(), block, block_method)
return rcvr
def _to_by_do(_ivkbl, stack, stack_ptr):
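    # Editor note: bytecode primitives receive the operand stack and the
    # current stack pointer; the arguments (block, increment, limit) are
    # popped top-down, and the receiver is left on the stack as the result.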
block = stack[stack_ptr]
stack[stack_ptr] = None
stack_ptr -= 1
by_increment = stack[stack_ptr]
stack[stack_ptr] = None
stack_ptr -= 1
limit = stack[stack_ptr]
stack[stack_ptr] = None
stack_ptr -= 1
block_method = block.get_method()
self = stack[stack_ptr]
i = self.get_embedded_integer()
if isinstance(limit, Double):
_to_do_double(
i,
by_increment.get_embedded_integer(),
limit.get_embedded_double(),
block,
block_method,
)
else:
_to_do_int(
i,
by_increment.get_embedded_integer(),
limit.get_embedded_integer(),
block,
block_method,
)
return stack_ptr
def _down_to_do_int(i, by_increment, bottom, block, block_method):
assert isinstance(i, int)
assert isinstance(bottom, int)
while i >= bottom:
jitdriver_int_down.jit_merge_point(block_method=block_method)
block_method.invoke_2(block, Integer(i))
i -= by_increment
def _down_to_do_double(i, by_increment, bottom, block, block_method):
assert isinstance(i, int)
assert isinstance(bottom, float)
while i >= bottom:
jitdriver_double_down.jit_merge_point(block_method=block_method)
block_method.invoke_2(block, Integer(i))
i -= by_increment
def _down_to_do(rcvr, limit, block):
block_method = block.get_method()
i = rcvr.get_embedded_integer()
if isinstance(limit, Double):
_down_to_do_double(i, 1, limit.get_embedded_double(), block, block_method)
else:
_down_to_do_int(i, 1, limit.get_embedded_integer(), block, block_method)
return rcvr
class IntegerPrimitives(_Base):
def install_primitives(self):
_Base.install_primitives(self)
self._install_instance_primitive(TernaryPrimitive("to:do:", _to_do))
self._install_instance_primitive(TernaryPrimitive("downTo:do:", _down_to_do))
self._install_instance_primitive(Primitive("to:by:do:", _to_by_do))
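# Hedged illustration (editor addition): the SOM-level loops these primitives
# implement, with illustrative bodies:
#   1 to: 5 do: [ :i | sum := sum + i ].
#   10 downTo: 1 do: [ :i | total := total + i ].
#   0 to: 100 by: 5 do: [ :i | ticks := ticks + 1 ].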
|
SOM-st/PySOM
|
src/som/primitives/bc/integer_primitives.py
|
Python
|
mit
| 4,603 | 0.000652 |
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.core.plot.memory Contains the MemoryPlotter class, used for creating plots of the memory consumption
# of a SKIRT simulation as a function of time.
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import standard modules
import numpy as np
import matplotlib.pyplot as plt
# Import the relevant PTS classes and modules
from ..basics.map import Map
from .plotter import Plotter
from ..tools.logging import log
from ..tools import filesystem as fs
# -----------------------------------------------------------------
class MemoryPlotter(Plotter):
"""
This class ...
"""
def __init__(self):
"""
The constructor ...
:return:
"""
# Call the constructor of the base class
super(MemoryPlotter, self).__init__()
# -- Attributes --
# A data structure to store the memory (de)allocation information
self.allocation = None
# -----------------------------------------------------------------
@staticmethod
def default_input():
"""
This function ...
:return:
"""
return "memory.dat"
# -----------------------------------------------------------------
def prepare_data(self):
"""
This function ...
:return:
"""
# Inform the user
log.info("Preparing the input data into plottable format...")
# Get the number of processes
ranks = np.unique(self.table["Process rank"])
assert len(ranks) == max(ranks) + 1
processes = len(ranks)
# Initialize the data structure to contain the memory usage information in plottable format
self.data = [Map({"times": [], "memory": []}) for i in range(processes)]
# Loop over the different entries in the memory table
for i in range(len(self.table)):
# Get the process rank
rank = self.table["Process rank"][i]
# Get the time and memory usage
time = self.table["Simulation time"][i]
memory = self.table["Memory usage"][i]
# Add the data point to the data structure
self.data[rank].times.append(time)
self.data[rank].memory.append(memory)
# Check whether (de)allocation information is present in the memory table
if "Array (de)allocation" in self.table.colnames:
# Initialize the data structure for plotting the memory usage of the root process and the memory
# allocation curve
self.allocation = Map({"times": [], "allocation": [], "cumulative": []})
# Get the mask covering entries that do not contain array (de)allocation information
mask = self.table["Array (de)allocation"].mask
# Check whether the first entry of the table corresponds to the root process
assert self.table["Process rank"][0] == 0
# Create a variable to store the cumulative sum of allocated memory
cumulative_sum = 0.0
# Loop over the different entries in the memory table
for i in range(len(self.table)):
# Get the process rank
rank = self.table["Process rank"][i]
# Only add the contributions from the root process
if rank > 0: break
# If the entry is masked because it does not contain memory allocation information, skip it
if mask[i]: continue
# Get the time and the amount of (de)allocated memory
time = self.table["Simulation time"][i]
allocation = self.table["Array (de)allocation"][i]
# Add the allocated memory to the sum
cumulative_sum += allocation
# Add the data point to the data structure
self.allocation.times.append(time)
self.allocation.allocation.append(allocation)
self.allocation.cumulative.append(cumulative_sum)
# -----------------------------------------------------------------
def plot(self):
"""
This function ...
:return:
"""
# Inform the user
log.info("Making the plots...")
# Make a plot of the memory usage as a function of time
self.plot_memory()
# Make a plot of the memory (de)allocation information, if present
if self.allocation is not None: self.plot_allocation()
# -----------------------------------------------------------------
def plot_memory(self):
"""
This function ...
:return:
"""
# Determine the path to the plot file
plot_path = fs.join(self.output_path, "memory.pdf")
# Initialize figure
plt.figure()
plt.clf()
# Loop over the different processes
for rank in range(len(self.data)):
# Name of the current process
process = "P" + str(rank)
# Plot the memory usage
plt.plot(self.data[rank].times, self.data[rank].memory, label=process)
# Set the axis labels
plt.xlabel("Time (s)", fontsize='large')
plt.ylabel("Memory usage (GB)", fontsize='large')
# Set the plot title
plt.title("Memory consumption")
# Set the legend
if len(self.data) > 16: plt.legend(loc='upper center', ncol=8, bbox_to_anchor=(0.5, -0.1), prop={'size': 8})
else: plt.legend(loc='lower right', ncol=4, prop={'size': 8})
# Save the figure
plt.savefig(plot_path, bbox_inches='tight', pad_inches=0.25)
plt.close()
# -----------------------------------------------------------------
def plot_allocation(self):
"""
This function ...
:return:
"""
# Determine the path to the plot file
plot_path = fs.join(self.output_path, "allocation.pdf")
# Initialize figure
plt.figure()
plt.clf()
# Plot the memory usage of the root process
plt.plot(self.data[0].times, self.data[0].memory, label="total memory usage")
# Plot the memory allocation of the root process
plt.step(self.allocation.times, self.allocation.cumulative, where="post", linestyle="--", label="allocated array memory")
# Set the axis labels
plt.xlabel("Time (s)", fontsize='large')
plt.ylabel("Memory usage (GB)", fontsize='large')
# Set the plot title
plt.title("Memory (de)allocation")
# Set the legend
plt.legend(loc='lower right', prop={'size': 8})
# Save the figure
plt.savefig(plot_path, bbox_inches='tight', pad_inches=0.25)
plt.close()
# -----------------------------------------------------------------
|
Stargrazer82301/CAAPR
|
CAAPR/CAAPR_AstroMagic/PTS/pts/core/plot/memory.py
|
Python
|
mit
| 7,242 | 0.002762 |
# -*- encoding: utf-8 -*-
# This file is part of IRIS: Infrastructure and Release Information System
#
# Copyright (C) 2013-2015 Intel Corporation
#
# IRIS is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# version 2.0 as published by the Free Software Foundation.
"""
Module for importing git scm data into IRIS.
Data is available from:
https://review.tizen.org/gerrit/gitweb?p=scm/meta/git.git
"""
# pylint: disable=E0611,E1101,F0401,R0914,C0103,W0142
# E0611: No name 'manage' in module 'iris'
# E1101: Class 'Domain' has no 'objects' member
# F0401: Unable to import 'iris.core.models'
# C0321: More than one statement on a single line
# C0103: Invalid name "uc"
# W0142: Used * or ** magic
import os
from django.contrib.auth.models import User
from iris.core.models import (
Domain, SubDomain, GitTree, License,
DomainRole, SubDomainRole, GitTreeRole)
from iris.core.models.user import roles as role_choices
from iris.core.injectors import inject_user_getters
from iris.etl.parser import parse_blocks, UserCache
from iris.etl.loader import get_default_loader
MAPPING = {
'A': 'ARCHITECT',
'B': 'BRANCH',
'C': 'COMMENTS',
'D': 'DOMAIN',
'I': 'INTEGRATOR',
'L': 'LICENSES',
'M': 'MAINTAINER',
'N': 'PARENT',
'O': 'DESCRIPTION',
'R': 'REVIEWER',
'T': 'TREE',
'SL': 'SUBDOMAIN_LEADER',
}
ROLES = {i for i, _ in role_choices()}
NONAME = 'Uncategorized'
def parse_name(name):
"""parse domain name and subdomain name from the given name
"""
parts = [i.strip() for i in name.split('/', 1)]
if len(parts) == 1:
if parts[0]:
return parts[0], NONAME
return NONAME, NONAME
return parts
def build_user_cache(rawdata):
"""
Go over all scm data to build a full UserCache
"""
uc = UserCache()
for typ, data in rawdata:
for role in ROLES & set(data.keys()):
for ustring in data[role]:
uc.update(ustring)
return uc
def rolename(role, name):
"""create role name
"""
return '%s: %s' % (role, name)
def subrolename(role, dname, sname):
"""create subdomain role name
"""
return '%s: %s-%s' % (role, dname, sname)
def transform_domains(rawdata, uc):
"""
Transform to Domain, SubDomain,
DomainRole, SubDomainRole,
DomainRole.user_set and SubDomainRole.user_set
"""
domains, subdomains = [{'name': NONAME}], []
domainroles, subdomainroles = [], []
domainrole_users, subdomainrole_users = [], []
def _trans_subdomain(data):
"""transform subdomain item"""
dname, sname = parse_name(data['DOMAIN'][0])
subdomains.append({'name': sname, 'domain__name': dname})
for role in ROLES & set(data.keys()):
sr = {'role': role,
'subdomain__name': sname,
'subdomain__domain__name': dname}
subdomainroles.append(
dict(sr, name=subrolename(role, dname, sname)))
for ustring in data[role]:
user = uc.get(ustring)
if user:
subdomainrole_users.append((sr, user))
def _trans_domain(data):
"""transform domain item"""
name = data['DOMAIN'][0]
domains.append({'name': name})
for role in ROLES & set(data.keys()):
dr = {'role': role,
'domain__name': name}
domainroles.append(dict(dr, name=rolename(role, name)))
for ustring in data[role]:
user = uc.get(ustring)
if user:
domainrole_users.append((dr, user))
for typ, data in rawdata:
if typ != 'DOMAIN':
continue
elif 'PARENT' in data:
# assume that a submain can't appear before its parent
_trans_subdomain(data)
else:
_trans_domain(data)
# Uncategorized Subdomain
for domain in domains:
subdomains.append({'name': NONAME, 'domain__name': domain['name']})
return (domains, subdomains,
domainroles, subdomainroles,
domainrole_users, subdomainrole_users)
def transform_trees(rawdata, uc):
"""
Transform to GitTree, GitTree.licenses
GitTreeRole, GitTreeRole.user_set
"""
trees, tree_licenses = [], []
treeroles, treerole_users = [], []
no_domain = ' / '.join([NONAME, NONAME])
for typ, data in rawdata:
if typ != 'TREE':
continue
path = data['TREE'][0]
# if DOMAIN exists it must only have one value
name = data.get('DOMAIN', [no_domain])[0] or no_domain
if ' / ' not in name:
name = ' / '.join([name, NONAME])
dname, sname = name.split(' / ', 1)
trees.append({'gitpath': path,
'subdomain__name': sname,
'subdomain__domain__name': dname})
for licen in data.get('LICENSES', ()):
tree_licenses.append(({'gitpath': path}, {'shortname': licen}))
for role in ROLES & set(data.keys()):
tr = {'role': role,
'gittree__gitpath': path}
treeroles.append(dict(tr, name=rolename(role, path)))
for ustring in data[role]:
user = uc.get(ustring)
if user:
treerole_users.append((tr, user))
return (trees, tree_licenses,
treeroles, treerole_users)
def transform_users(ucusers):
"""
Transform cached users to database compatible users
    Field username is used for login and it is a unique field. The
correct value of this field is stored in LDAP server, we can't
get it here, so we use email as username when importing data.
"""
return [dict(username=i['email'], **i) for i in ucusers]
def from_string(scm_str, coding='utf8'):
"""
Import scm data from string.
If input string is not unicode, try to decode them using `coding`
"""
if isinstance(scm_str, str):
scm_str = scm_str.decode(coding)
return from_unicode(scm_str)
def from_unicode(scm_unicode):
"""
Import scm data from unicode string.
Strings return from Django model are all unicode. So it will be much
easier to only deal with unicode string.
"""
# 1.parse
rawdata = parse_blocks(scm_unicode, MAPPING)
# 2.extract and transform
uc = build_user_cache(rawdata)
users = transform_users(uc.all())
(domains, subdomains,
domainroles, subdomainroles,
domainrole_users, subdomainrole_users,
) = transform_domains(rawdata, uc)
(trees, tree_licenses,
treeroles, treerole_users,
) = transform_trees(rawdata, uc)
# 3.load
loader = get_default_loader()
loader.sync_entity(users, User)
delete_domains = loader.sync_entity(domains, Domain)
delete_subdomains = loader.sync_entity(subdomains, SubDomain)
delete_domainroles = loader.sync_entity(domainroles, DomainRole)
delete_subdomainroles = loader.sync_entity(subdomainroles, SubDomainRole)
delete_trees = loader.sync_entity(trees, GitTree)
delete_treeroles = loader.sync_entity(treeroles, GitTreeRole)
loader.sync_nnr(domainrole_users, DomainRole, User)
loader.sync_nnr(subdomainrole_users, SubDomainRole, User)
loader.sync_nnr(tree_licenses, GitTree, License)
loader.sync_nnr(treerole_users, GitTreeRole, User)
delete_treeroles()
delete_subdomainroles()
delete_domainroles()
delete_trees()
delete_subdomains()
delete_domains()
def from_file(dfile, tfile):
"""
import scm data from file.
`dfile` and `tfile` should be file objects not file names.
"""
return from_string(''.join([dfile.read(),
os.linesep, os.linesep,
tfile.read()]))
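def _from_file_example(domains_path, trees_path):
    # Hedged usage sketch (editor addition): the paths point at the "domains"
    # and "trees" description files from scm/meta/git; names are illustrative.
    with open(domains_path) as dfile, open(trees_path) as tfile:
        from_file(dfile, tfile)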
def merge_users(email):
"""merge the scm user into ldap user
"""
if not email:
return
# get the user which is from LDAP
users = User.objects.filter(email=email).exclude(username=email)
    def update_role(role, old, new):
role.user_set.remove(old)
role.user_set.add(new)
if len(users) == 1:
user = users[0]
# get the user which is from scm
try:
ur = User.objects.get(username=email)
except User.DoesNotExist:
return
if user.username != ur.email:
# merge it into ldap user then delete it
ogetter = inject_user_getters(ur)
for role in ogetter.get_domainroles():
                update_role(role, ur, user)
            for role in ogetter.get_subdomainroles():
                update_role(role, ur, user)
            for role in ogetter.get_gittreeroles():
                update_role(role, ur, user)
# merge submissions to ladp user
ur.submission_set.update(owner=user)
ur.delete()
|
sunshine027/iris-panel
|
iris/etl/scm.py
|
Python
|
gpl-2.0
| 8,948 | 0.000112 |
import logging
from flask_script import Manager
from flask_migrate import MigrateCommand
from seedbox.app import app
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def watch_updates():
"""Starts component updates watcher in foreground (CoreOS, k8s, etcd)"""
from seedbox import update_watcher
update_watcher.watch()
def run():
logging.basicConfig(level='NOTSET')
manager.run()
if __name__ == '__main__':
run()
|
nailgun/seedbox
|
src/seedbox/manage.py
|
Python
|
apache-2.0
| 478 | 0 |
"""LU decomposition functions."""
from __future__ import division, print_function, absolute_import
from warnings import warn
from numpy import asarray, asarray_chkfinite
# Local imports
from .misc import _datacopied, LinAlgWarning
from .lapack import get_lapack_funcs
from .flinalg import get_flinalg_funcs
__all__ = ['lu', 'lu_solve', 'lu_factor']
def lu_factor(a, overwrite_a=False, check_finite=True):
"""
Compute pivoted LU decomposition of a matrix.
The decomposition is::
A = P L U
where P is a permutation matrix, L lower triangular with unit
diagonal elements, and U upper triangular.
Parameters
----------
a : (M, M) array_like
Matrix to decompose
overwrite_a : bool, optional
Whether to overwrite data in A (may increase performance)
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
lu : (N, N) ndarray
Matrix containing U in its upper triangle, and L in its lower triangle.
The unit diagonal elements of L are not stored.
piv : (N,) ndarray
Pivot indices representing the permutation matrix P:
row i of matrix was interchanged with row piv[i].
See also
--------
lu_solve : solve an equation system using the LU factorization of a matrix
Notes
-----
This is a wrapper to the ``*GETRF`` routines from LAPACK.
Examples
--------
>>> from scipy.linalg import lu_factor
>>> A = np.array([[2, 5, 8, 7], [5, 2, 2, 8], [7, 5, 6, 6], [5, 4, 4, 8]])
>>> lu, piv = lu_factor(A)
>>> piv
array([2, 2, 3, 3], dtype=int32)
Convert LAPACK's ``piv`` array to NumPy index and test the permutation
>>> piv_py = [2, 0, 3, 1]
>>> L, U = np.tril(lu, k=-1) + np.eye(4), np.triu(lu)
>>> np.allclose(A[piv_py] - L @ U, np.zeros((4, 4)))
True
"""
if check_finite:
a1 = asarray_chkfinite(a)
else:
a1 = asarray(a)
if len(a1.shape) != 2 or (a1.shape[0] != a1.shape[1]):
raise ValueError('expected square matrix')
overwrite_a = overwrite_a or (_datacopied(a1, a))
getrf, = get_lapack_funcs(('getrf',), (a1,))
lu, piv, info = getrf(a1, overwrite_a=overwrite_a)
if info < 0:
raise ValueError('illegal value in %d-th argument of '
'internal getrf (lu_factor)' % -info)
if info > 0:
warn("Diagonal number %d is exactly zero. Singular matrix." % info,
LinAlgWarning, stacklevel=2)
return lu, piv
def lu_solve(lu_and_piv, b, trans=0, overwrite_b=False, check_finite=True):
"""Solve an equation system, a x = b, given the LU factorization of a
Parameters
----------
(lu, piv)
Factorization of the coefficient matrix a, as given by lu_factor
b : array
Right-hand side
trans : {0, 1, 2}, optional
Type of system to solve:
===== =========
trans system
===== =========
0 a x = b
1 a^T x = b
2 a^H x = b
===== =========
overwrite_b : bool, optional
Whether to overwrite data in b (may increase performance)
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
x : array
Solution to the system
See also
--------
lu_factor : LU factorize a matrix
Examples
--------
>>> from scipy.linalg import lu_factor, lu_solve
>>> A = np.array([[2, 5, 8, 7], [5, 2, 2, 8], [7, 5, 6, 6], [5, 4, 4, 8]])
>>> b = np.array([1, 1, 1, 1])
>>> lu, piv = lu_factor(A)
>>> x = lu_solve((lu, piv), b)
>>> np.allclose(A @ x - b, np.zeros((4,)))
True
"""
(lu, piv) = lu_and_piv
if check_finite:
b1 = asarray_chkfinite(b)
else:
b1 = asarray(b)
overwrite_b = overwrite_b or _datacopied(b1, b)
if lu.shape[0] != b1.shape[0]:
raise ValueError("incompatible dimensions.")
getrs, = get_lapack_funcs(('getrs',), (lu, b1))
x, info = getrs(lu, piv, b1, trans=trans, overwrite_b=overwrite_b)
if info == 0:
return x
    raise ValueError('illegal value in %d-th argument of '
                     'internal getrs (lu_solve)' % -info)
def lu(a, permute_l=False, overwrite_a=False, check_finite=True):
"""
Compute pivoted LU decomposition of a matrix.
The decomposition is::
A = P L U
where P is a permutation matrix, L lower triangular with unit
diagonal elements, and U upper triangular.
Parameters
----------
a : (M, N) array_like
Array to decompose
permute_l : bool, optional
Perform the multiplication P*L (Default: do not permute)
overwrite_a : bool, optional
Whether to overwrite data in a (may improve performance)
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
**(If permute_l == False)**
p : (M, M) ndarray
Permutation matrix
l : (M, K) ndarray
Lower triangular or trapezoidal matrix with unit diagonal.
K = min(M, N)
u : (K, N) ndarray
Upper triangular or trapezoidal matrix
**(If permute_l == True)**
pl : (M, K) ndarray
Permuted L matrix.
K = min(M, N)
u : (K, N) ndarray
Upper triangular or trapezoidal matrix
Notes
-----
This is a LU factorization routine written for SciPy.
Examples
--------
>>> from scipy.linalg import lu
>>> A = np.array([[2, 5, 8, 7], [5, 2, 2, 8], [7, 5, 6, 6], [5, 4, 4, 8]])
>>> p, l, u = lu(A)
>>> np.allclose(A - p @ l @ u, np.zeros((4, 4)))
True
"""
if check_finite:
a1 = asarray_chkfinite(a)
else:
a1 = asarray(a)
if len(a1.shape) != 2:
raise ValueError('expected matrix')
overwrite_a = overwrite_a or (_datacopied(a1, a))
flu, = get_flinalg_funcs(('lu',), (a1,))
p, l, u, info = flu(a1, permute_l=permute_l, overwrite_a=overwrite_a)
if info < 0:
raise ValueError('illegal value in %d-th argument of '
'internal lu.getrf' % -info)
if permute_l:
return l, u
return p, l, u
|
jor-/scipy
|
scipy/linalg/decomp_lu.py
|
Python
|
bsd-3-clause
| 6,757 | 0.000444 |
from builtins import str
from builtins import object
import httplib2
import MySQLdb
import json
import os
import sys
import time
import config
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.tools import run
class Error(Exception):
"""Custom Exception subclass."""
pass
class YoutubeCaption(object):
OAUTH_SCOPE = "https://gdata.youtube.com"
CAPTIONS_URL_FORMAT = ("http://gdata.youtube.com/feeds/api/videos/%s/" \
"captions?alt=json")
CAPTIONS_CONTENT_TYPE = "application/vnd.youtube.timedtext; charset=UTF-8"
CAPTIONS_LANGUAGE_CODE = "en"
CAPTIONS_TITLE = ""
def __init__(self, developer_key, client_id, client_secret):
self.CLIENT_ID = client_id
self.CLIENT_SECRET = client_secret
self.DEVELOPER_KEY = developer_key
def authenticate(self):
storage = Storage('youtube-oauth.storage')
self.credentials = storage.get()
if self.credentials is None or self.credentials.invalid:
flow = OAuth2WebServerFlow(
client_id = self.CLIENT_ID,
client_secret = self.CLIENT_SECRET,
scope = self.OAUTH_SCOPE,
user_agent = 'Mozilla/5.0 (X11; Linux x86_64) \
Gecko/20100101 Firefox/31.0'
)
self.credentials = run(flow, storage)
def setup_http_request_object(self):
self.headers = {
"GData-Version": "2",
"X-GData-Key": "key=%s" % self.DEVELOPER_KEY
}
self.http = self.credentials.authorize(httplib2.Http())
def upload_translated_captions(self, srt_file_path, video_id):
try:
self.authenticate()
self.setup_http_request_object()
except Exception as e:
raise Error("Error while authenticating: %s" % str(e))
self.headers["Content-Type"] = self.CAPTIONS_CONTENT_TYPE
self.headers["Content-Language"] = self.CAPTIONS_LANGUAGE_CODE
self.headers["Slug"] = self.CAPTIONS_TITLE
        with open(srt_file_path) as srt_file:
            self.translated_captions_body = srt_file.read()
url = self.CAPTIONS_URL_FORMAT % video_id
response_headers, body = self.http.request (
url,
"POST",
body = self.translated_captions_body,
headers = self.headers
)
if response_headers["status"] != "201":
return "Received HTTP response %s when uploading captions \
to %s." % (response_headers["status"], url), False
return '%s - %s %s - caption updated' % (video_id, \
self.CAPTIONS_LANGUAGE_CODE, self.CAPTIONS_TITLE), True
def set_caption_language_title(self, language='', title=''):
self.CAPTIONS_LANGUAGE_CODE = language
self.CAPTIONS_TITLE = title
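# Minimal usage sketch (hypothetical key/id/secret and file names; the real
# driver code below reads them from config and the database):
#
#   caption = YoutubeCaption('dev-key', 'client-id', 'client-secret')
#   caption.set_caption_language_title('en', 'English')
#   message, ok = caption.upload_translated_captions(
#       'Tutorial-English.srt', 'VIDEO_ID')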
if __name__ == "__main__":
caption = YoutubeCaption(config.DEVELOPER_KEY, config.CLIENT_ID, \
config.CLIENT_SECRET)
db = MySQLdb.connect(host = config.DB_HOST, user = config.DB_USER, \
passwd = config.DB_PASS, db = config.DB_NAME)
ldb = MySQLdb.connect(host = config.DB_HOST, user = config.DB_USER, \
passwd = config.DB_PASS, db = 'cron_logs')
db_cursor = db.cursor()
db_cursor.execute("select ctr.id, ctr.language_id, ctr.video, \
ctr.tutorial_detail_id, ctr.video_id, ctd.foss_id, ctd.tutorial from \
creation_tutorialresource ctr INNER JOIN creation_tutorialdetail ctd \
ON ( ctr.tutorial_detail_id = ctd.id ) WHERE ((ctr.status = 1 OR \
ctr.status = 2 ) AND ctr.video_id IS NOT NULL AND ctr.id NOT IN \
(select distinct trid from cron_logs.srt_uploads)) ORDER BY \
ctd.foss_id, ctd.level_id, ctd.order ASC")
rows = db_cursor.fetchall()
ldb_cursor = ldb.cursor()
for row in rows:
overall_status = 0
db_cursor.execute("select id, name, code from creation_language \
where id = %s", [str(row[1]),])
language = db_cursor.fetchone()
video_title = str(row[6].replace(' ', '-'))
video_path = config.MEDIA_ROOT + 'videos/' + str(row[5]) + '/' + \
str(row[3]) + '/'
english_srt = video_path + video_title + '-English.srt'
status_flag = False
file_missing = False
print('')
        print('FOSS Id:', row[5])
        print('Tutorial:', row[6])
        print('Language:', language[1])
if os.path.isfile(english_srt):
file_missing = False
ldb_cursor.execute("select * from srt_pending_uploads where trid=" \
+ str(row[0]) + " and native_or_english=0")
esrt_row = ldb_cursor.fetchone()
#print 'e------------', esrt_row, '----------'
if esrt_row is None:
caption.set_caption_language_title('en')
message, status_flag = caption.upload_translated_captions(\
english_srt, row[4])
if status_flag:
ldb_cursor.execute("insert into srt_pending_uploads \
(trid,native_or_english) values(%s, 0)", \
[str(row[0]),])
ldb.commit()
overall_status = 1
print(message)
else:
                print(row[4], '- English - Already Exist')
overall_status = 1
else:
file_missing = True
            print(row[4], '- English -', 'SRT File Missing')
if language[1] != 'English':
native_srt = video_path + video_title + '-' + language[1] + '.srt'
if os.path.isfile(native_srt):
ldb_cursor.execute("select * from srt_pending_uploads where \
trid=" + str(row[0]) + " and native_or_english=1")
nsrt_row = ldb_cursor.fetchone()
#print 'n------------', nsrt_row, '----------'
if nsrt_row is None:
file_missing = False
language_title = ''
if language[2] == 'en':
language_title = language[1]
caption.set_caption_language_title(language[2], \
language_title)
message, status_flag = caption.upload_translated_captions(\
native_srt, row[4])
if status_flag:
ldb_cursor.execute("insert into srt_pending_uploads \
(trid,native_or_english) values(%s, 1)", \
[str(row[0]),])
ldb.commit()
print(message)
else:
                    print(row[4], '-', language[1], '- Already Exist')
status_flag = True
else:
file_missing = True
                print(row[4], '-', language[1], '-', 'SRT File Missing')
status_flag = False
if status_flag and overall_status:
ldb_cursor.execute("insert into srt_uploads (trid) values(%s)", \
[str(row[0]),])
ldb.commit()
elif file_missing:
continue
else:
time.sleep(1)
time.sleep(1)
|
Spoken-tutorial/spoken-website
|
cron/upload-subtitle.py
|
Python
|
gpl-3.0
| 7,439 | 0.010082 |
#!/usr/bin/python
# First parameter: a glob pattern for the input files (quote it in the shell)
# Second parameter: the output file
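# Example invocation (illustrative; the pattern must be quoted so the shell
# does not expand it before Python sees it):
#   python mergecsv.py "parts/*.csv" merged.csv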
import glob
import sys
read_files = sorted(glob.glob(sys.argv[1]))
with open(sys.argv[2], "wb") as outfile:
for f in read_files:
with open(f, "rb") as infile:
outfile.write(infile.read())
print("File generated")
|
CodeAtCode/CSVEmailVerifier
|
mergecsv.py
|
Python
|
gpl-2.0
| 340 | 0 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2013, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from flask import request
from flask.ext.restful import Resource, abort, reqparse
from flask.ext.login import current_user
from functools import wraps
from werkzeug.utils import secure_filename
from invenio.ext.restful import require_api_auth, error_codes, \
require_oauth_scopes, require_header
from invenio.modules.deposit.models import Deposition, \
DepositionFile, InvalidDepositionType, DepositionDoesNotExists, \
DraftDoesNotExists, FormDoesNotExists, DepositionNotDeletable, \
InvalidApiAction, FilenameAlreadyExists, \
FileDoesNotExists, ForbiddenAction, DepositionError
from invenio.modules.deposit.storage import \
DepositionStorage, UploadError
from cerberus import Validator
class APIValidator(Validator):
"""
    Adds a new data type 'any', which accepts anything.
"""
def _validate_type_any(self, field, value):
pass
# Request parser
list_parser = reqparse.RequestParser()
list_parser.add_argument('state', type=str)
list_parser.add_argument('submitted', type=bool)
list_parser.add_argument('type', type=str)
draft_data_schema = dict(
metadata=dict(type="dict"),
completed=dict(type="boolean"),
)
draft_data_extended_schema = draft_data_schema.copy()
draft_data_extended_schema['type'] = dict(type="string")
draft_data_extended_schema['draft_id'] = dict(type="string")
file_schema = dict(
filename=dict(type="string", minlength=1, maxlength=255),
)
file_schema_list = dict(
id=dict(type="string"),
)
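# Illustrative check (a sketch): these are plain Cerberus schemas, so a draft
# payload validates like this:
#
#   v = APIValidator()
#   v.validate({'completed': True, 'metadata': {}}, draft_data_schema)  # True
#   v.validate({'completed': 'nope'}, draft_data_schema)                # False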
#
# Decorators
#
def error_handler(f):
"""
Decorator to handle deposition exceptions
"""
@wraps(f)
def inner(*args, **kwargs):
try:
return f(*args, **kwargs)
except DepositionDoesNotExists:
abort(404, message="Deposition does not exist", status=404)
except DraftDoesNotExists:
abort(404, message="Draft does not exist", status=404)
except InvalidApiAction:
abort(404, message="Action does not exist", status=404)
except DepositionNotDeletable:
abort(403, message="Deposition is not deletable", status=403)
except ForbiddenAction:
abort(403, message="Forbidden", status=403)
except InvalidDepositionType:
abort(400, message="Invalid deposition type", status=400)
except FormDoesNotExists:
abort(400, message="Form does not exist", status=400)
except FileDoesNotExists:
abort(400, message="File does not exist", status=400)
except FilenameAlreadyExists:
abort(400, message="Filename already exist", status=400)
except UploadError:
abort(400)
except DepositionError as e:
if len(e.args) >= 1:
abort(400, message=e.args[0], status=400)
else:
abort(500, message="Internal server error", status=500)
return inner
def api_request_globals(f):
"""
Set a variable in request to allow functions further down the chain to
determine if the request is an API request.
"""
@wraps(f)
def inner(*args, **kwargs):
request.is_api_request = True
return f(*args, **kwargs)
return inner
def filter_draft_errors(result):
"""
Extract error messages from a draft.process() result dictionary.
"""
error_messages = []
for field, msgs in result.get('messages', {}).items():
if msgs.get('state', None) == 'error':
for m in msgs['messages']:
error_messages.append(dict(
field=field,
message=m,
code=error_codes['validation_error'],
))
return error_messages
def filter_validation_errors(errors):
"""
Extract error messages from Cerberus error dictionary.
"""
error_messages = []
for field, msgs in errors.items():
if isinstance(msgs, dict):
for f, m in msgs.items():
error_messages.append(dict(
field=f,
message=m,
code=error_codes['validation_error'],
))
else:
error_messages.append(dict(
field=field,
message=msgs,
code=error_codes['validation_error'],
))
return error_messages
# =========
# Mix-ins
# =========
deposition_decorators = [
require_api_auth(),
error_handler,
api_request_globals,
]
class InputProcessorMixin(object):
"""
Mix-in class for validating and processing deposition input data
"""
input_schema = draft_data_extended_schema
def validate_input(self, deposition, draft_id=None):
"""
Validate input data for creating and update a deposition
"""
v = APIValidator()
draft_id = draft_id or deposition.get_default_draft_id()
metadata_schema = deposition.type.api_metadata_schema(draft_id)
if metadata_schema:
schema = self.input_schema.copy()
schema['metadata'] = metadata_schema
else:
schema = self.input_schema
        # Abort only when a non-empty payload fails schema validation
if not v.validate(request.json, schema) and \
request.json:
abort(
400,
message="Bad request",
status=400,
errors=filter_validation_errors(v.errors),
)
def process_input(self, deposition, draft_id=None):
""" Process input data """
# If data provided, process it
if request.json:
if draft_id is None:
# Defaults to `_default' draft id unless specified
draft = deposition.get_or_create_draft(
request.json.get(
'draft_id',
deposition.get_default_draft_id()
)
)
else:
draft = deposition.get_draft(draft_id)
# Process data
dummy_form, validated, result = draft.process(
request.json.get('metadata', {}), complete_form=True
)
            # Validation failed, so abort
if not validated:
abort(
400,
message="Bad request",
status=400,
errors=filter_draft_errors(result),
)
if validated and request.json.get('completed', False):
draft.complete()
# =========
# Resources
# =========
class DepositionListResource(Resource, InputProcessorMixin):
"""
Collection of depositions
"""
method_decorators = deposition_decorators
def get(self, oauth):
"""
List depositions
:param type: Upload type identifier (optional)
"""
args = list_parser.parse_args()
result = Deposition.get_depositions(
user=current_user, type=args['type'] or None
)
return map(lambda o: o.marshal(), result)
@require_header('Content-Type', 'application/json')
@require_oauth_scopes('deposit:write')
def post(self, oauth):
"""
Create a new deposition
"""
# Create deposition (uses default deposition type unless type is given)
d = Deposition.create(current_user, request.json.get('type', None))
# Validate input data according to schema
self.validate_input(d)
# Process input data
self.process_input(d)
# Save if all went fine
d.save()
return d.marshal(), 201
def put(self, oauth):
abort(405)
def delete(self, oauth):
abort(405)
def head(self, oauth):
abort(405)
def options(self, oauth):
abort(405)
def patch(self, oauth):
abort(405)
class DepositionResource(Resource, InputProcessorMixin):
"""
Deposition item
"""
method_decorators = deposition_decorators
def get(self, oauth, resource_id):
""" Get a deposition """
return Deposition.get(resource_id, user=current_user).marshal()
def post(self, oauth, resource_id):
abort(405)
@require_header('Content-Type', 'application/json')
@require_oauth_scopes('deposit:write')
def put(self, oauth, resource_id):
""" Update a deposition """
d = Deposition.get(resource_id, user=current_user)
self.validate_input(d)
self.process_input(d)
d.save()
return d.marshal()
@require_oauth_scopes('deposit:write')
def delete(self, oauth, resource_id):
""" Delete existing deposition """
d = Deposition.get(resource_id, user=current_user)
d.delete()
return "", 204
def head(self, oauth, resource_id):
abort(405)
def options(self, oauth, resource_id):
abort(405)
def patch(self, oauth, resource_id):
abort(405)
class DepositionDraftListResource(Resource):
"""
Deposition draft collection
"""
method_decorators = deposition_decorators
def get(self, oauth, resource_id):
""" List all drafts """
d = Deposition.get(resource_id, user=current_user)
return map(lambda x: d.type.marshal_draft(x), d.drafts_list)
def post(self, oauth, resource_id):
abort(405)
def put(self, oauth, resource_id):
abort(405)
def delete(self, oauth, resource_id):
abort(405)
def head(self, oauth, resource_id):
abort(405)
def options(self, oauth, resource_id):
abort(405)
def patch(self, oauth, resource_id):
abort(405)
class DepositionDraftResource(Resource, InputProcessorMixin):
"""
Deposition draft item
"""
method_decorators = deposition_decorators
input_schema = draft_data_schema
def get(self, oauth, resource_id, draft_id):
""" Get a deposition draft """
d = Deposition.get(resource_id, user=current_user)
return d.type.marshal_draft(d.get_draft(draft_id))
def post(self, oauth, resource_id, draft_id):
abort(405)
@require_header('Content-Type', 'application/json')
@require_oauth_scopes('deposit:write')
def put(self, oauth, resource_id, draft_id):
""" Update a deposition draft """
d = Deposition.get(resource_id, user=current_user)
self.validate_input(d, draft_id)
self.process_input(d, draft_id)
d.save()
def delete(self, oauth, resource_id, draft_id):
abort(405)
def head(self, oauth, resource_id, draft_id):
abort(405)
def options(self, oauth, resource_id, draft_id):
abort(405)
def patch(self, oauth, resource_id, draft_id):
abort(405)
class DepositionActionResource(Resource):
"""
    Represents a deposition action - mostly used to run the workflow.
"""
method_decorators = deposition_decorators
def get(self, oauth, resource_id, action_id):
abort(405)
@require_oauth_scopes('deposit:actions')
def post(self, oauth, resource_id, action_id):
""" Run an action """
d = Deposition.get(resource_id, user=current_user)
return d.type.api_action(d, action_id)
def put(self, oauth, resource_id, action_id):
abort(405)
def delete(self, oauth, resource_id, action_id):
abort(405)
def head(self, oauth, resource_id, action_id):
abort(405)
def options(self, oauth, resource_id, action_id):
abort(405)
def patch(self, oauth, resource_id, action_id):
abort(405)
class DepositionFileListResource(Resource):
"""
Represents a collection of deposition files.
"""
method_decorators = deposition_decorators
def get(self, oauth, resource_id):
""" Get deposition list of files """
d = Deposition.get(resource_id, user=current_user)
return map(lambda f: d.type.marshal_file(f), d.files)
@require_header('Content-Type', 'multipart/form-data')
@require_oauth_scopes('deposit:write')
def post(self, oauth, resource_id):
""" Upload a file """
d = Deposition.get(resource_id, user=current_user)
# Bail-out early if not permitted (add_file will also check, but then
# we already uploaded the file)
if not d.authorize('add_file'):
raise ForbiddenAction('add_file', d)
uploaded_file = request.files['file']
filename = secure_filename(
request.form.get('filename') or uploaded_file.filename
)
df = DepositionFile(backend=DepositionStorage(d.id))
if df.save(uploaded_file, filename=filename):
try:
d.add_file(df)
d.save()
except FilenameAlreadyExists as e:
df.delete()
raise e
return d.type.marshal_file(df), 201
@require_header('Content-Type', 'application/json')
@require_oauth_scopes('deposit:write')
def put(self, oauth, resource_id):
""" Sort files in collection """
if not isinstance(request.json, list):
abort(
400,
message="Bad request",
status=400,
errors=[dict(
message="Expected a list",
code=error_codes["validation_error"],
)],
)
v = APIValidator()
for file_item in request.json:
if not v.validate(file_item, file_schema_list):
abort(
400,
message="Bad request",
status=400,
errors=map(lambda x: dict(
message=x,
code=error_codes["validation_error"]
), v.errors),
)
d = Deposition.get(resource_id, user=current_user)
for file_item in request.json:
if not d.get_file(file_item['id']):
raise FileDoesNotExists(file_item['id'])
        # sort_files raises ForbiddenAction if not authorized
d.sort_files(map(lambda x: x['id'], request.json))
d.save()
return map(lambda f: d.type.marshal_file(f), d.files)
def delete(self, oauth, resource_id):
abort(405)
def head(self, oauth, resource_id):
abort(405)
def options(self, oauth, resource_id):
abort(405)
def patch(self, oauth, resource_id):
abort(405)
class DepositionFileResource(Resource):
"""
Represent a deposition file
"""
method_decorators = deposition_decorators
def get(self, oauth, resource_id, file_id):
""" Get a deposition file """
d = Deposition.get(resource_id, user=current_user)
df = d.get_file(file_id)
if df is None:
abort(404, message="File does not exist", status=404)
return d.type.marshal_file(df)
@require_oauth_scopes('deposit:write')
def delete(self, oauth, resource_id, file_id):
""" Delete existing deposition file """
d = Deposition.get(resource_id, user=current_user)
        # remove_file raises ForbiddenAction if not authorized
df = d.remove_file(file_id)
if df is None:
abort(404, message="File does not exist", status=404)
df.delete()
d.save()
return "", 204
def post(self, oauth, resource_id, file_id):
abort(405)
@require_header('Content-Type', 'application/json')
@require_oauth_scopes('deposit:write')
def put(self, oauth, resource_id, file_id):
""" Update a deposition file - i.e. rename it"""
v = APIValidator()
if not v.validate(request.json, file_schema):
abort(
400,
message="Bad request",
status=400,
errors=map(lambda x: dict(
message=x,
code=error_codes["validation_error"]
), v.errors),
)
d = Deposition.get(resource_id, user=current_user)
df = d.get_file(file_id)
if not d.type.authorize_file(d, df, 'update_metadata'):
raise ForbiddenAction('update_metadata', df)
new_name = secure_filename(request.json['filename'])
if new_name != request.json['filename']:
abort(
400,
message="Bad request",
status=400,
errors=[dict(
message="Not a valid filename",
code=error_codes["validation_error"]
)],
)
df.name = new_name
d.save()
return d.type.marshal_file(df)
def head(self, oauth, resource_id, file_id):
abort(405)
def options(self, oauth, resource_id, file_id):
abort(405)
def patch(self, oauth, resource_id, file_id):
abort(405)
#
# Register API resources
#
def setup_app(app, api):
api.add_resource(
DepositionListResource,
'/api/deposit/depositions/',
)
api.add_resource(
DepositionResource,
'/api/deposit/depositions/<string:resource_id>',
)
api.add_resource(
DepositionFileListResource,
'/api/deposit/depositions/<string:resource_id>/files/',
)
api.add_resource(
DepositionDraftListResource,
'/api/deposit/depositions/<string:resource_id>/metadata/',
)
api.add_resource(
DepositionDraftResource,
'/api/deposit/depositions/<string:resource_id>/metadata/'
'<string:draft_id>',
)
api.add_resource(
DepositionActionResource,
'/api/deposit/depositions/<string:resource_id>/actions/'
'<string:action_id>',
)
api.add_resource(
DepositionFileResource,
'/api/deposit/depositions/<string:resource_id>/files/<string:file_id>',
)
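# Resulting URL space (illustrative; host and auth are placeholders):
#   GET  /api/deposit/depositions/                         list depositions
#   POST /api/deposit/depositions/                         create a deposition
#   PUT  /api/deposit/depositions/<id>/files/              reorder its files
#   POST /api/deposit/depositions/<id>/actions/<action>    run a workflow action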
|
MSusik/invenio
|
invenio/modules/deposit/restful.py
|
Python
|
gpl-2.0
| 18,728 | 0.000694 |
#!/usr/bin/python
import json
import httplib
import os
import subprocess
import time
from mininet.cli import CLI
from mininet.log import setLogLevel, info
from mininet.net import Mininet
from mininet.node import RemoteController
from mininet.topo import Topo
from mininet.util import irange
HOME_FOLDER = os.getenv('HOME')
class LinearTopo(Topo):
"""
    Construct a network of N hosts and N-1 switches, connected as follows:
h1 <-> s1 <-> s2 .. sN-1
| | |
h2 h3 hN
"""
def __init__(self, N, **params):
Topo.__init__(self, **params)
hosts = [ self.addHost( 'h%s' % h )
for h in irange( 1, N ) ]
switches = [ self.addSwitch( 's%s' % s )
for s in irange( 1, N - 1 ) ]
# Wire up switches
last = None
for switch in switches:
if last:
self.addLink( last, switch )
last = switch
# Wire up hosts
self.addLink( hosts[ 0 ], switches[ 0 ] )
for host, switch in zip( hosts[ 1: ], switches ):
self.addLink( host, switch )
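# Illustrative instance for N=4 (matches the docstring above):
#   h1 <-> s1 <-> s2 <-> s3
#          |      |      |
#          h2     h3     h4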
def getControllerIP():
guest_ip = subprocess.check_output("/sbin/ifconfig eth1 | grep 'inet addr:' | cut -d: -f2 | awk '{ print $1}'",
shell=True)
split_ip = guest_ip.split('.')
split_ip[3] = '1'
return '.'.join(split_ip)
def rest_call(path, data, action):
headers = {
'Content-type': 'application/json',
'Accept' : 'application/json',
}
body = json.dumps(data)
conn = httplib.HTTPConnection(getControllerIP(), 8080)
conn.request(action, path, body, headers)
response = conn.getresponse()
ret = (response.status, response.reason, response.read())
conn.close()
return ret
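# Example call (illustrative): enable the DHCP server module by hand.
#   status, reason, body = rest_call('/wm/dhcp/config',
#                                    {'enable': 'true'}, 'POST')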
def addDHCPInstance1(name):
data = {
"name" : name,
"start-ip" : "10.0.0.101",
"end-ip" : "10.0.0.200",
"server-id" : "10.0.0.2",
"server-mac" : "aa:bb:cc:dd:ee:ff",
"router-ip" : "10.0.0.1",
"broadcast-ip" : "10.0.0.255",
"subnet-mask" : "255.255.255.0",
"lease-time" : "60",
"ip-forwarding": "true",
"domain-name" : "mininet-domain-name"
}
ret = rest_call('/wm/dhcp/instance', data, 'POST')
return ret
def addDHCPInstance2(name):
data = {
"name" : name,
"start-ip" : "20.0.0.101",
"end-ip" : "20.0.0.200",
"server-id" : "20.0.0.2",
"server-mac" : "aa:bb:cc:dd:ee:ff", #TODO: not quite sure why another MAC address is not working..
"router-ip" : "20.0.0.1",
"broadcast-ip" : "20.0.0.255",
"subnet-mask" : "255.255.255.0",
"lease-time" : "60",
"ip-forwarding": "true",
"domain-name" : "mininet-domain-name"
}
ret = rest_call('/wm/dhcp/instance', data, 'POST')
return ret
def addSwitchToDHCPInstance1(name):
data = {
"switches": [
{
"dpid": "1"
}
]
}
ret = rest_call('/wm/dhcp/instance/' + name, data, 'POST')
return ret
def addSwitchToDHCPInstance2(name):
data = {
"switches": [
{
"dpid": "2"
},
{
"dpid": "3"
},
{
"dpid": "4"
}
]
}
ret = rest_call('/wm/dhcp/instance/' + name, data, 'POST')
return ret
def enableDHCPServer():
data = {
"enable" : "true",
"lease-gc-period" : "10",
"dynamic-lease" : "true"
}
ret = rest_call('/wm/dhcp/config', data, 'POST')
return ret
# DHCP client functions
def startDHCPclient(host):
"Start DHCP client on host"
intf = host.defaultIntf()
host.cmd('dhclient -v -d -r', intf)
host.cmd('dhclient -v -d 1> /tmp/dhclient.log 2>&1', intf, '&')
def stopDHCPclient(host):
host.cmd('kill %dhclient')
def waitForIP(host):
"Wait for an IP address"
info('*', host, 'waiting for IP address')
while True:
host.defaultIntf().updateIP()
if host.IP():
break
info('.')
time.sleep(1)
info('\n')
info('*', host, 'is now using',
         host.cmd('grep nameserver /etc/resolv.conf'))
def mountPrivateResolvconf(host):
"Create/mount private /etc/resolv.conf for host"
etc = '/tmp/etc-%s' % host
host.cmd('mkdir -p', etc)
host.cmd('mount --bind /etc', etc)
host.cmd('mount -n -t tmpfs tmpfs /etc')
host.cmd('ln -s %s/* /etc/' % etc)
host.cmd('rm /etc/resolv.conf')
host.cmd('cp %s/resolv.conf /etc/' % etc)
def unmountPrivateResolvconf(host):
"Unmount private /etc dir for host"
etc = '/tmp/etc-%s' % host
host.cmd('umount /etc')
host.cmd('umount', etc)
host.cmd('rmdir', etc)
def startNetworkWithLinearTopo( hostCount ):
global net
net = Mininet(topo=LinearTopo(hostCount), build=False)
remote_ip = getControllerIP()
info('** Adding Floodlight Controller\n')
net.addController('c1', controller=RemoteController,
ip=remote_ip, port=6653)
# Build the network
net.build()
net.start()
# Start DHCP
ret = enableDHCPServer()
print(ret)
addDHCPInstance1('mininet-dhcp-1')
ret = addSwitchToDHCPInstance1('mininet-dhcp-1')
print(ret)
addDHCPInstance2('mininet-dhcp-2')
ret = addSwitchToDHCPInstance2('mininet-dhcp-2')
print(ret)
hosts = net.hosts
for host in hosts:
mountPrivateResolvconf(host)
startDHCPclient(host)
waitForIP(host)
def stopNetwork():
if net is not None:
info('** Tearing down network\n')
hosts = net.hosts
        for host in hosts:
            unmountPrivateResolvconf(host)
            stopDHCPclient(host)
net.stop()
if __name__ == '__main__':
setLogLevel('info')
startNetworkWithLinearTopo(5)
CLI(net)
stopNetwork()
|
onebsv1/floodlight
|
example/mininet/dhcp_linear_sw.py
|
Python
|
apache-2.0
| 6,108 | 0.009987 |
from django.core.management.base import BaseCommand
from django.db.models import Q
from ajapaik.ajapaik.models import Album
class Command(BaseCommand):
help = 'Connects to TartuNLP API and retrieves neuro machine translations for empty name fields'
def handle(self, *args, **options):
albums = Album.objects.exclude(
Q(atype=Album.AUTO) |
Q(name_original_language__isnull=False) |
Q(atype=Album.PERSON) |
Q(atype=Album.COLLECTION)
).filter(
Q(name_et__isnull=False)
| Q(name_lv__isnull=False)
| Q(name_lt__isnull=False)
| Q(name_fi__isnull=False)
| Q(name_ru__isnull=False)
| Q(name_de__isnull=False)
| Q(name_en__isnull=False)
)
for each in albums:
print(f'Processing Album {each.pk}')
each: Album
each.fill_untranslated_fields()
|
Ajapaik/ajapaik-web
|
ajapaik/ajapaik/management/commands/tartunlp_on_all_albums.py
|
Python
|
gpl-3.0
| 963 | 0.001038 |
from nose.tools import eq_, raises
from ...errors import DependencyError, DependencyLoop
from ..dependent import Dependent
from ..functions import dig, draw, expand, normalize_context, solve
def test_solve():
# Simple functions
eq_(solve(lambda: "foo"), "foo")
eq_(list(solve([lambda: "foo", lambda: "bar"])), ["foo", "bar"])
# Dependents
foo = Dependent("foo", lambda: "foo")
bar = Dependent("bar", lambda: "bar")
foobar = Dependent("foobar", lambda foo, bar: foo + bar,
depends_on=[foo, bar])
eq_(solve(foobar), "foobar")
eq_(list(solve([foo, bar, foobar])), ["foo", "bar", "foobar"])
# Cache
eq_(solve(foobar, cache={foobar: "foobaz"}), "foobaz")
eq_(solve(foobar, cache={bar: "baz"}), "foobaz")
eq_(solve(foobar, cache={"dependent.bar": "baz"}), "foobaz")
# Context
mybar = Dependent("bar", lambda: "baz")
eq_(solve(foobar, context={mybar}), "foobaz")
eq_(solve(foobar, context={mybar: mybar}), "foobaz")
eq_(solve(foobar, context={bar: mybar}), "foobaz")
eq_(solve(foobar, context={bar: lambda: "baz"}), "foobaz")
solving_profile = {}
solve(foobar, profile=solving_profile)
# print(solving_profile)
eq_(set(solving_profile.keys()), {foo, bar, foobar})
list(solve([foo, bar, foobar], profile=solving_profile))
# print(solving_profile)
eq_(len(solving_profile[foobar]), 2)
@raises(RuntimeError)
def test_unsolveable():
solve(5)
@raises(DependencyLoop)
def test_dependency_loop():
foo = Dependent("foo")
bar = Dependent("bar", depends_on=[foo])
my_foo = Dependent("foo", depends_on=[bar])
solve(bar, context={my_foo})
@raises(DependencyError)
def test_dependency_error():
def derror():
raise DependencyError()
raises_error = Dependent("foo", derror)
solve(raises_error)
def test_cache_preservation():
foo = Dependent("foo")
bar = Dependent("bar", depends_on=[foo], process=lambda foo: foo + "bar")
fooz = Dependent("foo", process=lambda: "fooz")
cache = {foo: "foo"}
values = list(solve([foo, bar], cache=cache))
eq_(values, ["foo", "foobar"])
eq_(cache[bar], "foobar")
cache = {}
values = list(solve([foo, bar], context={fooz}, cache=cache))
eq_(values, ["fooz", "foozbar"])
eq_(cache[bar], "foozbar")
def test_expand():
foo = Dependent("foo", lambda: "foo")
bar = Dependent("bar", lambda: "bar")
foobar = Dependent("foobar", lambda foo, bar: foo + bar,
depends_on=[foo, bar])
derp = Dependent("derp", lambda: "derp")
fooderp = Dependent("fooderp", lambda foo, derp: foo + derp,
depends_on=[foo, derp])
dependents = list(expand(foobar))
eq_(len(dependents), 3)
eq_(set(dependents), {foo, bar, foobar})
dependents = list(expand([fooderp, foobar]))
eq_(len(dependents), 5)
eq_(set(dependents), {derp, fooderp, foo, bar, foobar})
def test_dig():
foo = Dependent("foo", lambda: "foo")
bar = Dependent("bar", lambda: "bar")
foobar = Dependent("foobar", lambda foo, bar: foo + bar,
depends_on=[foo, bar])
foobar_foobar = Dependent("foobar_foobar",
lambda foobar1, foobar2: foobar1 + "_" + foobar2,
depends_on=[foobar, foobar])
roots = list(dig(foobar_foobar))
eq_(len(roots), 2)
eq_(set(roots), {foo, bar})
roots = list(dig([foobar, foo, bar]))
eq_(len(roots), 2)
eq_(set(roots), {foo, bar})
roots = list(dig(foobar_foobar, cache={foo}))
eq_(len(roots), 1)
eq_(set(roots), {bar})
myfoobar = Dependent("foobar", lambda foo1, foo2: foo1 + foo2,
depends_on=[foo, foo])
roots = list(dig(foobar_foobar, context={myfoobar}))
eq_(len(roots), 1)
eq_(set(roots), {foo})
def get_5():
return 5
myfoobar = Dependent("foobar", lambda my_5: 5 ** 2,
depends_on=[get_5])
roots = list(dig(foobar_foobar, context={myfoobar}))
eq_(len(roots), 1)
eq_(set(roots), {get_5})
def test_draw():
foo = Dependent("foo", lambda: "foo")
    bar = Dependent("bar", lambda foo: foo + "bar", depends_on=[foo])
draw(bar) # Does not throw an error
draw(bar, cache={foo: "CACHED"}) # Does not throw an error
@raises(DependencyError)
def test_not_implemented_error():
foo = Dependent("foo")
solve(foo)
@raises(TypeError)
def test_normalize_context_fail():
normalize_context(15)
|
yafeunteun/wikipedia-spam-classifier
|
revscoring/revscoring/dependencies/tests/test_functions.py
|
Python
|
mit
| 4,537 | 0 |
# -*- coding: utf-8 -*-
from admesh import Stl
from utils import asset
import filecmp
class TestIO(object):
'''Tests for the basic IO operations'''
def test_saved_equals_original_ascii(self):
'''Tests if saved ASCII file is identical to the loaded one'''
stl = Stl(asset('block.stl'))
stl.write_ascii(asset('block_ascii.stl'))
assert filecmp.cmp(asset('block.stl'), asset('block_ascii.stl'))
def test_saved_equals_original_binary(self):
'''Tests if saved binary file is identical to the loaded one'''
stl1 = Stl(asset('block.stl'))
stl1.write_binary(asset('block_binary.stl'))
stl2 = Stl(asset('block_binary.stl'))
stl2.write_binary(asset('block_binary2.stl'))
assert filecmp.cmp(asset('block_binary.stl'), asset('block_binary2.stl'))
def test_save_load_unicode(self):
'''Tests saving and loading files with Unicode filenames'''
stl1 = Stl(asset('block.stl'))
stl1.write_ascii(asset(u'block_ěščřž.stl'))
stl2 = Stl(asset(u'block_ěščřž.stl'))
|
admesh/python-admesh
|
test/test_io.py
|
Python
|
gpl-2.0
| 1,085 | 0.00093 |
import unittest
from antelope_catalog.data_sources.local import TEST_ROOT
from antelope_catalog import LcCatalog
from lcatools.interfaces import IndexRequired
cat = LcCatalog(TEST_ROOT)
ref = 'calrecycle.antelope'
cat.new_resource(ref, 'http://www.antelope-lca.net/uo-lca/api/', 'AntelopeV1Client',
store=False, interfaces=['index', 'inventory', 'quantity'], quiet=True)
ar = cat.get_archive(ref)
class AntelopeV1Client(unittest.TestCase):
def test_stages(self):
self.assertEqual(len(ar.get_endpoint('stages')), 87)
def test_stagename(self):
inv = ar.make_interface('inventory')
self.assertEqual(inv.get_stage_name('42'), 'Natural Gas')
self.assertEqual(inv.get_stage_name('47'), 'Natural Gas Supply')
self.assertEqual(inv.get_stage_name('81'), 'WWTP')
def test_impactcategory(self):
self.assertEqual(ar._get_impact_category(6), 'Cancer human health effects')
with self.assertRaises(ValueError):
ar._get_impact_category(5)
def test_nonimpl(self):
with self.assertRaises(IndexRequired):
next(cat.query(ref).terminate('flows/87'))
def test_traversal(self):
ffs = cat.query(ref).get('fragments/47').traverse()
self.assertEqual(len(ffs), 14)
self.assertSetEqual({-0.5, -0.01163, -0.0102, 0.0, 0.5}, set(round(x.node_weight, 5) for x in ffs))
def test_lcia(self):
lcia = cat.query(ref).get('fragments/19').fragment_lcia('lciamethods/4')
self.assertSetEqual(set(x.external_ref for x in lcia.component_entities()),
{'Crude Oil', 'Electricity', 'Natural Gas', 'Refinery'})
self.assertSetEqual(set(round(x.cumulative_result, 10) for x in lcia.components()),
{0.0004522897, 0.0000733389, 0.0000419222, 0.0001582613})
self.assertAlmostEqual(lcia.total(), 0.0007258121306, places=12)
if __name__ == '__main__':
unittest.main()
|
bkuczenski/lca-tools
|
antelope_catalog/providers/v1_client/tests/test_antelope_v1_client.py
|
Python
|
gpl-2.0
| 1,973 | 0.005068 |
"""
Support for HomeMatic devices.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/homematic/
"""
import asyncio
from datetime import timedelta
from functools import partial
import logging
import os
import socket
import voluptuous as vol
from homeassistant.config import load_yaml_config_file
from homeassistant.const import (
EVENT_HOMEASSISTANT_STOP, CONF_USERNAME, CONF_PASSWORD, CONF_PLATFORM,
CONF_HOSTS, CONF_HOST, ATTR_ENTITY_ID, STATE_UNKNOWN)
from homeassistant.helpers import discovery
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
from homeassistant.loader import bind_hass
REQUIREMENTS = ['pyhomematic==0.1.36']
DOMAIN = 'homematic'
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL_HUB = timedelta(seconds=300)
SCAN_INTERVAL_VARIABLES = timedelta(seconds=30)
DISCOVER_SWITCHES = 'homematic.switch'
DISCOVER_LIGHTS = 'homematic.light'
DISCOVER_SENSORS = 'homematic.sensor'
DISCOVER_BINARY_SENSORS = 'homematic.binary_sensor'
DISCOVER_COVER = 'homematic.cover'
DISCOVER_CLIMATE = 'homematic.climate'
ATTR_DISCOVER_DEVICES = 'devices'
ATTR_PARAM = 'param'
ATTR_CHANNEL = 'channel'
ATTR_NAME = 'name'
ATTR_ADDRESS = 'address'
ATTR_VALUE = 'value'
ATTR_INTERFACE = 'interface'
ATTR_ERRORCODE = 'error'
ATTR_MESSAGE = 'message'
ATTR_MODE = 'mode'
ATTR_TIME = 'time'
EVENT_KEYPRESS = 'homematic.keypress'
EVENT_IMPULSE = 'homematic.impulse'
EVENT_ERROR = 'homematic.error'
SERVICE_VIRTUALKEY = 'virtualkey'
SERVICE_RECONNECT = 'reconnect'
SERVICE_SET_VARIABLE_VALUE = 'set_variable_value'
SERVICE_SET_DEVICE_VALUE = 'set_device_value'
SERVICE_SET_INSTALL_MODE = 'set_install_mode'
HM_DEVICE_TYPES = {
DISCOVER_SWITCHES: [
'Switch', 'SwitchPowermeter', 'IOSwitch', 'IPSwitch', 'RFSiren',
'IPSwitchPowermeter', 'KeyMatic', 'HMWIOSwitch', 'Rain', 'EcoLogic'],
DISCOVER_LIGHTS: ['Dimmer', 'KeyDimmer', 'IPKeyDimmer'],
DISCOVER_SENSORS: [
'SwitchPowermeter', 'Motion', 'MotionV2', 'RemoteMotion', 'MotionIP',
'ThermostatWall', 'AreaThermostat', 'RotaryHandleSensor',
'WaterSensor', 'PowermeterGas', 'LuxSensor', 'WeatherSensor',
'WeatherStation', 'ThermostatWall2', 'TemperatureDiffSensor',
'TemperatureSensor', 'CO2Sensor', 'IPSwitchPowermeter', 'HMWIOSwitch',
'FillingLevel', 'ValveDrive', 'EcoLogic', 'IPThermostatWall',
'IPSmoke', 'RFSiren', 'PresenceIP'],
DISCOVER_CLIMATE: [
'Thermostat', 'ThermostatWall', 'MAXThermostat', 'ThermostatWall2',
'MAXWallThermostat', 'IPThermostat', 'IPThermostatWall',
'ThermostatGroup'],
DISCOVER_BINARY_SENSORS: [
'ShutterContact', 'Smoke', 'SmokeV2', 'Motion', 'MotionV2',
'MotionIP', 'RemoteMotion', 'WeatherSensor', 'TiltSensor',
'IPShutterContact', 'HMWIOSwitch', 'MaxShutterContact', 'Rain',
'WiredSensor', 'PresenceIP'],
DISCOVER_COVER: ['Blind', 'KeyBlind']
}
HM_IGNORE_DISCOVERY_NODE = [
'ACTUAL_TEMPERATURE',
'ACTUAL_HUMIDITY'
]
HM_ATTRIBUTE_SUPPORT = {
'LOWBAT': ['battery', {0: 'High', 1: 'Low'}],
'ERROR': ['sabotage', {0: 'No', 1: 'Yes'}],
'RSSI_DEVICE': ['rssi', {}],
'VALVE_STATE': ['valve', {}],
'BATTERY_STATE': ['battery', {}],
'CONTROL_MODE': ['mode', {
0: 'Auto',
1: 'Manual',
2: 'Away',
3: 'Boost',
4: 'Comfort',
5: 'Lowering'
}],
'POWER': ['power', {}],
'CURRENT': ['current', {}],
'VOLTAGE': ['voltage', {}],
'WORKING': ['working', {0: 'No', 1: 'Yes'}],
}
HM_PRESS_EVENTS = [
'PRESS_SHORT',
'PRESS_LONG',
'PRESS_CONT',
'PRESS_LONG_RELEASE',
'PRESS',
]
HM_IMPULSE_EVENTS = [
'SEQUENCE_OK',
]
CONF_RESOLVENAMES_OPTIONS = [
'metadata',
'json',
'xml',
False
]
DATA_HOMEMATIC = 'homematic'
DATA_STORE = 'homematic_store'
DATA_CONF = 'homematic_conf'
CONF_INTERFACES = 'interfaces'
CONF_LOCAL_IP = 'local_ip'
CONF_LOCAL_PORT = 'local_port'
CONF_PORT = 'port'
CONF_PATH = 'path'
CONF_CALLBACK_IP = 'callback_ip'
CONF_CALLBACK_PORT = 'callback_port'
CONF_RESOLVENAMES = 'resolvenames'
CONF_VARIABLES = 'variables'
CONF_DEVICES = 'devices'
CONF_PRIMARY = 'primary'
DEFAULT_LOCAL_IP = '0.0.0.0'
DEFAULT_LOCAL_PORT = 0
DEFAULT_RESOLVENAMES = False
DEFAULT_PORT = 2001
DEFAULT_PATH = ''
DEFAULT_USERNAME = 'Admin'
DEFAULT_PASSWORD = ''
DEVICE_SCHEMA = vol.Schema({
vol.Required(CONF_PLATFORM): 'homematic',
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_ADDRESS): cv.string,
vol.Required(ATTR_INTERFACE): cv.string,
vol.Optional(ATTR_CHANNEL, default=1): vol.Coerce(int),
vol.Optional(ATTR_PARAM): cv.string,
})
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Optional(CONF_INTERFACES, default={}): {cv.match_all: {
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PATH, default=DEFAULT_PATH): cv.string,
vol.Optional(CONF_RESOLVENAMES, default=DEFAULT_RESOLVENAMES):
vol.In(CONF_RESOLVENAMES_OPTIONS),
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Optional(CONF_CALLBACK_IP): cv.string,
vol.Optional(CONF_CALLBACK_PORT): cv.port,
}},
vol.Optional(CONF_HOSTS, default={}): {cv.match_all: {
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
}},
vol.Optional(CONF_LOCAL_IP, default=DEFAULT_LOCAL_IP): cv.string,
vol.Optional(CONF_LOCAL_PORT, default=DEFAULT_LOCAL_PORT): cv.port,
}),
}, extra=vol.ALLOW_EXTRA)
SCHEMA_SERVICE_VIRTUALKEY = vol.Schema({
vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_CHANNEL): vol.Coerce(int),
vol.Required(ATTR_PARAM): cv.string,
vol.Optional(ATTR_INTERFACE): cv.string,
})
SCHEMA_SERVICE_SET_VARIABLE_VALUE = vol.Schema({
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_VALUE): cv.match_all,
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
})
SCHEMA_SERVICE_SET_DEVICE_VALUE = vol.Schema({
vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_CHANNEL): vol.Coerce(int),
vol.Required(ATTR_PARAM): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_VALUE): cv.match_all,
vol.Optional(ATTR_INTERFACE): cv.string,
})
SCHEMA_SERVICE_RECONNECT = vol.Schema({})
SCHEMA_SERVICE_SET_INSTALL_MODE = vol.Schema({
vol.Required(ATTR_INTERFACE): cv.string,
vol.Optional(ATTR_TIME, default=60): cv.positive_int,
vol.Optional(ATTR_MODE, default=1):
vol.All(vol.Coerce(int), vol.In([1, 2])),
vol.Optional(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
})
@bind_hass
def virtualkey(hass, address, channel, param, interface=None):
"""Send virtual keypress to homematic controlller."""
data = {
ATTR_ADDRESS: address,
ATTR_CHANNEL: channel,
ATTR_PARAM: param,
ATTR_INTERFACE: interface,
}
hass.services.call(DOMAIN, SERVICE_VIRTUALKEY, data)
@bind_hass
def set_variable_value(hass, entity_id, value):
"""Change value of a Homematic system variable."""
data = {
ATTR_ENTITY_ID: entity_id,
ATTR_VALUE: value,
}
hass.services.call(DOMAIN, SERVICE_SET_VARIABLE_VALUE, data)
@bind_hass
def set_device_value(hass, address, channel, param, value, interface=None):
"""Call setValue XML-RPC method of supplied interface."""
data = {
ATTR_ADDRESS: address,
ATTR_CHANNEL: channel,
ATTR_PARAM: param,
ATTR_VALUE: value,
ATTR_INTERFACE: interface,
}
hass.services.call(DOMAIN, SERVICE_SET_DEVICE_VALUE, data)
@bind_hass
def set_install_mode(hass, interface, mode=None, time=None, address=None):
"""Call setInstallMode XML-RPC method of supplied inteface."""
data = {
key: value for key, value in (
(ATTR_INTERFACE, interface),
(ATTR_MODE, mode),
(ATTR_TIME, time),
(ATTR_ADDRESS, address)
) if value
}
hass.services.call(DOMAIN, SERVICE_SET_INSTALL_MODE, data)
@bind_hass
def reconnect(hass):
"""Reconnect to CCU/Homegear."""
hass.services.call(DOMAIN, SERVICE_RECONNECT, {})
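# Illustrative calls (a sketch; address and datapoint are placeholders):
# these helpers only wrap hass.services.call, e.g.
#   set_device_value(hass, 'ABC0000001', 1, 'STATE', True)
#   reconnect(hass)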
def setup(hass, config):
"""Set up the Homematic component."""
from pyhomematic import HMConnection
conf = config[DOMAIN]
hass.data[DATA_CONF] = remotes = {}
hass.data[DATA_STORE] = set()
# Create hosts-dictionary for pyhomematic
for rname, rconfig in conf[CONF_INTERFACES].items():
remotes[rname] = {
'ip': socket.gethostbyname(rconfig.get(CONF_HOST)),
'port': rconfig.get(CONF_PORT),
'path': rconfig.get(CONF_PATH),
'resolvenames': rconfig.get(CONF_RESOLVENAMES),
'username': rconfig.get(CONF_USERNAME),
'password': rconfig.get(CONF_PASSWORD),
'callbackip': rconfig.get(CONF_CALLBACK_IP),
'callbackport': rconfig.get(CONF_CALLBACK_PORT),
'connect': True,
}
for sname, sconfig in conf[CONF_HOSTS].items():
remotes[sname] = {
'ip': socket.gethostbyname(sconfig.get(CONF_HOST)),
'port': DEFAULT_PORT,
'username': sconfig.get(CONF_USERNAME),
'password': sconfig.get(CONF_PASSWORD),
'connect': False,
}
# Create server thread
bound_system_callback = partial(_system_callback_handler, hass, config)
hass.data[DATA_HOMEMATIC] = homematic = HMConnection(
local=config[DOMAIN].get(CONF_LOCAL_IP),
localport=config[DOMAIN].get(CONF_LOCAL_PORT),
remotes=remotes,
systemcallback=bound_system_callback,
interface_id='homeassistant'
)
# Start server thread, connect to hosts, initialize to receive events
homematic.start()
# Stops server when HASS is shutting down
hass.bus.listen_once(
EVENT_HOMEASSISTANT_STOP, hass.data[DATA_HOMEMATIC].stop)
# Init homematic hubs
entity_hubs = []
for hub_name in conf[CONF_HOSTS].keys():
entity_hubs.append(HMHub(hass, homematic, hub_name))
# Register HomeMatic services
descriptions = load_yaml_config_file(
os.path.join(os.path.dirname(__file__), 'services.yaml'))
def _hm_service_virtualkey(service):
"""Service to handle virtualkey servicecalls."""
address = service.data.get(ATTR_ADDRESS)
channel = service.data.get(ATTR_CHANNEL)
param = service.data.get(ATTR_PARAM)
# Device not found
hmdevice = _device_from_servicecall(hass, service)
if hmdevice is None:
_LOGGER.error("%s not found for service virtualkey!", address)
return
# Parameter doesn't exist for device
if param not in hmdevice.ACTIONNODE:
_LOGGER.error("%s not datapoint in hm device %s", param, address)
return
# Channel doesn't exist for device
if channel not in hmdevice.ACTIONNODE[param]:
_LOGGER.error("%i is not a channel in hm device %s",
channel, address)
return
# Call parameter
hmdevice.actionNodeData(param, True, channel)
hass.services.register(
DOMAIN, SERVICE_VIRTUALKEY, _hm_service_virtualkey,
descriptions[SERVICE_VIRTUALKEY], schema=SCHEMA_SERVICE_VIRTUALKEY)
def _service_handle_value(service):
"""Service to call setValue method for HomeMatic system variable."""
entity_ids = service.data.get(ATTR_ENTITY_ID)
name = service.data[ATTR_NAME]
value = service.data[ATTR_VALUE]
if entity_ids:
entities = [entity for entity in entity_hubs if
entity.entity_id in entity_ids]
else:
entities = entity_hubs
if not entities:
_LOGGER.error("No HomeMatic hubs available")
return
for hub in entities:
hub.hm_set_variable(name, value)
hass.services.register(
DOMAIN, SERVICE_SET_VARIABLE_VALUE, _service_handle_value,
descriptions[SERVICE_SET_VARIABLE_VALUE],
schema=SCHEMA_SERVICE_SET_VARIABLE_VALUE)
def _service_handle_reconnect(service):
"""Service to reconnect all HomeMatic hubs."""
homematic.reconnect()
hass.services.register(
DOMAIN, SERVICE_RECONNECT, _service_handle_reconnect,
descriptions[SERVICE_RECONNECT], schema=SCHEMA_SERVICE_RECONNECT)
def _service_handle_device(service):
"""Service to call setValue method for HomeMatic devices."""
address = service.data.get(ATTR_ADDRESS)
channel = service.data.get(ATTR_CHANNEL)
param = service.data.get(ATTR_PARAM)
value = service.data.get(ATTR_VALUE)
# Device not found
hmdevice = _device_from_servicecall(hass, service)
if hmdevice is None:
_LOGGER.error("%s not found!", address)
return
hmdevice.setValue(param, value, channel)
hass.services.register(
DOMAIN, SERVICE_SET_DEVICE_VALUE, _service_handle_device,
descriptions[SERVICE_SET_DEVICE_VALUE],
schema=SCHEMA_SERVICE_SET_DEVICE_VALUE)
def _service_handle_install_mode(service):
"""Service to set interface into install mode."""
interface = service.data.get(ATTR_INTERFACE)
mode = service.data.get(ATTR_MODE)
time = service.data.get(ATTR_TIME)
address = service.data.get(ATTR_ADDRESS)
homematic.setInstallMode(interface, t=time, mode=mode, address=address)
hass.services.register(
DOMAIN, SERVICE_SET_INSTALL_MODE, _service_handle_install_mode,
descriptions[SERVICE_SET_INSTALL_MODE],
schema=SCHEMA_SERVICE_SET_INSTALL_MODE)
return True
def _system_callback_handler(hass, config, src, *args):
"""System callback handler."""
# New devices available at hub
if src == 'newDevices':
(interface_id, dev_descriptions) = args
interface = interface_id.split('-')[-1]
# Device support active?
if not hass.data[DATA_CONF][interface]['connect']:
return
addresses = []
for dev in dev_descriptions:
address = dev['ADDRESS'].split(':')[0]
if address not in hass.data[DATA_STORE]:
hass.data[DATA_STORE].add(address)
addresses.append(address)
# Register EVENTS
# Search all devices with an EVENTNODE that includes data
bound_event_callback = partial(_hm_event_handler, hass, interface)
for dev in addresses:
hmdevice = hass.data[DATA_HOMEMATIC].devices[interface].get(dev)
if hmdevice.EVENTNODE:
hmdevice.setEventCallback(
callback=bound_event_callback, bequeath=True)
# Create HASS entities
if addresses:
for component_name, discovery_type in (
('switch', DISCOVER_SWITCHES),
('light', DISCOVER_LIGHTS),
('cover', DISCOVER_COVER),
('binary_sensor', DISCOVER_BINARY_SENSORS),
('sensor', DISCOVER_SENSORS),
('climate', DISCOVER_CLIMATE)):
# Get all devices of a specific type
found_devices = _get_devices(
hass, discovery_type, addresses, interface)
# When devices of this type are found
                # they are set up in HASS and a discovery event is fired
if found_devices:
discovery.load_platform(hass, component_name, DOMAIN, {
ATTR_DISCOVER_DEVICES: found_devices
}, config)
# Homegear error message
elif src == 'error':
_LOGGER.error("Error: %s", args)
(interface_id, errorcode, message) = args
hass.bus.fire(EVENT_ERROR, {
ATTR_ERRORCODE: errorcode,
ATTR_MESSAGE: message
})
def _get_devices(hass, discovery_type, keys, interface):
"""Get the HomeMatic devices for given discovery_type."""
device_arr = []
for key in keys:
device = hass.data[DATA_HOMEMATIC].devices[interface][key]
class_name = device.__class__.__name__
metadata = {}
# Class not supported by discovery type
if class_name not in HM_DEVICE_TYPES[discovery_type]:
continue
# Load metadata needed to generate a parameter list
if discovery_type == DISCOVER_SENSORS:
metadata.update(device.SENSORNODE)
elif discovery_type == DISCOVER_BINARY_SENSORS:
metadata.update(device.BINARYNODE)
else:
metadata.update({None: device.ELEMENT})
# Generate options for 1...n elements with 1...n parameters
for param, channels in metadata.items():
if param in HM_IGNORE_DISCOVERY_NODE:
continue
# Add devices
_LOGGER.debug("%s: Handling %s: %s: %s",
discovery_type, key, param, channels)
for channel in channels:
name = _create_ha_name(
name=device.NAME, channel=channel, param=param,
count=len(channels)
)
device_dict = {
CONF_PLATFORM: "homematic",
ATTR_ADDRESS: key,
ATTR_INTERFACE: interface,
ATTR_NAME: name,
ATTR_CHANNEL: channel
}
if param is not None:
device_dict[ATTR_PARAM] = param
# Add new device
try:
DEVICE_SCHEMA(device_dict)
device_arr.append(device_dict)
except vol.MultipleInvalid as err:
_LOGGER.error("Invalid device config: %s",
str(err))
return device_arr
def _create_ha_name(name, channel, param, count):
"""Generate a unique entity id."""
# HMDevice is a simple device
if count == 1 and param is None:
return name
# Has multiple elements/channels
if count > 1 and param is None:
return "{} {}".format(name, channel)
# With multiple parameters on first channel
if count == 1 and param is not None:
return "{} {}".format(name, param)
# Multiple parameters with multiple channels
if count > 1 and param is not None:
return "{} {} {}".format(name, channel, param)
def _hm_event_handler(hass, interface, device, caller, attribute, value):
"""Handle all pyhomematic device events."""
try:
channel = int(device.split(":")[1])
address = device.split(":")[0]
hmdevice = hass.data[DATA_HOMEMATIC].devices[interface].get(address)
except (TypeError, ValueError):
_LOGGER.error("Event handling channel convert error!")
return
# Return if not an event supported by device
if attribute not in hmdevice.EVENTNODE:
return
_LOGGER.debug("Event %s for %s channel %i", attribute,
hmdevice.NAME, channel)
# Keypress event
if attribute in HM_PRESS_EVENTS:
hass.bus.fire(EVENT_KEYPRESS, {
ATTR_NAME: hmdevice.NAME,
ATTR_PARAM: attribute,
ATTR_CHANNEL: channel
})
return
# Impulse event
if attribute in HM_IMPULSE_EVENTS:
hass.bus.fire(EVENT_IMPULSE, {
ATTR_NAME: hmdevice.NAME,
ATTR_CHANNEL: channel
})
return
_LOGGER.warning("Event is unknown and not forwarded")
def _device_from_servicecall(hass, service):
"""Extract HomeMatic device from service call."""
address = service.data.get(ATTR_ADDRESS)
interface = service.data.get(ATTR_INTERFACE)
if address == 'BIDCOS-RF':
address = 'BidCoS-RF'
if interface:
return hass.data[DATA_HOMEMATIC].devices[interface].get(address)
for devices in hass.data[DATA_HOMEMATIC].devices.values():
if address in devices:
return devices[address]
class HMHub(Entity):
"""The HomeMatic hub. (CCU2/HomeGear)."""
def __init__(self, hass, homematic, name):
"""Initialize HomeMatic hub."""
self.hass = hass
self.entity_id = "{}.{}".format(DOMAIN, name.lower())
self._homematic = homematic
self._variables = {}
self._name = name
self._state = None
# Load data
self.hass.helpers.event.track_time_interval(
self._update_hub, SCAN_INTERVAL_HUB)
self.hass.add_job(self._update_hub, None)
self.hass.helpers.event.track_time_interval(
self._update_variables, SCAN_INTERVAL_VARIABLES)
self.hass.add_job(self._update_variables, None)
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def should_poll(self):
"""Return false. HomeMatic Hub object updates variables."""
return False
@property
def state(self):
"""Return the state of the entity."""
return self._state
@property
def state_attributes(self):
"""Return the state attributes."""
attr = self._variables.copy()
return attr
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return "mdi:gradient"
def _update_hub(self, now):
"""Retrieve latest state."""
service_message = self._homematic.getServiceMessages(self._name)
state = None if service_message is None else len(service_message)
        # Has the state changed?
if self._state != state:
self._state = state
self.schedule_update_ha_state()
def _update_variables(self, now):
"""Retrive all variable data and update hmvariable states."""
variables = self._homematic.getAllSystemVariables(self._name)
if variables is None:
return
state_change = False
for key, value in variables.items():
if key in self._variables and value == self._variables[key]:
continue
state_change = True
self._variables.update({key: value})
if state_change:
self.schedule_update_ha_state()
def hm_set_variable(self, name, value):
"""Set variable value on CCU/Homegear."""
if name not in self._variables:
_LOGGER.error("Variable %s not found on %s", name, self.name)
return
old_value = self._variables.get(name)
if isinstance(old_value, bool):
value = cv.boolean(value)
else:
value = float(value)
self._homematic.setSystemVariable(self.name, name, value)
self._variables.update({name: value})
self.schedule_update_ha_state()
class HMDevice(Entity):
"""The HomeMatic device base object."""
def __init__(self, config):
"""Initialize a generic HomeMatic device."""
self._name = config.get(ATTR_NAME)
self._address = config.get(ATTR_ADDRESS)
self._interface = config.get(ATTR_INTERFACE)
self._channel = config.get(ATTR_CHANNEL)
self._state = config.get(ATTR_PARAM)
self._data = {}
self._homematic = None
self._hmdevice = None
self._connected = False
self._available = False
# Set parameter to uppercase
if self._state:
self._state = self._state.upper()
@asyncio.coroutine
def async_added_to_hass(self):
"""Load data init callbacks."""
yield from self.hass.async_add_job(self.link_homematic)
@property
def should_poll(self):
"""Return false. HomeMatic states are pushed by the XML-RPC Server."""
return False
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def available(self):
"""Return true if device is available."""
return self._available
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
attr = {}
# Generate a dictionary with attributes
for node, data in HM_ATTRIBUTE_SUPPORT.items():
# Is an attribute and exists for this object
if node in self._data:
value = data[1].get(self._data[node], self._data[node])
attr[data[0]] = value
# Static attributes
attr['id'] = self._hmdevice.ADDRESS
attr['interface'] = self._interface
return attr
def link_homematic(self):
"""Connect to HomeMatic."""
if self._connected:
return True
# Initialize
self._homematic = self.hass.data[DATA_HOMEMATIC]
self._hmdevice = \
self._homematic.devices[self._interface][self._address]
self._connected = True
try:
# Initialize datapoints of this object
self._init_data()
self._load_data_from_hm()
# Link events from pyhomematic
self._subscribe_homematic_events()
self._available = not self._hmdevice.UNREACH
# pylint: disable=broad-except
except Exception as err:
self._connected = False
_LOGGER.error("Exception while linking %s: %s",
self._address, str(err))
def _hm_event_callback(self, device, caller, attribute, value):
"""Handle all pyhomematic device events."""
_LOGGER.debug("%s received event '%s' value: %s", self._name,
attribute, value)
has_changed = False
# Is data needed for this instance?
if attribute in self._data:
# Did data change?
if self._data[attribute] != value:
self._data[attribute] = value
has_changed = True
# Availability has changed
if attribute == 'UNREACH':
self._available = bool(value)
has_changed = True
elif not self.available:
self._available = False
has_changed = True
        # If a datapoint has changed, update HASS
if has_changed:
self.schedule_update_ha_state()
def _subscribe_homematic_events(self):
"""Subscribe all required events to handle job."""
channels_to_sub = set()
channels_to_sub.add(0) # Add channel 0 for UNREACH
# Push data to channels_to_sub from hmdevice metadata
for metadata in (self._hmdevice.SENSORNODE, self._hmdevice.BINARYNODE,
self._hmdevice.ATTRIBUTENODE,
self._hmdevice.WRITENODE, self._hmdevice.EVENTNODE,
self._hmdevice.ACTIONNODE):
for node, channels in metadata.items():
# Data is needed for this instance
if node in self._data:
# chan is current channel
if len(channels) == 1:
channel = channels[0]
else:
channel = self._channel
# Prepare for subscription
try:
channels_to_sub.add(int(channel))
except (ValueError, TypeError):
_LOGGER.error("Invalid channel in metadata from %s",
self._name)
# Set callbacks
for channel in channels_to_sub:
_LOGGER.debug(
"Subscribe channel %d from %s", channel, self._name)
self._hmdevice.setEventCallback(
callback=self._hm_event_callback, bequeath=False,
channel=channel)
def _load_data_from_hm(self):
"""Load first value from pyhomematic."""
if not self._connected:
return False
# Read data from pyhomematic
for metadata, funct in (
(self._hmdevice.ATTRIBUTENODE,
self._hmdevice.getAttributeData),
(self._hmdevice.WRITENODE, self._hmdevice.getWriteData),
(self._hmdevice.SENSORNODE, self._hmdevice.getSensorData),
(self._hmdevice.BINARYNODE, self._hmdevice.getBinaryData)):
for node in metadata:
if metadata[node] and node in self._data:
self._data[node] = funct(name=node, channel=self._channel)
return True
def _hm_set_state(self, value):
"""Set data to main datapoint."""
if self._state in self._data:
self._data[self._state] = value
def _hm_get_state(self):
"""Get data from main datapoint."""
if self._state in self._data:
return self._data[self._state]
return None
def _init_data(self):
"""Generate a data dict (self._data) from the HomeMatic metadata."""
# Add all attributes to data dictionary
for data_note in self._hmdevice.ATTRIBUTENODE:
self._data.update({data_note: STATE_UNKNOWN})
# Initialize device specific data
self._init_data_struct()
def _init_data_struct(self):
"""Generate a data dictionary from the HomeMatic device metadata."""
raise NotImplementedError
|
ewandor/home-assistant
|
homeassistant/components/homematic/__init__.py
|
Python
|
apache-2.0
| 29,766 | 0 |
# -*- coding: utf-8 -*-
#
# MothBall documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 30 20:11:36 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../src/'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# extensions = []
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
from recommonmark.parser import CommonMarkParser
source_suffix = ['.rst', '.md']
parsers = {
'.md': CommonMarkParser,
}
# source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Watchmaker'
copyright = u'2016, Plus3IT'
author = u'Plus3IT'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1'
# The full version, including alpha/beta/rc tags.
release = u'0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'Watchmaker v0.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# For 'zh', users can customize the `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Watchmakerdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Watchmaker.tex', u'Watchmaker Documentation',
u'Plus3IT', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'watchmaker', u'Watchmaker Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Watchmaker', u'Watchmaker Documentation',
author, 'Watchmaker', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
|
MarionTheBull/watchmaker
|
docs/conf.py
|
Python
|
apache-2.0
| 9,706 | 0.000206 |
from rsqueakvm.util.cells import QuasiConstant
from rsqueakvm.plugins.vmdebugging.model import wrap_oplist, wrap_greenkey, wrap_debug_info
from rpython.rlib.jit import JitHookInterface, Counters
jit_iface_recursion = QuasiConstant(False)
def make_hook(args, func):
import inspect, re
src = "\n".join([
re.sub("^\\s+", " " * 12, line) for line in inspect.getsource(func).split("\n")[1:]
])
code = [
"def f(%s):" % (args),
" from rsqueakvm import constants",
" from rsqueakvm.interpreter import jit_driver_name",
" from rsqueakvm.model.variable import W_BytesObject",
" if jitdriver.name != jit_driver_name: return",
" space = self.space",
" if jit_iface_recursion.is_set(): return",
" interp = space.interp.get()",
" w_jithook = space.w_jit_hook_selector()",
" w_rcvr = space.w_jit_hook_receiver()",
" if w_jithook and isinstance(w_jithook, W_BytesObject) and w_rcvr:",
" w_method = w_rcvr.class_shadow(space).lookup(w_jithook)",
" if w_method is None: return",
" jit_iface_recursion.activate()",
" try:",
" args_w = func(%s)" % args,
" interp.perform_headless(w_rcvr, w_jithook, [space.wrap_list(args_w)])",
" finally:",
" jit_iface_recursion.deactivate()"
]
d = {
"jit_iface_recursion": jit_iface_recursion,
"func": func,
}
exec compile("\n".join(code), __file__, 'exec') in d
return d["f"]
class JitIface(JitHookInterface):
def prepare_abort(self, reason, jitdriver, greenkey, greenkey_repr, logops, operations):
space = self.space
return [
space.wrap_string('abort'),
wrap_greenkey(space, jitdriver, greenkey, greenkey_repr),
space.wrap_string(Counters.counter_names[reason]),
wrap_oplist(space, logops, operations)]
on_abort = make_hook(
"self, reason, jitdriver, greenkey, greenkey_repr, logops, operations",
prepare_abort
)
def prepare_trace_too_long(self, jitdriver, greenkey, greenkey_repr):
space = self.space
return [
space.wrap_string('trace_too_long'),
wrap_greenkey(space, jitdriver, greenkey, greenkey_repr)]
on_trace_too_long = make_hook(
"self, jitdriver, greenkey, greenkey_repr",
prepare_trace_too_long
)
def prepare_compile_hook(self, jitdriver, debug_info, is_bridge):
space = self.space
return [
space.wrap_string('compile_loop' if not is_bridge else 'compile_bridge'),
wrap_debug_info(space, debug_info, is_bridge=is_bridge)]
wrapped_compiled_hook = make_hook(
"self, jitdriver, debug_info, is_bridge",
prepare_compile_hook
)
def _compile_hook(self, debug_info, is_bridge=False):
jitdriver = debug_info.get_jitdriver()
self.wrapped_compiled_hook(jitdriver, debug_info, is_bridge)
def after_compile(self, debug_info): self._compile_hook(debug_info, is_bridge=False)
def after_compile_bridge(self, debug_info): self._compile_hook(debug_info, is_bridge=True)
def before_compile(self, debug_info): pass
def before_compile_bridge(self, debug_info): pass
jitiface = JitIface()
|
HPI-SWA-Lab/RSqueak
|
rsqueakvm/plugins/vmdebugging/hooks.py
|
Python
|
bsd-3-clause
| 3,381 | 0.002958 |
# Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SLA (Service-level agreement) is set of details for determining compliance
with contracted values such as maximum error rate or minimum response time.
"""
from rally.common.i18n import _
from rally.common import streaming_algorithms
from rally import consts
from rally.task import sla
@sla.configure(name="outliers")
class Outliers(sla.SLA):
"""Limit the number of outliers (iterations that take too much time).
The outliers are detected automatically using the computation of the mean
and standard deviation (std) of the data.
"""
CONFIG_SCHEMA = {
"type": "object",
"$schema": consts.JSON_SCHEMA,
"properties": {
"max": {"type": "integer", "minimum": 0},
"min_iterations": {"type": "integer", "minimum": 3},
"sigmas": {"type": "number", "minimum": 0.0,
"exclusiveMinimum": True}
}
}
def __init__(self, criterion_value):
super(Outliers, self).__init__(criterion_value)
self.max_outliers = self.criterion_value.get("max", 0)
# NOTE(msdubov): Having 3 as default is reasonable (need enough data).
self.min_iterations = self.criterion_value.get("min_iterations", 3)
self.sigmas = self.criterion_value.get("sigmas", 3.0)
self.iterations = 0
self.outliers = 0
self.threshold = None
self.mean_comp = streaming_algorithms.MeanComputation()
self.std_comp = streaming_algorithms.StdDevComputation()
def add_iteration(self, iteration):
# NOTE(ikhudoshyn): This method can not be implemented properly.
# After adding a new iteration, both mean and standard deviation
# may change. Hence threshold will change as well. In this case we
# should again compare durations of all accounted iterations
# to the threshold. Unfortunately we can not do it since
# we do not store durations.
# Implementation provided here only gives rough approximation
# of outliers number.
if not iteration.get("error"):
duration = iteration["duration"]
self.iterations += 1
# NOTE(msdubov): First check if the current iteration is an outlier
if ((self.iterations >= self.min_iterations and self.threshold and
duration > self.threshold)):
self.outliers += 1
# NOTE(msdubov): Then update the threshold value
self.mean_comp.add(duration)
self.std_comp.add(duration)
if self.iterations >= 2:
mean = self.mean_comp.result()
std = self.std_comp.result()
self.threshold = mean + self.sigmas * std
self.success = self.outliers <= self.max_outliers
return self.success
def merge(self, other):
# NOTE(ikhudoshyn): This method can not be implemented properly.
# After merge, both mean and standard deviation may change.
# Hence threshold will change as well. In this case we
# should again compare durations of all accounted iterations
# to the threshold. Unfortunately we can not do it since
# we do not store durations.
# Implementation provided here only gives rough approximation
# of outliers number.
self.iterations += other.iterations
self.outliers += other.outliers
self.mean_comp.merge(other.mean_comp)
self.std_comp.merge(other.std_comp)
if self.iterations >= 2:
mean = self.mean_comp.result()
std = self.std_comp.result()
self.threshold = mean + self.sigmas * std
self.success = self.outliers <= self.max_outliers
return self.success
def details(self):
return (_("Maximum number of outliers %i <= %i - %s") %
(self.outliers, self.max_outliers, self.status()))
|
gluke77/rally
|
rally/plugins/common/sla/outliers.py
|
Python
|
apache-2.0
| 4,530 | 0 |
"""
Django settings for sample project.
Generated by 'django-admin startproject' using Django 1.10.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
from warnings import warn
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$^yawh-48g!2@mq5!bfj3pq0r%ld+xyr+zlpm_q@5k(4$ur1v2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'sample_app',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'sample_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'sample_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# FIXME: Tests with one or multiple databases.
DATABASES = {
'default': {
'ENGINE': 'arangodb_driver',
'HOST': 'localhost',
'PORT': '8529',
'NAME': 'teste_python',
'USER': 'root',
'PASSWORD': 'omoomo',
}
}
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': 'defaultbd',
# },
# 'arangodb': {
# 'ENGINE': 'arangodb_driver',
# 'HOST': 'localhost',
# 'PORT': '8529',
# 'NAME': 'teste_python',
# 'USER': 'root',
# 'PASSWORD': 'omoomo',
# }
#
# }
# DATABASE_ROUTERS = ['arangodb_driver.router.GraphRouter']
# ARANGODB: Map model types to database names.
# If not defined, "default" maps to "default".
DB_ROUTES = {'graph': 'arangodb'}
# ARANGODB: The name of the property in the model that defines the type of the model (default: 'model_type').
DB_ROUTES_MODEL_TYPE_PROPERTY = 'model_type' # type: str
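# A hypothetical model illustrating the two settings above: an instance whose
# model_type is 'graph' would be routed to the 'arangodb' alias via DB_ROUTES
# (assuming DATABASE_ROUTERS is enabled).
#
#     class City(models.Model):
#         model_type = 'graph'   # matched against DB_ROUTES by the router
#         name = models.CharField(max_length=100)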
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
|
pablotcarreira/django-arangodb
|
sample_project/settings.py
|
Python
|
apache-2.0
| 4,023 | 0.001491 |
import tempfile
import nox
nox.options.sessions = "lint", "tests"
locations = "src", "tests", "noxfile.py"
def install_with_constraints(session, *args, **kwargs):
with tempfile.NamedTemporaryFile() as requirements:
session.run(
"poetry",
"export",
"--dev",
"--without-hashes",
"--format=requirements.txt",
f"--output={requirements.name}",
external=True,
)
session.install(f"--constraint={requirements.name}", *args, **kwargs)
@nox.session(python="3.9")
def black(session):
args = session.posargs or locations
install_with_constraints(
session,
"black",
)
session.run("black", *args)
@nox.session(python=["3.9", "3.8"])
def lint(session):
args = session.posargs or locations
install_with_constraints(
session,
"flake8",
"flake8-bandit",
"flake8-black",
"flake8-bugbear",
"flake8-import-order",
)
session.run("flake8", *args)
@nox.session(python="3.9")
def safety(session):
with tempfile.NamedTemporaryFile() as requirements:
session.run(
"poetry",
"export",
"--dev",
"--format=requirements.txt",
"--without-hashes",
f"--output={requirements.name}",
external=True,
)
install_with_constraints(
session,
"safety",
)
session.run("safety", "check", f"--file={requirements.name}", "--full-report")
@nox.session(python=["3.9", "3.8"])
def tests(session):
args = session.posargs or ["--cov", "-m", "not e2e"]
session.run("poetry", "install", "--no-dev", external=True)
install_with_constraints(
session,
"coverage[toml]",
"pytest",
"pytest-cov",
"pytest-mock",
"testfixtures",
)
session.run("pytest", *args)
|
yerejm/ttt
|
noxfile.py
|
Python
|
isc
| 1,940 | 0.000515 |
from xsvm import instructions
from tabulate import tabulate
class Memory:
def __init__(self):
self.memory_storage = {}
self.labels_map = {}
def set(self, address, value):
self.memory_storage[address] = value
def set_label(self, label, address):
self.labels_map[label] = address
def get(self, address):
return self.memory_storage.get(address, 0)
def resolve_label(self, label):
if label not in self.labels_map:
raise RuntimeError("Could not resolve label {l}".format(l=label))
return self.labels_map[label]
class RegisterBank:
available_registers = [
"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15"
]
aliases = {"sp": "r13", "lr": "r14", "pc": "r15"}
def __init__(self):
self.registers = {}
for register in RegisterBank.available_registers:
self.registers[register] = 0
def get(self, register):
register = RegisterBank._resolve_alias(register)
RegisterBank._validate_register_name(register)
return self.registers[register]
def set(self, register, value):
        register = RegisterBank._resolve_alias(register)
RegisterBank._validate_register_name(register)
self.registers[register] = value
def dump_content(self):
table_content = []
for i in range(0, 16, 2):
reg_left = "r{r}".format(r=i)
reg_right = "r{r}".format(r=i+1)
table_content.append([reg_left, self.get(reg_left), reg_right, self.get(reg_right)])
return tabulate(table_content, tablefmt="fancy_grid")
@staticmethod
def _resolve_alias(register):
if register in RegisterBank.aliases:
register = RegisterBank.aliases[register]
return register
@staticmethod
def _validate_register_name(register):
if register not in RegisterBank.available_registers:
raise AttributeError("{r} is an invalid register".format(r=register))
class Processor:
def __init__(self, debug=False):
self.register_bank = RegisterBank()
self.memory = Memory()
self.instructions_executed = 0
self.halted = False
self.comparison_register = 0
self.register_bank.set("sp", 0xFFFFFF)
self.instructions_executed_grouped = {}
self.debug = debug
def fetch_instruction(self):
pc = self.register_bank.get("pc")
instruction = self.memory.get(pc)
if not isinstance(instruction, instructions.Instruction):
raise RuntimeError("No instruction located at {addr}".format(addr=pc))
if self.debug:
print("Executing {i} from {a}".format(i=instruction.original_instruction, a=pc))
self.register_bank.set("pc", pc + 1)
return instruction
def execute_instruction(self, instruction):
if self.halted:
return
executable_name = "exec_" + instruction.mnemonic
executable = getattr(instructions, executable_name)
executable(self, instruction)
self.instructions_executed += 1
if instruction.mnemonic not in self.instructions_executed_grouped:
self.instructions_executed_grouped[instruction.mnemonic] = 0
self.instructions_executed_grouped[instruction.mnemonic] += 1
if self.debug:
print("Register bank after executing the instruction:")
print(self.register_bank.dump_content())
def halt(self):
self.halted = True
def step(self):
self.execute_instruction(self.fetch_instruction())
def execute_until_halted(self, instructions_limit=None):
while not self.halted:
if instructions_limit is not None and self.instructions_executed == instructions_limit:
break
self.step()
def dump_instructions_executed_grouped(self):
keys = list(self.instructions_executed_grouped.keys())
table_contents = []
for i in range(0, len(keys) + 1, 2):
table_row = []
if len(keys) > i:
mnemonic_left = keys[i]
count_left = self.instructions_executed_grouped[mnemonic_left]
table_row.append(mnemonic_left)
table_row.append(count_left)
if len(keys) > i + 1:
mnemonic_right = keys[i+1]
count_right = self.instructions_executed_grouped[mnemonic_right]
table_row.append(mnemonic_right)
table_row.append(count_right)
if len(table_row) > 0:
table_contents.append(table_row)
if len(table_contents) > 1:
headers = ["Instruction", "Count", "Instruction", "Count"]
else:
headers = ["Instruction", "Count"]
return tabulate(table_contents, headers=headers, tablefmt="fancy_grid")
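# A minimal usage sketch (hypothetical program; Instruction objects would
# normally come from the xsvm parser, which is not part of this module):
#
#     p = Processor(debug=True)
#     p.register_bank.set("r0", 42)      # seed a register
#     p.memory.set_label("start", 0)     # name address 0
#     # ... place instructions.Instruction objects into p.memory, then:
#     # p.execute_until_halted(instructions_limit=1000)
#     print(p.register_bank.dump_content())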
|
GedRap/xs-vm
|
xsvm/vm.py
|
Python
|
mit
| 4,947 | 0.001819 |
# ===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of eos.
#
# eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================
from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Table, Float
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import relation, mapper, synonym, deferred
from sqlalchemy.orm.collections import attribute_mapped_collection
from eos.db import gamedata_meta
from eos.types import Icon, Attribute, Item, Effect, MetaType, Group, Traits
items_table = Table("invtypes", gamedata_meta,
Column("typeID", Integer, primary_key=True),
Column("typeName", String, index=True),
Column("description", String),
Column("raceID", Integer),
Column("factionID", Integer),
Column("volume", Float),
Column("mass", Float),
Column("capacity", Float),
Column("published", Boolean),
Column("marketGroupID", Integer, ForeignKey("invmarketgroups.marketGroupID")),
Column("iconID", Integer, ForeignKey("icons.iconID")),
Column("groupID", Integer, ForeignKey("invgroups.groupID"), index=True))
from .metaGroup import metatypes_table # noqa
from .traits import traits_table # noqa
mapper(Item, items_table,
properties={"group": relation(Group, backref="items"),
"icon": relation(Icon),
"_Item__attributes": relation(Attribute, collection_class=attribute_mapped_collection('name')),
"effects": relation(Effect, collection_class=attribute_mapped_collection('name')),
"metaGroup": relation(MetaType,
primaryjoin=metatypes_table.c.typeID == items_table.c.typeID,
uselist=False),
"ID": synonym("typeID"),
"name": synonym("typeName"),
"description": deferred(items_table.c.description),
"traits": relation(Traits,
primaryjoin=traits_table.c.typeID == items_table.c.typeID,
uselist=False)
})
Item.category = association_proxy("group", "category")
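# Note: the association_proxy above makes item.category resolve through
# item.group.category -- a hypothetical access sketch:
#
#     item = session.query(Item).filter(Item.name == "Rifter").first()
#     print(item.category)   # proxied via item.group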
|
Ebag333/Pyfa
|
eos/db/gamedata/item.py
|
Python
|
gpl-3.0
| 3,101 | 0.002902 |
"""Utility functions."""
import numpy as np
def vec(X, order='F'):
"""Returns the vectorization of X. Columns of X are stacked. (The opposite of X.flatten())."""
assert X.ndim == 2, 'vec operator requires a matrix.'
return X.flatten(order=order)
def from_vec(x, m, n, order='F'):
return x.reshape(m, n, order=order)
def vech(X, order='F'):
"""Returns vectorization of lower triangle of X in column major order by default."""
assert X.ndim == 2, 'vech operator requires a matrix.'
m, n = X.shape
if order == 'F':
idx = np.where(1 - np.tri(n,m, -1, dtype=int))
return X.T[idx]
elif order == 'C':
i,j = np.where(np.tri(m,n, dtype=int))
else:
raise Exception("Only order C and F are allowed")
return X[i,j]
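# Worked example: for a 3x3 matrix X, vech(X) in the default column-major
# order returns the six lower-triangle entries
#     [X[0,0], X[1,0], X[2,0], X[1,1], X[2,1], X[2,2]]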
def from_vech(x, m, n, order='F', sym = False):
X = np.zeros((m,n), dtype = x.dtype)
if order == 'F':
idx = np.where(1 - np.tri(n,m,-1, dtype=int))
X.T[idx] = x
elif order == 'C':
idx = np.where(np.tri(m,n, dtype=int))
X[idx] = x
else:
raise Exception("Only C and F ordering allowed.")
if sym:
if m != n:
raise Exception("Can only do this for square matrices.")
X = X + X.T - np.diag(np.diag(X))
return X
def vc(a, b):
"""vc operator from Brand 2005
"""
assert a.ndim == 1 and b.ndim == 1 and a.size == b.size
a = a[:,np.newaxis]
b = b[:,np.newaxis]
return vech(np.dot(a, b.T) + np.dot(b, a.T) - np.diag((a*b).flat))
def hat_operator(omega):
F = omega.shape[0]
# Create the hat matrix.
OH = np.zeros((F,3,3))
o1, o2, o3 = omega.T
OH[:,0,1] = -o3
OH[:,0,2] = o2
OH[:,1,0] = o3
OH[:,1,2] = -o1
OH[:,2,0] = -o2
OH[:,2,1] = o1
return OH
def axis_angle_to_Rs(omega, theta):
F = theta.shape[0]
# Calculate omega hat.
omega_hat = hat_operator(omega)
# Use Rodriguez' formula
Rs = np.zeros((F,3,3))
# Term 1.
Rs += np.eye(3)[np.newaxis,...]
# Term 2.
Rs += np.sin(theta)[:,np.newaxis,np.newaxis] * omega_hat
# Term 3.
tmp = omega[:,:,np.newaxis] * omega[:,np.newaxis,:] - np.eye(3)[np.newaxis,...]
Rs += (1 - np.cos(theta))[:,np.newaxis,np.newaxis] * tmp
return Rs
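# Rodrigues' formula, as implemented above: for a unit axis w and angle t,
#     R = I + sin(t) * [w]_x + (1 - cos(t)) * (w w^T - I)
# where [w]_x is the skew-symmetric matrix built by hat_operator and, for a
# unit vector w, (w w^T - I) equals [w]_x squared.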
def rotvecs_to_Rs(omega):
"""
omega - Fx3 rotation vectors.
Returns a Fx3x3 Rs tensor.
"""
# Make sure, that we don't modify the original vector.
omega = omega.copy()
# Calculate the norm of the vectors.
theta = norm(omega, axis=1)
# When theta is zero, the rotation will be the identity
# and 2pi is also the identity.
theta[theta==0] = np.pi*2
# This allows us to normalize without getting NaNs
omega /= theta[:,np.newaxis]
# Now use Rodrigues' formula to calculate
Rs = axis_angle_to_Rs(omega, theta)
return Rs
def norm(x, axis=-1):
return np.sqrt((x * x).sum(axis=axis))
def rms(x, axis=None):
return np.sqrt(np.mean(np.square(x), axis=axis))
def normed(x, axis=-1):
if axis < -x.ndim or axis >= x.ndim:
raise ValueError("axis(=%d) out of bounds" % axis)
if axis < 0:
axis += x.ndim
shape = list(x.shape)
shape[axis] = 1
shape = tuple(shape)
return x / norm(x,axis=axis).reshape(shape)
|
jtaylor/pysfm
|
sfm/util.py
|
Python
|
bsd-3-clause
| 3,379 | 0.025155 |
"""
Test more expression command sequences with objective-c.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
@skipUnlessDarwin
class FoundationTestCaseString(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_NSString_expr_commands(self):
"""Test expression commands for NSString."""
self.build()
self.target, process, thread, bkpt = lldbutil.run_to_source_breakpoint(
self, '// Break here for NSString tests',
lldb.SBFileSpec('main.m', False))
# Test_NSString:
self.runCmd("thread backtrace")
self.expect("expression (int)[str length]",
patterns=["\(int\) \$.* ="])
self.expect("expression (int)[str_id length]",
patterns=["\(int\) \$.* ="])
self.expect("expression (id)[str description]",
patterns=["\(id\) \$.* = 0x"])
self.expect("expression (id)[str_id description]",
patterns=["\(id\) \$.* = 0x"])
self.expect("expression str.length")
self.expect('expression str = @"new"')
self.runCmd("image lookup -t NSString")
self.expect('expression str = (id)[NSString stringWithCString: "new"]')
self.runCmd("process continue")
@expectedFailureAll(archs=["i[3-6]86"], bugnumber="<rdar://problem/28814052>")
def test_MyString_dump_with_runtime(self):
"""Test dump of a known Objective-C object by dereferencing it."""
self.build()
self.target, process, thread, bkpt = lldbutil.run_to_source_breakpoint(
self, '// Set break point at this line',
lldb.SBFileSpec('main.m', False))
self.expect(
"expression --show-types -- *my",
patterns=[
"\(MyString\) \$.* = ",
"\(MyBase\)"])
self.runCmd("process continue")
|
llvm-mirror/lldb
|
packages/Python/lldbsuite/test/lang/objc/foundation/TestObjCMethodsString.py
|
Python
|
apache-2.0
| 2,009 | 0.00896 |
from __future__ import division
from time import sleep, time
from threading import Event
from collections import deque
from RPi import GPIO
from w1thermsensor import W1ThermSensor
from .devices import GPIODeviceError, GPIODevice, GPIOThread
class InputDeviceError(GPIODeviceError):
pass
class InputDevice(GPIODevice):
def __init__(self, pin=None, pull_up=True):
super(InputDevice, self).__init__(pin)
self._pull_up = pull_up
self._edge = (GPIO.RISING, GPIO.FALLING)[pull_up]
if pull_up:
self._active_state = 0
self._inactive_state = 1
GPIO.setup(pin, GPIO.IN, (GPIO.PUD_DOWN, GPIO.PUD_UP)[pull_up])
@property
def pull_up(self):
return self._pull_up
def wait_for_input(self):
GPIO.wait_for_edge(self.pin, self._edge)
def add_callback(self, callback=None, bouncetime=1000):
if callback is None:
raise InputDeviceError('No callback function given')
GPIO.add_event_detect(self.pin, self._edge, callback, bouncetime)
def remove_callback(self):
GPIO.remove_event_detect(self.pin)
class Button(InputDevice):
pass
class MotionSensor(InputDevice):
def __init__(
self, pin=None, queue_len=5, sample_rate=10, threshold=0.5,
partial=False):
super(MotionSensor, self).__init__(pin, pull_up=False)
if queue_len < 1:
raise InputDeviceError('queue_len must be at least one')
self.sample_rate = sample_rate
self.threshold = threshold
self.partial = partial
self._queue = deque(maxlen=queue_len)
self._queue_full = Event()
self._queue_thread = GPIOThread(target=self._fill_queue)
self._queue_thread.start()
@property
def queue_len(self):
return self._queue.maxlen
@property
def value(self):
if not self.partial:
self._queue_full.wait()
try:
return sum(self._queue) / len(self._queue)
except ZeroDivisionError:
# No data == no motion
return 0.0
@property
def motion_detected(self):
return self.value > self.threshold
def _get_sample_rate(self):
return self._sample_rate
def _set_sample_rate(self, value):
if value <= 0:
raise InputDeviceError('sample_rate must be greater than zero')
self._sample_rate = value
sample_rate = property(_get_sample_rate, _set_sample_rate)
def _get_threshold(self):
return self._threshold
def _set_threshold(self, value):
if value < 0:
raise InputDeviceError('threshold must be zero or more')
self._threshold = value
threshold = property(_get_threshold, _set_threshold)
def _fill_queue(self):
while (
not self._queue_thread.stopping.wait(1 / self.sample_rate) and
len(self._queue) < self._queue.maxlen
):
self._queue.append(self.is_active)
self._queue_full.set()
while not self._queue_thread.stopping.wait(1 / self.sample_rate):
self._queue.append(self.is_active)
class LightSensor(InputDevice):
def __init__(
self, pin=None, queue_len=5, darkness_time=0.01,
threshold=0.1, partial=False):
super(LightSensor, self).__init__(pin, pull_up=False)
if queue_len < 1:
raise InputDeviceError('queue_len must be at least one')
self.darkness_time = darkness_time
self.threshold = threshold
self.partial = partial
self._charged = Event()
GPIO.add_event_detect(self.pin, GPIO.RISING, lambda channel: self._charged.set())
self._queue = deque(maxlen=queue_len)
self._queue_full = Event()
self._queue_thread = GPIOThread(target=self._fill_queue)
self._queue_thread.start()
@property
def queue_len(self):
return self._queue.maxlen
@property
def value(self):
if not self.partial:
self._queue_full.wait()
try:
return 1.0 - (sum(self._queue) / len(self._queue)) / self.darkness_time
except ZeroDivisionError:
# No data == no light
return 0.0
@property
def light_detected(self):
return self.value > self.threshold
def _get_darkness_time(self):
return self._darkness_time
def _set_darkness_time(self, value):
if value <= 0.0:
raise InputDeviceError('darkness_time must be greater than zero')
self._darkness_time = value
# XXX Empty the queue and restart the thread
darkness_time = property(_get_darkness_time, _set_darkness_time)
def _get_threshold(self):
return self._threshold
def _set_threshold(self, value):
if value < 0:
raise InputDeviceError('threshold must be zero or more')
self._threshold = value
threshold = property(_get_threshold, _set_threshold)
def _fill_queue(self):
try:
while (
not self._queue_thread.stopping.is_set() and
len(self._queue) < self._queue.maxlen
):
self._queue.append(self._time_charging())
self._queue_full.set()
while not self._queue_thread.stopping.is_set():
self._queue.append(self._time_charging())
finally:
GPIO.remove_event_detect(self.pin)
def _time_charging(self):
# Drain charge from the capacitor
GPIO.setup(self.pin, GPIO.OUT)
GPIO.output(self.pin, GPIO.LOW)
sleep(0.1)
# Time the charging of the capacitor
start = time()
self._charged.clear()
GPIO.setup(self.pin, GPIO.IN)
self._charged.wait(self.darkness_time)
return min(self.darkness_time, time() - start)
class TemperatureSensor(W1ThermSensor):
@property
def value(self):
return self.get_temperature()
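# A minimal usage sketch (hypothetical BCM pin numbers; pin setup and cleanup
# are handled by the GPIODevice machinery imported above):
#
#     pir = MotionSensor(4)
#     ldr = LightSensor(18)
#     while True:
#         if pir.motion_detected and not ldr.light_detected:
#             print("movement in the dark")
#         sleep(1)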
|
agiledata/python-gpiozero
|
gpiozero/input_devices.py
|
Python
|
bsd-3-clause
| 5,990 | 0.001503 |
from __future__ import unicode_literals
from django.test import TestCase
from django.utils import six
from .models import Person, Book, Car, PersonManager, PublishedBookManager
class CustomManagerTests(TestCase):
def setUp(self):
self.b1 = Book.published_objects.create(
title="How to program", author="Rodney Dangerfield", is_published=True)
self.b2 = Book.published_objects.create(
title="How to be smart", author="Albert Einstein", is_published=False)
self.p1 = Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True)
self.p2 = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False)
def test_manager(self):
# Test a custom `Manager` method.
self.assertQuerysetEqual(
Person.objects.get_fun_people(), [
"Bugs Bunny"
],
six.text_type
)
# Test that the methods of a custom `QuerySet` are properly
# copied onto the default `Manager`.
for manager in ['custom_queryset_default_manager',
'custom_queryset_custom_manager']:
manager = getattr(Person, manager)
# Copy public methods.
manager.public_method()
# Don't copy private methods.
with self.assertRaises(AttributeError):
manager._private_method()
# Copy methods with `manager=True` even if they are private.
manager._optin_private_method()
# Don't copy methods with `manager=False` even if they are public.
with self.assertRaises(AttributeError):
manager.optout_public_method()
# Test that the overridden method is called.
queryset = manager.filter()
self.assertQuerysetEqual(queryset, ["Bugs Bunny"], six.text_type)
self.assertEqual(queryset._filter_CustomQuerySet, True)
# Test that specialized querysets inherit from our custom queryset.
queryset = manager.values_list('first_name', flat=True).filter()
self.assertEqual(list(queryset), [six.text_type("Bugs")])
self.assertEqual(queryset._filter_CustomQuerySet, True)
# Test that the custom manager `__init__()` argument has been set.
self.assertEqual(Person.custom_queryset_custom_manager.init_arg, 'hello')
# Test that the custom manager method is only available on the manager.
Person.custom_queryset_custom_manager.manager_only()
with self.assertRaises(AttributeError):
Person.custom_queryset_custom_manager.all().manager_only()
# Test that the queryset method doesn't override the custom manager method.
queryset = Person.custom_queryset_custom_manager.filter()
self.assertQuerysetEqual(queryset, ["Bugs Bunny"], six.text_type)
self.assertEqual(queryset._filter_CustomManager, True)
# The RelatedManager used on the 'books' descriptor extends the default
# manager
self.assertIsInstance(self.p2.books, PublishedBookManager)
# The default manager, "objects", doesn't exist, because a custom one
# was provided.
self.assertRaises(AttributeError, lambda: Book.objects)
# The RelatedManager used on the 'authors' descriptor extends the
# default manager
self.assertIsInstance(self.b2.authors, PersonManager)
self.assertQuerysetEqual(
Book.published_objects.all(), [
"How to program",
],
lambda b: b.title
)
Car.cars.create(name="Corvette", mileage=21, top_speed=180)
Car.cars.create(name="Neon", mileage=31, top_speed=100)
self.assertQuerysetEqual(
Car.cars.order_by("name"), [
"Corvette",
"Neon",
],
lambda c: c.name
)
self.assertQuerysetEqual(
Car.fast_cars.all(), [
"Corvette",
],
lambda c: c.name
)
# Each model class gets a "_default_manager" attribute, which is a
# reference to the first manager defined in the class. In this case,
# it's "cars".
self.assertQuerysetEqual(
Car._default_manager.order_by("name"), [
"Corvette",
"Neon",
],
lambda c: c.name
)
def test_related_manager_fk(self):
self.p1.favorite_book = self.b1
self.p1.save()
self.p2.favorite_book = self.b1
self.p2.save()
self.assertQuerysetEqual(
self.b1.favorite_books.order_by('first_name').all(), [
"Bugs",
"Droopy",
],
lambda c: c.first_name
)
self.assertQuerysetEqual(
self.b1.favorite_books(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name
)
self.assertQuerysetEqual(
self.b1.favorite_books(manager='fun_people').all(), [
"Bugs",
],
lambda c: c.first_name
)
def test_related_manager_gfk(self):
self.p1.favorite_thing = self.b1
self.p1.save()
self.p2.favorite_thing = self.b1
self.p2.save()
self.assertQuerysetEqual(
self.b1.favorite_things.order_by('first_name').all(), [
"Bugs",
"Droopy",
],
lambda c: c.first_name
)
self.assertQuerysetEqual(
self.b1.favorite_things(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name
)
self.assertQuerysetEqual(
self.b1.favorite_things(manager='fun_people').all(), [
"Bugs",
],
lambda c: c.first_name
)
def test_related_manager_m2m(self):
self.b1.authors.add(self.p1)
self.b1.authors.add(self.p2)
self.assertQuerysetEqual(
self.b1.authors.order_by('first_name').all(), [
"Bugs",
"Droopy",
],
lambda c: c.first_name
)
self.assertQuerysetEqual(
self.b1.authors(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name
)
self.assertQuerysetEqual(
self.b1.authors(manager='fun_people').all(), [
"Bugs",
],
lambda c: c.first_name
)
|
ericholscher/django
|
tests/custom_managers/tests.py
|
Python
|
bsd-3-clause
| 6,629 | 0.000905 |
# *****************************************************************************
# conduct - CONvenient Construction Tool
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Module authors:
# Alexander Lenz <alexander.lenz@posteo.de>
# Georg Brandl <georg@python.org>
#
# *****************************************************************************
import os
import sys
import time
import linecache
import traceback
import logging
from os import path
from logging import Logger, Formatter, Handler, DEBUG, INFO, WARNING, ERROR
from conduct import colors
LOGFMT = '%(asctime)s : %(levelname)-7s : %(name)-25s: %(message)s'
DATEFMT = '%H:%M:%S'
DATESTAMP_FMT = '%Y-%m-%d'
SECONDS_PER_DAY = 60 * 60 * 24
LOGLEVELS = {'debug': DEBUG, 'info': INFO, 'warning': WARNING, 'error': ERROR}
INVLOGLEVELS = {value: key for key, value in LOGLEVELS.items()}
class ConductLogger(Logger):
maxLogNameLength = 0
def __init__(self, *args, **kwargs):
Logger.__init__(self, *args, **kwargs)
ConductLogger._storeLoggerNameLength(self)
def getChild(self, suffix, ownDir=False):
child = Logger.getChild(self, suffix)
child.setLevel(self.getEffectiveLevel())
if ownDir:
for handler in self._collectHandlers():
if isinstance(handler, LogfileHandler):
handler = handler.getChild(suffix)
child.addHandler(handler)
child.propagate = False
return child
def _collectHandlers(self):
result = []
log = self
while log is not None:
result += log.handlers
log = log.parent
return result
@staticmethod
def _storeLoggerNameLength(logObj):
# store max logger name length for formatting
if len(logObj.name) > ConductLogger.maxLogNameLength:
ConductLogger.maxLogNameLength = len(logObj.name)
class ConsoleFormatter(Formatter):
"""
A lightweight formatter for the interactive console, with optional
colored output.
"""
def __init__(self, fmt=None, datefmt=None, colorize=None):
Formatter.__init__(self, fmt, datefmt)
if colorize:
self.colorize = colorize
else:
self.colorize = lambda c, s: s
def formatException(self, exc_info):
return traceback.format_exception_only(*exc_info[0:2])[-1]
def formatTime(self, record, datefmt=None):
return time.strftime(datefmt or DATEFMT,
self.converter(record.created))
def format(self, record):
record.message = record.getMessage()
levelno = record.levelno
datefmt = self.colorize('lightgray', '[%(asctime)s] ')
namefmt = '%(name)-' + str(ConductLogger.maxLogNameLength) + 's: '
if levelno <= DEBUG:
fmtstr = self.colorize('darkgray', '%s%%(message)s' % namefmt)
elif levelno <= INFO:
fmtstr = '%s%%(message)s' % namefmt
elif levelno <= WARNING:
fmtstr = self.colorize('fuchsia', '%s%%(levelname)s: %%(message)s'
% namefmt)
else:
# Add exception type to error (if caused by exception)
msgPrefix = ''
if record.exc_info:
msgPrefix = '%s: ' % record.exc_info[0].__name__
fmtstr = self.colorize('red', '%s%%(levelname)s: %s%%(message)s'
% (namefmt, msgPrefix))
fmtstr = datefmt + fmtstr
if not getattr(record, 'nonl', False):
fmtstr += '\n'
record.asctime = self.formatTime(record, self.datefmt)
s = fmtstr % record.__dict__
# never output more exception info -- the exception message is already
# part of the log message because of our special logger behavior
# if record.exc_info:
# # *not* caching exception text on the record, since it's
# # only a short version
# s += self.formatException(record.exc_info)
return s
def format_extended_frame(frame):
ret = []
for key, value in frame.f_locals.items():
try:
valstr = repr(value)[:256]
except Exception:
valstr = '<cannot be displayed>'
ret.append(' %-20s = %s\n' % (key, valstr))
ret.append('\n')
return ret
def format_extended_traceback(etype, value, tb):
ret = ['Traceback (most recent call last):\n']
while tb is not None:
frame = tb.tb_frame
filename = frame.f_code.co_filename
item = ' File "%s", line %d, in %s\n' % (filename, tb.tb_lineno,
frame.f_code.co_name)
linecache.checkcache(filename)
line = linecache.getline(filename, tb.tb_lineno, frame.f_globals)
if line:
item = item + ' %s\n' % line.strip()
ret.append(item)
if filename != '<script>':
ret += format_extended_frame(tb.tb_frame)
tb = tb.tb_next
ret += traceback.format_exception_only(etype, value)
return ''.join(ret).rstrip('\n')
class LogfileFormatter(Formatter):
"""
The standard Formatter does not support milliseconds with an explicit
datestamp format. It also doesn't show the full traceback for exceptions.
"""
extended_traceback = True
def formatException(self, ei):
if self.extended_traceback:
s = format_extended_traceback(*ei)
else:
s = ''.join(traceback.format_exception(ei[0], ei[1], ei[2],
sys.maxsize))
if s.endswith('\n'):
s = s[:-1]
return s
def formatTime(self, record, datefmt=None):
res = time.strftime(DATEFMT, self.converter(record.created))
res += ',%03d' % record.msecs
return res
class StreamHandler(Handler):
"""Reimplemented from logging: remove cruft, remove bare excepts."""
def __init__(self, stream=None):
Handler.__init__(self)
if stream is None:
stream = sys.stderr
self.stream = stream
def flush(self):
self.acquire()
try:
if self.stream and hasattr(self.stream, 'flush'):
self.stream.flush()
finally:
self.release()
def emit(self, record):
try:
msg = self.format(record)
try:
self.stream.write('%s\n' % msg)
except UnicodeEncodeError:
self.stream.write('%s\n' % msg.encode('utf-8'))
self.flush()
except Exception:
self.handleError(record)
class LogfileHandler(StreamHandler):
"""
Logs to log files with a date stamp appended, and rollover on midnight.
"""
def __init__(self, directory, filenameprefix, dayfmt=DATESTAMP_FMT):
self._directory = path.join(directory, filenameprefix)
if not path.isdir(self._directory):
os.makedirs(self._directory)
self._currentsymlink = path.join(self._directory, 'current')
self._filenameprefix = filenameprefix
self._pathnameprefix = path.join(self._directory, filenameprefix)
self._dayfmt = dayfmt
# today's logfile name
basefn = self._pathnameprefix + '-' + time.strftime(dayfmt) + '.log'
self.baseFilename = path.abspath(basefn)
self.mode = 'a'
StreamHandler.__init__(self, self._open())
# determine time of first midnight from now on
t = time.localtime()
self.rollover_at = time.mktime((t[0], t[1], t[2], 0, 0, 0,
t[6], t[7], t[8])) + SECONDS_PER_DAY
self.setFormatter(LogfileFormatter(LOGFMT, DATEFMT))
self.disabled = False
def getChild(self, name):
return LogfileHandler(self._directory, name)
def filter(self, record):
return not self.disabled
def emit(self, record):
try:
t = int(time.time())
if t >= self.rollover_at:
self.doRollover()
if self.stream is None:
self.stream = self._open()
StreamHandler.emit(self, record)
except Exception:
self.handleError(record)
def enable(self, enabled):
if enabled:
self.disabled = False
self.stream.close()
self.stream = self._open()
else:
self.disabled = True
def close(self):
self.acquire()
try:
if self.stream:
self.flush()
if hasattr(self.stream, 'close'):
self.stream.close()
StreamHandler.close(self)
self.stream = None
finally:
self.release()
def doRollover(self):
self.stream.close()
self.baseFilename = self._pathnameprefix + '-' + \
time.strftime(self._dayfmt) + '.log'
self.stream = self._open()
self.rollover_at += SECONDS_PER_DAY
def _open(self):
# update 'current' symlink upon open
try:
os.remove(self._currentsymlink)
except OSError:
# if the symlink does not (yet) exist, OSError is raised.
# should happen at most once per installation....
pass
if hasattr(os, 'symlink'):
os.symlink(path.basename(self.baseFilename), self._currentsymlink)
# finally open the new logfile....
return open(self.baseFilename, self.mode)
class ColoredConsoleHandler(StreamHandler):
"""
A handler class that writes colorized records to standard output.
"""
def __init__(self):
StreamHandler.__init__(self, sys.stdout)
self.setFormatter(ConsoleFormatter(datefmt=DATEFMT,
colorize=colors.colorize))
def emit(self, record):
msg = self.format(record)
try:
self.stream.write(msg)
except UnicodeEncodeError:
self.stream.write(msg.encode('utf-8'))
self.stream.flush()
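# A minimal wiring sketch (hypothetical log directory), combining the pieces
# above:
#
#     logging.setLoggerClass(ConductLogger)
#     log = logging.getLogger('conduct')
#     log.setLevel(LOGLEVELS['info'])
#     log.addHandler(ColoredConsoleHandler())
#     log.addHandler(LogfileHandler('/tmp/conduct-logs', 'conduct'))
#     log.info('daemon started')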
|
birkenfeld/conduct
|
conduct/loggers.py
|
Python
|
gpl-2.0
| 10,769 | 0.000464 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('anagrafica', '0024_auto_20160129_0053'),
]
operations = [
migrations.AlterIndexTogether(
name='delega',
index_together=set([('persona', 'tipo'), ('inizio', 'fine', 'tipo'), ('inizio', 'fine', 'tipo', 'oggetto_id', 'oggetto_tipo'), ('inizio', 'fine'), ('oggetto_tipo', 'oggetto_id'), ('tipo', 'oggetto_tipo', 'oggetto_id'), ('persona', 'inizio', 'fine', 'tipo', 'oggetto_id', 'oggetto_tipo'), ('persona', 'inizio', 'fine', 'tipo')]),
),
]
|
CroceRossaItaliana/jorvik
|
anagrafica/migrations/0025_auto_20160129_0056.py
|
Python
|
gpl-3.0
| 670 | 0.001493 |
"""
Braubuddy LibratoAPI unit tests.
"""
from mock import call, patch, MagicMock
from braubuddy.tests import BraubuddyTestCase
from braubuddy.output import libratoapi
@patch('braubuddy.output.libratoapi.librato.connect')
class LibratoAPIOutput(BraubuddyTestCase):
def test_librato_api_connect(self, mk_libratoapi_connect):
"""Librato API is initialised on LibratoAPIOutput init."""
output = libratoapi.LibratoAPIOutput(
units='celsius', username='myusername', token='mytoken',
source='braubuddy')
mk_libratoapi_connect.assert_called_with('myusername', 'mytoken')
def test_init_sets_source(self, mk_libratoapi_connect):
"""Source is initialised on LibratoAPIOutput init."""
output = libratoapi.LibratoAPIOutput(
units='celsius', username='myusername', token='mytoken',
source='braubuddy')
self.assertEqual(output._source, 'braubuddy')
def test_publish_metrics(self, mk_libratoapi_connect):
output = libratoapi.LibratoAPIOutput(
units='celsius', username='myusername', token='mytoken',
source='braubuddy')
mk_queue = MagicMock()
output._api = MagicMock()
output._api.new_queue.return_value = mk_queue
output.publish_status(26, 20, 0, 100)
self.assertEqual(
mk_queue.add.mock_calls,
[
call('target_temperature', 26, source='braubuddy'),
call('actual_temperature', 20, source='braubuddy'),
call('heater_percent', 0, source='braubuddy'),
call('cooler_percent', 100, source='braubuddy')
])
self.assertTrue(mk_queue.submit.called)
|
amorphic/braubuddy
|
braubuddy/tests/output/test_libratoapi.py
|
Python
|
bsd-3-clause
| 1,721 | 0 |