text | repo_name | path | language | license | size | score
stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses 1 value | stringclasses 15 values | int64 6–947k | float64 0–0.34
---|---|---|---|---|---|---
# -*- coding: utf-8 -*-
import unittest
import json
from api_health.verifier import Verifier
class JsonVerifier(unittest.TestCase):
def test_constructor_should_be_smart_about_params(self):
simple_json = u'{ "foo": "bar" }'
json_dict = json.loads(simple_json)
try:
v1 = Verifier(simple_json)
v2 = Verifier(json_dict)
except Exception:
self.fail('Verifier() constructor should deal with both '
'string and object json')
self.assertTrue(v1.has_property('foo'))
self.assertTrue(v2.has_property('foo'))
def test_should_check_for_json_property(self):
simple_json = u'{ "foo": "bar" }'
verifier = Verifier(simple_json)
self.assertTrue(verifier.has_property('foo'))
self.assertTrue(verifier.does_not_have_property('bu'))
self.assertFalse(verifier.has_property('bleh'))
def test_should_check_arrays(self):
array_json = u'{ "foo": "bar", "baz": [ 1, 2, 3] }'
verifier = Verifier(array_json)
self.assertTrue(verifier.has_property("baz[1]"))
| bigodines/api-health | tests/test_verifier.py | Python | mit | 1,105 | 0.00181 |
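
# A minimal sketch -- not taken from the api-health package -- of the
# Verifier interface the tests above assume: a constructor accepting either
# a JSON string or an already-decoded dict, plus has_property() /
# does_not_have_property() with simple "name[index]" support for arrays.
# Every name and behaviour here is inferred from the assertions alone.
import json
import re

class VerifierSketch(object):
    def __init__(self, source):
        # accept both a JSON string and a decoded object
        self.data = source if isinstance(source, dict) else json.loads(source)

    def has_property(self, prop):
        match = re.match(r'^(\w+)\[(\d+)\]$', prop)
        if match:  # array lookup such as "baz[1]"
            values = self.data.get(match.group(1))
            return isinstance(values, list) and int(match.group(2)) < len(values)
        return prop in self.data

    def does_not_have_property(self, prop):
        return not self.has_property(prop)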
# This file is part of Gem.
#
# Gem is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Gem is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Gem. If not, see <http://www.gnu.org/licenses/>.
from gem.api import Location
from enum import Enum
LOG_TAG = "player"
def player_position_update(player, location, warped):
profile = player.profile
profile.location = location
| kaye64/gem | content/player.py | Python | gpl-3.0 | 838 | 0.001193 |
from cStringIO import StringIO
from datetime import datetime
from unidecode import unidecode
from handler import Patobj, PatentHandler
import re
import uuid
import xml.sax
import xml_util
import xml_driver
# Despite the name, this is a filename on disk, not raw XML; the
# commented-out StringIO call below shows the in-memory alternative.
xml_string = 'ipg050104.xml'
xh = xml_driver.XMLHandler()
parser = xml_driver.make_parser()
parser.setContentHandler(xh)
parser.setFeature(xml_driver.handler.feature_external_ges, False)
l = xml.sax.xmlreader.Locator()
xh.setDocumentLocator(l)
#parser.parse(StringIO(xml_string))
parser.parse(xml_string)
print "parsing done"
#print type(xh.root.us_bibliographic_data_grant.publication_reference.contents_of('document_id', '', as_string=False))
print xh.root.claims.contents_of('claim', '', as_string=True, upper=False)
#print type(xh.root.us_bibliographic_data_grant.publication_reference.contents_of('document_id', '', as_string=True))
#print xh.root.us_bibliographic_data_grant.publication_reference.contents_of('document_id', '', as_string=True)
| namunu/MBS_Patent | parser_test/test.py | Python | bsd-2-clause | 962 | 0.008316 |
from tests.support.asserts import assert_error, assert_dialog_handled
from tests.support.fixtures import create_dialog
from tests.support.inline import inline
alert_doc = inline("<script>window.alert()</script>")
def get_window_rect(session):
return session.transport.send(
"GET", "session/{session_id}/window/rect".format(**vars(session)))
def test_handle_prompt_dismiss_and_notify():
"""TODO"""
def test_handle_prompt_accept_and_notify():
"""TODO"""
def test_handle_prompt_ignore():
"""TODO"""
def test_handle_prompt_accept(new_session, add_browser_capabilites):
_, session = new_session({"capabilities": {"alwaysMatch": add_browser_capabilites({"unhandledPromptBehavior": "accept"})}})
session.url = inline("<title>WD doc title</title>")
create_dialog(session)("alert", text="dismiss #1", result_var="dismiss1")
response = get_window_rect(session)
assert response.status == 200
assert_dialog_handled(session, "dismiss #1")
create_dialog(session)("confirm", text="dismiss #2", result_var="dismiss2")
response = get_window_rect(session)
assert response.status == 200
assert_dialog_handled(session, "dismiss #2")
create_dialog(session)("prompt", text="dismiss #3", result_var="dismiss3")
response = get_window_rect(session)
assert response.status == 200
assert_dialog_handled(session, "dismiss #3")
def test_handle_prompt_missing_value(session, create_dialog):
session.url = inline("<title>WD doc title</title>")
create_dialog("alert", text="dismiss #1", result_var="dismiss1")
response = get_window_rect(session)
assert_error(response, "unexpected alert open")
assert_dialog_handled(session, "dismiss #1")
create_dialog("confirm", text="dismiss #2", result_var="dismiss2")
response = get_window_rect(session)
assert_error(response, "unexpected alert open")
assert_dialog_handled(session, "dismiss #2")
create_dialog("prompt", text="dismiss #3", result_var="dismiss3")
response = get_window_rect(session)
assert_error(response, "unexpected alert open")
assert_dialog_handled(session, "dismiss #3")
| mbrubeck/servo | tests/wpt/web-platform-tests/webdriver/tests/get_window_rect/user_prompts.py | Python | mpl-2.0 | 2,153 | 0.000464 |
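
# A note on the success path these tests rely on: per the WebDriver
# specification, the "value" member of a GET /session/{session id}/window/rect
# response carries the window's position and size, e.g.
# {"x": 0, "y": 0, "width": 800, "height": 600}, so a 200 status means the
# prompt was handled and the command actually ran. The helper below is an
# illustrative assumption, not part of the original test file.
def assert_rect_keys(response):
    value = response.body["value"]
    assert set(value) == {"x", "y", "width", "height"}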
"""
Common test utilities for courseware functionality
"""
from abc import ABCMeta, abstractmethod
from datetime import datetime
import ddt
from mock import patch
from urllib import urlencode
from lms.djangoapps.courseware.url_helpers import get_redirect_url
from student.tests.factories import AdminFactory, UserFactory, CourseEnrollmentFactory
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, check_mongo_calls
@ddt.ddt
class RenderXBlockTestMixin(object):
"""
Mixin for testing the courseware.render_xblock function.
It can be used for testing any higher-level endpoint that calls this method.
"""
__metaclass__ = ABCMeta
# DOM elements that appear in the LMS Courseware,
# but are excluded from the xBlock-only rendering.
COURSEWARE_CHROME_HTML_ELEMENTS = [
'<ol class="course-tabs"',
'<footer id="footer-openedx"',
'<div class="window-wrap"',
'<div class="preview-menu"',
'<div class="container"'
]
# DOM elements that appear in an xBlock,
# but are excluded from the xBlock-only rendering.
XBLOCK_REMOVED_HTML_ELEMENTS = [
'<div class="wrap-instructor-info"',
]
@abstractmethod
def get_response(self, url_encoded_params=None):
"""
Abstract method to get the response from the endpoint that is being tested.
Arguments:
url_encoded_params - URL encoded parameters that should be appended to the requested URL.
"""
pass # pragma: no cover
def login(self):
"""
Logs in the test user.
"""
self.client.login(username=self.user.username, password='test')
def setup_course(self, default_store=None):
"""
Helper method to create the course.
"""
if not default_store:
default_store = self.store.default_modulestore.get_modulestore_type()
with self.store.default_store(default_store):
self.course = CourseFactory.create() # pylint: disable=attribute-defined-outside-init
chapter = ItemFactory.create(parent=self.course, category='chapter')
self.html_block = ItemFactory.create( # pylint: disable=attribute-defined-outside-init
parent=chapter,
category='html',
data="<p>Test HTML Content<p>"
)
def setup_user(self, admin=False, enroll=False, login=False):
"""
Helper method to create the user.
"""
self.user = AdminFactory() if admin else UserFactory() # pylint: disable=attribute-defined-outside-init
if enroll:
CourseEnrollmentFactory(user=self.user, course_id=self.course.id)
if login:
self.login()
def verify_response(self, expected_response_code=200, url_params=None):
"""
Helper method that calls the endpoint, verifies the expected response code, and returns the response.
"""
if url_params:
url_params = urlencode(url_params)
response = self.get_response(url_params)
if expected_response_code == 200:
self.assertContains(response, self.html_block.data, status_code=expected_response_code)
            # concatenate the two lists; wrapping the sum in [...] would yield
            # a single-element list and compare the response against a whole
            # list object instead of each string
            for chrome_element in (self.COURSEWARE_CHROME_HTML_ELEMENTS +
                                   self.XBLOCK_REMOVED_HTML_ELEMENTS):
self.assertNotContains(response, chrome_element)
else:
self.assertNotContains(response, self.html_block.data, status_code=expected_response_code)
return response
@ddt.data(
(ModuleStoreEnum.Type.mongo, 7),
(ModuleStoreEnum.Type.split, 5),
)
@ddt.unpack
def test_courseware_html(self, default_store, mongo_calls):
"""
To verify that the removal of courseware chrome elements is working,
we include this test here to make sure the chrome elements that should
be removed actually exist in the full courseware page.
If this test fails, it's probably because the HTML template for courseware
has changed and COURSEWARE_CHROME_HTML_ELEMENTS needs to be updated.
"""
with self.store.default_store(default_store):
self.setup_course(default_store)
self.setup_user(admin=True, enroll=True, login=True)
with check_mongo_calls(mongo_calls):
url = get_redirect_url(self.course.id, self.html_block.location)
response = self.client.get(url)
for chrome_element in self.COURSEWARE_CHROME_HTML_ELEMENTS:
self.assertContains(response, chrome_element)
@ddt.data(
(ModuleStoreEnum.Type.mongo, 5),
(ModuleStoreEnum.Type.split, 5),
)
@ddt.unpack
def test_success_enrolled_staff(self, default_store, mongo_calls):
with self.store.default_store(default_store):
self.setup_course(default_store)
self.setup_user(admin=True, enroll=True, login=True)
# The 5 mongoDB calls include calls for
# Old Mongo:
# (1) fill_in_run
# (2) get_course in get_course_with_access
# (3) get_item for HTML block in get_module_by_usage_id
# (4) get_parent when loading HTML block
# (5) edx_notes descriptor call to get_course
# Split:
# (1) course_index - bulk_operation call
# (2) structure - get_course_with_access
# (3) definition - get_course_with_access
# (4) definition - HTML block
# (5) definition - edx_notes decorator (original_get_html)
with check_mongo_calls(mongo_calls):
self.verify_response()
def test_success_unenrolled_staff(self):
self.setup_course()
self.setup_user(admin=True, enroll=False, login=True)
self.verify_response()
def test_success_enrolled_student(self):
self.setup_course()
self.setup_user(admin=False, enroll=True, login=True)
self.verify_response()
def test_unauthenticated(self):
self.setup_course()
self.setup_user(admin=False, enroll=True, login=False)
self.verify_response(expected_response_code=404)
def test_unenrolled_student(self):
self.setup_course()
self.setup_user(admin=False, enroll=False, login=True)
self.verify_response(expected_response_code=404)
@patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_fail_block_unreleased(self):
self.setup_course()
self.setup_user(admin=False, enroll=True, login=True)
self.html_block.start = datetime.max
modulestore().update_item(self.html_block, self.user.id)
self.verify_response(expected_response_code=404)
def test_fail_block_nonvisible(self):
self.setup_course()
self.setup_user(admin=False, enroll=True, login=True)
self.html_block.visible_to_staff_only = True
modulestore().update_item(self.html_block, self.user.id)
self.verify_response(expected_response_code=404)
def test_student_view_param(self):
self.setup_course()
self.setup_user(admin=False, enroll=True, login=True)
self.verify_response(url_params={'view': 'student_view'})
def test_unsupported_view_param(self):
self.setup_course()
self.setup_user(admin=False, enroll=True, login=True)
self.verify_response(url_params={'view': 'author_view'}, expected_response_code=400)
| rismalrv/edx-platform | lms/djangoapps/courseware/testutils.py | Python | agpl-3.0 | 7,642 | 0.002355 |
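
# A sketch of how a concrete test case is assumed to plug into the mixin
# above: inherit it alongside a modulestore-backed Django test case and
# implement get_response() for the endpoint under test. The 'render_xblock'
# URL name and the base-class import path are illustrative assumptions.
from django.core.urlresolvers import reverse
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase

class RenderXBlockTestCase(RenderXBlockTestMixin, SharedModuleStoreTestCase):
    def get_response(self, url_encoded_params=None):
        url = reverse('render_xblock',
                      kwargs={'usage_key_string': unicode(self.html_block.location)})
        if url_encoded_params:
            url += '?' + url_encoded_params
        return self.client.get(url)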
# Copyright (c) 2016 RIPE NCC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import collections
import copy
import os
import re
import yaml
class Configuration(object):
"""
A singleton configuration class that's smart enough to create a config
out of defaults + yaml
"""
USER_CONFIG_DIR = os.path.join(
os.path.expanduser("~"), ".config", "ripe-atlas-tools")
USER_RC = os.path.join(USER_CONFIG_DIR, "rc")
DEFAULT = {
"authorisation": {
"fetch": "",
"fetch_aliases": {},
"create": "",
},
"specification": {
"af": 4,
"description": "",
"source": {
"type": "area",
"value": "WW",
"requested": 50,
},
"times": {
"one-off": True,
"interval": None,
"start": None,
"stop": None,
},
"types": {
"ping": {
"packets": 3,
"packet-interval": 1000,
"size": 48
},
"traceroute": {
"packets": 3,
"size": 48,
"protocol": "ICMP",
"dont-fragment": False,
"paris": 0,
"first-hop": 1,
"max-hops": 255,
"port": 80,
"destination-option-size": None,
"hop-by-hop-option-size": None,
"timeout": 4000
},
"sslcert": {
"port": 443
},
"ntp": {
"packets": 3,
"timeout": 4000
},
"dns": {
"set-cd-bit": False,
"set-do-bit": False,
"protocol": "UDP",
"query-class": "IN",
"query-type": "A",
"query-argument": None,
"set-nsid-bit": False,
"udp-payload-size": 512,
"set-rd-bit": True,
"retry": 0
},
"http": {
"header-bytes": 0,
"version": "1.1",
"method": "GET",
"port": 80,
"path": "/",
"query-string": None,
"user-agent": "RIPE ATLAS: https://atlas.ripe.net/",
"body-bytes": None,
"timing-verbosity": 0,
},
},
"tags": {
"ipv4": {
"ping": {
"include": [],
"exclude": []
},
"traceroute": {
"include": [],
"exclude": []
},
"dns": {
"include": [],
"exclude": []
},
"sslcert": {
"include": [],
"exclude": []
},
"http": {
"include": [],
"exclude": []
},
"ntp": {
"include": [],
"exclude": []
},
"all": {
"include": ["system-ipv4-works"],
"exclude": []
},
},
"ipv6": {
"ping": {
"include": [],
"exclude": []
},
"traceroute": {
"include": [],
"exclude": []
},
"dns": {
"include": [],
"exclude": []
},
"sslcert": {
"include": [],
"exclude": []
},
"http": {
"include": [],
"exclude": []
},
"ntp": {
"include": [],
"exclude": []
},
"all": {
"include": ["system-ipv6-works"],
"exclude": []
}
}
}
},
"ripe-ncc": {
"endpoint": "https://atlas.ripe.net",
"version": 0,
}
}
def get(self):
r = copy.deepcopy(self.DEFAULT)
if os.path.exists(self.USER_RC):
with open(self.USER_RC) as y:
                custom = yaml.safe_load(y)  # safe_load suffices for plain config data
if custom:
r = self.deep_update(r, custom)
return r
@classmethod
def deep_update(cls, d, u):
"""
Updates a dictionary with another dictionary, only it goes deep.
Stolen from http://stackoverflow.com/questions/3232943/
"""
for k, v in u.items():
if isinstance(v, collections.Mapping):
r = cls.deep_update(d.get(k, {}), v)
d[k] = r
else:
d[k] = u[k]
return d
@staticmethod
def write(config):
"""
PyYaml is incapable of preserving comments, or even specifying them as
an argument to `.dump()` (http://pyyaml.org/ticket/114), so we have to
do some regex gymnastics here to make sure that the config file remains
easy for n00bs to read.
"""
template = os.path.join(
os.path.dirname(__file__), "templates", "base.yaml")
authorisation = re.compile("^authorisation:$", re.MULTILINE)
tags = re.compile("^ tags:$", re.MULTILINE)
specification = re.compile("^specification:$", re.MULTILINE)
ripe = re.compile("^ripe-ncc:$", re.MULTILINE)
with open(template) as t:
payload = str(t.read()).format(
payload=yaml.dump(
config,
default_flow_style=False
)
)
payload = ripe.sub(
"\n# Don't mess with these, or Bad Things may happen\n"
"ripe-ncc:",
payload
)
payload = authorisation.sub(
"# Authorisation\n"
"authorisation:",
payload
)
payload = specification.sub(
"\n# Measurement Creation\n"
"specification:",
payload
)
payload = tags.sub(
" # Tags added to probes selection\n"
" tags:",
payload
)
with open(Configuration.USER_RC, "w") as rc:
rc.write(payload)
conf = Configuration().get()
| pierky/ripe-atlas-tools | ripe/atlas/tools/settings/__init__.py | Python | gpl-3.0 | 7,761 | 0 |
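
# A small usage sketch of the merge semantics above: deep_update() layers a
# user override onto the nested defaults without clobbering sibling keys,
# which is exactly how get() applies ~/.config/ripe-atlas-tools/rc on top of
# Configuration.DEFAULT.
import copy

defaults = {"specification": {"af": 4, "source": {"type": "area", "value": "WW"}}}
override = {"specification": {"source": {"value": "NL"}}}
merged = Configuration.deep_update(copy.deepcopy(defaults), override)
assert merged["specification"]["af"] == 4
assert merged["specification"]["source"] == {"type": "area", "value": "NL"}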
# -*- coding: utf-8 -*-
# Copyright 2007-2022 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import logging
from itertools import chain
import numpy as np
from scipy.stats import halfnorm
from hyperspy.external.progressbar import progressbar
from hyperspy.misc.math_tools import check_random_state
_logger = logging.getLogger(__name__)
def _thresh(X, lambda1, vmax):
"""Soft-thresholding with clipping."""
res = np.abs(X) - lambda1
np.maximum(res, 0.0, out=res)
res *= np.sign(X)
np.clip(res, -vmax, vmax, out=res)
return res
def _mrdivide(B, A):
"""Solves xB = A as per Matlab."""
if isinstance(B, np.ndarray):
if len(B.shape) == 2 and B.shape[0] == B.shape[1]:
# square array
return np.linalg.solve(A.T, B.T).T
else:
# Set rcond default value to match numpy 1.14 default value with
# previous numpy version
rcond = np.finfo(float).eps * max(A.shape)
return np.linalg.lstsq(A.T, B.T, rcond=rcond)[0].T
else:
return B / A
def _project(W):
newW = W.copy()
np.maximum(newW, 0, out=newW)
sumsq = np.sqrt(np.sum(W ** 2, axis=0))
np.maximum(sumsq, 1, out=sumsq)
return _mrdivide(newW, np.diag(sumsq))
def _solveproj(v, W, lambda1, kappa=1, h=None, e=None, vmax=None):
m, n = W.shape
v = v.T
if vmax is None:
vmax = v.max()
if len(v.shape) == 2:
batch_size = v.shape[1]
eshape = (m, batch_size)
hshape = (n, batch_size)
else:
eshape = (m,)
hshape = (n,)
if h is None or h.shape != hshape:
h = np.zeros(hshape)
if e is None or e.shape != eshape:
e = np.zeros(eshape)
eta = kappa / np.linalg.norm(W, "fro") ** 2
maxiter = 1e6
iters = 0
while True:
iters += 1
# Solve for h
htmp = h
h = h - eta * W.T @ (W @ h + e - v)
np.maximum(h, 0.0, out=h)
# Solve for e
etmp = e
e = _thresh(v - W @ h, lambda1, vmax)
# Stop conditions
stoph = np.linalg.norm(h - htmp, 2)
stope = np.linalg.norm(e - etmp, 2)
stop = max(stoph, stope) / m
if stop < 1e-5 or iters > maxiter:
break
return h, e
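
# The loop in _solveproj() above alternates two cheap updates until the
# iterates settle: a projected gradient step on the code h (gradient of
# 0.5 * ||W h + e - v||^2 with step size eta = kappa / ||W||_F^2, then
# clipped at zero), and a soft-thresholding step on the outlier term e,
# i.e. the proximal operator of lambda1 * ||e||_1 applied to the residual
# v - W h.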
class ORNMF:
"""Performs Online Robust NMF with missing or corrupted data.
The ORNMF code is based on a transcription of the online proximal gradient
descent (PGD) algorithm MATLAB code obtained from the authors of [Zhao2016]_.
It has been updated to also include L2-normalization cost function that
is able to deal with sparse corruptions and/or outliers slightly faster
(please see ORPCA implementation for details). A further modification
has been made to allow for a changing subspace W, where X ~= WH^T + E
in the ORNMF framework.
Read more in the :ref:`User Guide <mva.rnmf>`.
References
----------
.. [Zhao2016] Zhao, Renbo, and Vincent YF Tan. "Online nonnegative matrix
factorization with outliers." Acoustics, Speech and Signal Processing
(ICASSP), 2016 IEEE International Conference on. IEEE, 2016.
"""
def __init__(
self,
rank,
store_error=False,
lambda1=1.0,
kappa=1.0,
method="PGD",
subspace_learning_rate=1.0,
subspace_momentum=0.5,
random_state=None,
):
"""Creates Online Robust NMF instance that can learn a representation.
Parameters
----------
rank : int
The rank of the representation (number of components/factors)
store_error : bool, default False
If True, stores the sparse error matrix.
lambda1 : float
Nuclear norm regularization parameter.
kappa : float
Step-size for projection solver.
method : {'PGD', 'RobustPGD', 'MomentumSGD'}, default 'PGD'
* 'PGD' - Proximal gradient descent
* 'RobustPGD' - Robust proximal gradient descent
* 'MomentumSGD' - Stochastic gradient descent with momentum
subspace_learning_rate : float
Learning rate for the 'MomentumSGD' method. Should be a
float > 0.0
subspace_momentum : float
Momentum parameter for 'MomentumSGD' method, should be
a float between 0 and 1.
random_state : None or int or RandomState instance, default None
Used to initialize the subspace on the first iteration.
"""
self.n_features = None
self.iterating = False
self.t = 0
if store_error:
self.E = []
else:
self.E = None
self.rank = rank
self.robust = False
self.subspace_tracking = False
self.lambda1 = lambda1
self.kappa = kappa
self.subspace_learning_rate = subspace_learning_rate
self.subspace_momentum = subspace_momentum
self.random_state = check_random_state(random_state)
# Check options are valid
if method not in ("PGD", "RobustPGD", "MomentumSGD"):
raise ValueError("'method' not recognised")
if method == "RobustPGD":
self.robust = True
if method == "MomentumSGD":
self.subspace_tracking = True
if subspace_momentum < 0.0 or subspace_momentum > 1:
raise ValueError("'subspace_momentum' must be a float between 0 and 1")
def _setup(self, X):
self.h, self.e, self.v = None, None, None
if isinstance(X, np.ndarray):
n, m = X.shape
avg = np.sqrt(X.mean() / m)
iterating = False
else:
x = next(X)
m = len(x)
avg = np.sqrt(x.mean() / m)
X = chain([x], X)
iterating = True
self.n_features = m
self.iterating = iterating
self.W = halfnorm.rvs(
size=(self.n_features, self.rank), random_state=self.random_state
)
self.W = np.abs(avg * self.W / np.sqrt(self.rank))
self.H = []
if self.subspace_tracking:
self.vnew = np.zeros_like(self.W)
else:
self.A = np.zeros((self.rank, self.rank))
self.B = np.zeros((self.n_features, self.rank))
return X
def fit(self, X, batch_size=None):
"""Learn NMF components from the data.
Parameters
----------
X : {numpy.ndarray, iterator}
[n_samples x n_features] matrix of observations
or an iterator that yields samples, each with n_features elements.
batch_size : {None, int}
If not None, learn the data in batches, each of batch_size samples
or less.
"""
if self.n_features is None:
X = self._setup(X)
num = None
prod = np.outer
if batch_size is not None:
if not isinstance(X, np.ndarray):
raise ValueError("can't batch iterating data")
else:
prod = np.dot
length = X.shape[0]
num = max(length // batch_size, 1)
X = np.array_split(X, num, axis=0)
if isinstance(X, np.ndarray):
num = X.shape[0]
X = iter(X)
h, e = self.h, self.e
for v in progressbar(X, leave=False, total=num, disable=num == 1):
h, e = _solveproj(v, self.W, self.lambda1, self.kappa, h=h, e=e)
self.v = v
self.e = e
self.h = h
self.H.append(h)
if self.E is not None:
self.E.append(e)
self._solve_W(prod(h, h.T), prod((v.T - e), h.T))
self.t += 1
self.h = h
self.e = e
def _solve_W(self, A, B):
if not self.subspace_tracking:
self.A += A
self.B += B
eta = self.kappa / np.linalg.norm(self.A, "fro")
if self.robust:
# exactly as in the Zhao & Tan paper
n = 0
lasttwo = np.zeros(2)
while n <= 2 or (
np.abs((lasttwo[1] - lasttwo[0]) / lasttwo[0]) > 1e-5 and n < 1e9
):
self.W -= eta * (self.W @ self.A - self.B)
self.W = _project(self.W)
n += 1
lasttwo[0] = lasttwo[1]
lasttwo[1] = 0.5 * np.trace(
self.W.T.dot(self.W).dot(self.A)
) - np.trace(self.W.T.dot(self.B))
else:
# Tom Furnival (@tjof2) approach
# - copied from the ORPCA implementation
# of gradient descent in ./rpca.py
if self.subspace_tracking:
learn = self.subspace_learning_rate * (
1 + self.subspace_learning_rate * self.lambda1 * self.t
)
vold = self.subspace_momentum * self.vnew
self.vnew = (self.W @ A - B) / learn
self.W -= vold + self.vnew
else:
self.W -= eta * (self.W @ self.A - self.B)
np.maximum(self.W, 0.0, out=self.W)
self.W /= max(np.linalg.norm(self.W, "fro"), 1.0)
def project(self, X, return_error=False):
"""Project the learnt components on the data.
Parameters
----------
X : {numpy.ndarray, iterator}
[n_samples x n_features] matrix of observations
or an iterator that yields n_samples, each with n_features elements.
return_error : bool
If True, returns the sparse error matrix as well. Otherwise only
the weights (loadings)
"""
H = []
if return_error:
E = []
num = None
if isinstance(X, np.ndarray):
num = X.shape[0]
X = iter(X)
for v in progressbar(X, leave=False, total=num):
h, e = _solveproj(v, self.W, self.lambda1, self.kappa, vmax=np.inf)
H.append(h.copy())
if return_error:
E.append(e.copy())
H = np.stack(H, axis=-1)
if return_error:
return H, np.stack(E, axis=-1)
else:
return H
def finish(self):
"""Return the learnt factors and loadings."""
if len(self.H) > 0:
if len(self.H[0].shape) == 1:
H = np.stack(self.H, axis=-1)
else:
H = np.concatenate(self.H, axis=1)
return self.W, H
else:
return self.W, 1
def ornmf(
X,
rank,
store_error=False,
project=False,
batch_size=None,
lambda1=1.0,
kappa=1.0,
method="PGD",
subspace_learning_rate=1.0,
subspace_momentum=0.5,
random_state=None,
):
"""Perform online, robust NMF on the data X.
This is a wrapper function for the ORNMF class.
Parameters
----------
X : numpy array
The [n_samples, n_features] input data.
rank : int
The rank of the representation (number of components/factors)
store_error : bool, default False
If True, stores the sparse error matrix.
project : bool, default False
If True, project the data X onto the learnt model.
batch_size : {None, int}, default None
If not None, learn the data in batches, each of batch_size samples
or less.
lambda1 : float
Nuclear norm regularization parameter.
kappa : float
Step-size for projection solver.
method : {'PGD', 'RobustPGD', 'MomentumSGD'}, default 'PGD'
* 'PGD' - Proximal gradient descent
* 'RobustPGD' - Robust proximal gradient descent
* 'MomentumSGD' - Stochastic gradient descent with momentum
subspace_learning_rate : float
Learning rate for the 'MomentumSGD' method. Should be a
float > 0.0
subspace_momentum : float
Momentum parameter for 'MomentumSGD' method, should be
a float between 0 and 1.
random_state : None or int or RandomState instance, default None
Used to initialize the subspace on the first iteration.
Returns
-------
Xhat : numpy array
is the [n_features x n_samples] non-negative matrix
Only returned if store_error is True.
Ehat : numpy array
is the [n_features x n_samples] sparse error matrix
Only returned if store_error is True.
W : numpy array, shape [n_features, rank]
is the non-negative factors matrix
H : numpy array, shape [rank, n_samples]
is the non-negative loadings matrix
"""
X = X.T
_ornmf = ORNMF(
rank,
store_error=store_error,
lambda1=lambda1,
kappa=kappa,
method=method,
subspace_learning_rate=subspace_learning_rate,
subspace_momentum=subspace_momentum,
random_state=random_state,
)
_ornmf.fit(X, batch_size=batch_size)
if project:
W = _ornmf.W
H = _ornmf.project(X)
else:
W, H = _ornmf.finish()
if store_error:
Xhat = W @ H
Ehat = np.array(_ornmf.E).T
return Xhat, Ehat, W, H
else:
return W, H
| jat255/hyperspy | hyperspy/learn/ornmf.py | Python | gpl-3.0 | 13,811 | 0.00029 |
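
# A short usage sketch on synthetic data, assuming only numpy: factorize a
# non-negative matrix at rank 2 and rebuild it from the learnt factors. The
# orientation of W and H follows this module's transpose convention (see the
# ornmf() docstring), so only shapes are inspected here.
import numpy as np

rng = np.random.RandomState(0)
X = np.abs(rng.randn(100, 64))          # 100 samples, 64 features
W, H = ornmf(X, rank=2, random_state=0)
X_hat = W @ H                           # low-rank, non-negative reconstruction
print(W.shape, H.shape, X_hat.shape)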
# -*- coding: utf-8 -*-
"""
UV Light 2.0 Plugin
Copyright (C) 2018 Ishraq Ibne Ashraf <ishraq@tinkerforge.com>
__init__.py: Package initialization
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
"""
from brickv.plugin_system.plugins.uv_light_v2.uv_light_v2 import UVLightV2
device_class = UVLightV2
| Tinkerforge/brickv | src/brickv/plugin_system/plugins/uv_light_v2/__init__.py | Python | gpl-2.0 | 931 | 0 |
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import errno
import fcntl
import os
import sys
import itertools
from conary import trove, deps, errors, files, streams
from conary.dbstore import idtable, migration, sqlerrors
# Stuff related to SQL schema maintenance and migration
TROVE_TROVES_BYDEFAULT = 1 << 0
TROVE_TROVES_WEAKREF = 1 << 1
VERSION = 20
def resetTable(cu, name):
try:
cu.execute("DELETE FROM %s" % name, start_transaction = False)
return True
    except Exception:
return False
def _createVersions(db, cu = None):
if "Versions" in db.tables:
return
if cu is None:
cu = db.cursor()
if idtable.createIdTable(db, "Versions", "versionId", "version"):
cu.execute("INSERT INTO Versions (versionId, version) VALUES (0, NULL)")
db.commit()
db.loadSchema()
# Schema creation functions
def _createFlavors(db):
if "Flavors" in db.tables:
return
cu = db.cursor()
idtable.createIdTable(db, "Flavors", "flavorId", "flavor")
cu.execute("SELECT FlavorID from Flavors")
if cu.fetchone() == None:
# reserve flavor 0 for "no flavor information"
cu.execute("INSERT INTO Flavors VALUES (0, NULL)")
idtable.createMappingTable(db, "DBFlavorMap", "instanceId", "flavorId")
db.commit()
db.loadSchema()
def createDBTroveFiles(db):
if "DBTroveFiles" in db.tables:
return
cu = db.cursor()
_createVersions(db, cu)
cu.execute("""
CREATE TABLE DBTroveFiles(
streamId %(PRIMARYKEY)s,
pathId BINARY(16),
versionId INTEGER,
path %(STRING)s,
fileId BINARY(20),
instanceId INTEGER,
isPresent INTEGER,
stream BLOB
)""" % db.keywords)
cu.execute("CREATE INDEX DBTroveFilesIdx ON DBTroveFiles(fileId)")
cu.execute("CREATE INDEX DBTroveFilesInstanceIdx2 ON DBTroveFiles(instanceId, pathId)")
cu.execute("CREATE INDEX DBTroveFilesPathIdx ON DBTroveFiles(path)")
idtable.createIdTable(db, "Tags", "tagId", "tag")
cu.execute("""
CREATE TABLE DBFileTags(
streamId INTEGER,
tagId INTEGER
)""")
db.commit()
db.loadSchema()
def createInstances(db):
if "Instances" in db.tables:
return
cu = db.cursor()
_createVersions(db, cu)
cu.execute("""
CREATE TABLE Instances(
instanceId %(PRIMARYKEY)s,
troveName %(STRING)s,
versionId INTEGER,
flavorId INTEGER,
timeStamps %(STRING)s,
isPresent INTEGER,
pinned BOOLEAN
)""" % db.keywords)
cu.execute("CREATE INDEX InstancesNameIdx ON Instances(troveName)")
cu.execute("CREATE UNIQUE INDEX InstancesIdx ON "
"Instances(troveName, versionId, flavorId)")
db.commit()
db.loadSchema()
def _createTroveTroves(db):
if "TroveTroves" in db.tables:
return
cu = db.cursor()
cu.execute("""
CREATE TABLE TroveTroves(
instanceId INTEGER NOT NULL,
includedId INTEGER NOT NULL,
flags INTEGER,
inPristine BOOLEAN
)""")
# this index is so we can quickly tell what troves are needed by another trove
cu.execute("CREATE INDEX TroveTrovesIncludedIdx ON TroveTroves(includedId)")
# This index is used to enforce that TroveTroves only contains
# unique TroveTrove (instanceId, includedId) pairs.
cu.execute("CREATE UNIQUE INDEX TroveTrovesInstanceIncluded_uq ON "
"TroveTroves(instanceId,includedId)")
db.commit()
db.loadSchema()
def createTroveInfo(db):
if "TroveInfo" in db.tables:
return
cu = db.cursor()
cu.execute("""
CREATE TABLE TroveInfo(
instanceId INTEGER NOT NULL,
infoType INTEGER NOT NULL,
data %(MEDIUMBLOB)s
)""" % db.keywords)
cu.execute("CREATE INDEX TroveInfoIdx ON TroveInfo(instanceId)")
cu.execute("CREATE INDEX TroveInfoTypeIdx ON TroveInfo(infoType, data)")
cu.execute("CREATE INDEX TroveInfoInstTypeIdx ON TroveInfo(instanceId, infoType)")
db.commit()
db.loadSchema()
def createMetadata(db):
commit = False
cu = db.cursor()
_createVersions(db, cu)
if 'Metadata' not in db.tables:
cu.execute("""
CREATE TABLE Metadata(
metadataId %(PRIMARYKEY)s,
itemId INTEGER NOT NULL,
versionId INTEGER NOT NULL,
branchId INTEGER NOT NULL,
timeStamp NUMERIC(13,3) NOT NULL
)""" % db.keywords)
commit = True
if 'MetadataItems' not in db.tables:
cu.execute("""
CREATE TABLE MetadataItems(
metadataId INTEGER NOT NULL,
class INTEGER NOT NULL,
data TEXT NOT NULL,
language VARCHAR(254) NOT NULL DEFAULT 'C'
)""")
cu.execute("CREATE INDEX MetadataItemsIdx ON MetadataItems(metadataId)")
commit = True
if commit:
db.commit()
db.loadSchema()
def createDataStore(db):
if "DataStore" in db.tables:
return
cu = db.cursor()
cu.execute("""
CREATE TABLE DataStore(
hash BINARY(20) NOT NULL,
count INTEGER,
data BLOB
)""")
cu.execute("CREATE INDEX DataStoreIdx ON DataStore(hash)")
db.commit()
db.loadSchema()
def createDatabaseAttributes(db):
if "DatabaseAttributes" in db.tables:
return
cu = db.cursor()
cu.execute("""
CREATE TABLE DatabaseAttributes(
id %(PRIMARYKEY)s,
name %(STRING)s,
value %(STRING)s
)
""" % db.keywords)
cu.execute("CREATE UNIQUE INDEX DatabaseAttributesNameIdx "
"ON DatabaseAttributes(name)")
cu.execute("INSERT INTO DatabaseAttributes (name, value) "
"VALUES ('transaction counter', '0')")
db.commit()
db.loadSchema()
def _createDepTable(db, cu, name, isTemp):
d = {"tmp" : "", "name" : name}
startTrans = not isTemp
if isTemp:
if name in db.tempTables:
resetTable(cu, name)
return False
d['tmp'] = 'TEMPORARY'
cu.execute("""
CREATE %(tmp)s TABLE %(name)s(
depId %%(PRIMARYKEY)s,
class INTEGER NOT NULL,
name VARCHAR(254) NOT NULL,
flag VARCHAR(254) NOT NULL
) %%(TABLEOPTS)s""" % d % db.keywords, start_transaction = (not isTemp))
cu.execute("CREATE UNIQUE INDEX %sIdx ON %s(class, name, flag)" %
(name, name), start_transaction = startTrans)
if isTemp:
db.tempTables[name] = True
def _createRequiresTable(db, cu, name, isTemp):
d = { "tmp" : "",
"name" : name,
"constraint" : "",
"tmpCol" : ""}
startTrans = not isTemp
if isTemp:
if name in db.tempTables:
resetTable(cu, name)
return False
d['tmp'] = 'TEMPORARY'
d['tmpCol'] = ',satisfied INTEGER DEFAULT 0'
else:
d['constraint'] = """,
CONSTRAINT %(name)s_instanceId_fk
FOREIGN KEY (instanceId) REFERENCES Instances(instanceId)
ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT %(name)s_depId_fk
FOREIGN KEY (depId) REFERENCES Dependencies(depId)
ON DELETE RESTRICT ON UPDATE CASCADE
""" %d
cu.execute("""
CREATE %(tmp)s TABLE %(name)s(
instanceId INTEGER NOT NULL,
depId INTEGER NOT NULL,
depNum INTEGER,
depCount INTEGER %(constraint)s
%(tmpCol)s
) %%(TABLEOPTS)s""" % d % db.keywords, start_transaction = startTrans)
cu.execute("CREATE INDEX %(name)sIdx ON %(name)s(instanceId)" % d,
start_transaction = startTrans)
cu.execute("CREATE INDEX %(name)sIdx2 ON %(name)s(depId)" % d,
start_transaction = startTrans)
# XXX: do we really need this index?
cu.execute("CREATE INDEX %(name)sIdx3 ON %(name)s(depNum)" % d,
start_transaction = startTrans)
if isTemp:
db.tempTables[name] = True
return True
def _createProvidesTable(db, cu, name, isTemp):
d = { "tmp" : "",
"name" : name,
"constraint" : "" }
startTrans = not isTemp
if isTemp:
if name in db.tempTables:
resetTable(cu, name)
return False
d['tmp'] = 'TEMPORARY'
else:
d['constraint'] = """,
CONSTRAINT %(name)s_instanceId_fk
FOREIGN KEY (instanceId) REFERENCES Instances(instanceId)
ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT %(name)s_depId_fk
FOREIGN KEY (depId) REFERENCES Dependencies(depId)
ON DELETE RESTRICT ON UPDATE CASCADE
""" %d
cu.execute("""
CREATE %(tmp)s TABLE %(name)s(
instanceId INTEGER NOT NULL,
depId INTEGER NOT NULL %(constraint)s
) %%(TABLEOPTS)s""" % d % db.keywords, start_transaction = startTrans)
cu.execute("CREATE INDEX %(name)sIdx ON %(name)s(instanceId)" % d,
start_transaction = startTrans)
cu.execute("CREATE INDEX %(name)sIdx2 ON %(name)s(depId)" % d,
start_transaction = startTrans)
if isTemp:
db.tempTables[name] = True
def _createDepWorkTable(db, cu, name):
if name in db.tempTables:
return False
cu.execute("""
CREATE TEMPORARY TABLE %s(
troveId INTEGER,
depNum INTEGER,
flagCount INTEGER,
isProvides INTEGER,
class INTEGER,
name VARCHAR(254),
flag VARCHAR(254),
merged INTEGER
) %%(TABLEOPTS)s""" % name % db.keywords, start_transaction = False)
cu.execute("""
CREATE INDEX %sIdx ON %s(troveId, class, name, flag)
""" % (name, name), start_transaction = False)
db.tempTables[name] = True
# This should be called only once per establishing a db connection
def setupTempDepTables(db, cu=None):
if cu is None:
cu = db.cursor()
_createRequiresTable(db, cu, "TmpRequires", isTemp=True)
_createProvidesTable(db, cu, "TmpProvides", isTemp=True)
_createDepTable(db, cu, 'TmpDependencies', isTemp=True)
_createDepWorkTable(db, cu, "DepCheck")
if "suspectDepsOrig" not in db.tempTables:
cu.execute("CREATE TEMPORARY TABLE suspectDepsOrig(depId integer)",
start_transaction=False)
db.tempTables["suspectDepsOrig"] = True
if "suspectDeps" not in db.tempTables:
cu.execute("CREATE TEMPORARY TABLE suspectDeps(depId integer)",
start_transaction=False)
db.tempTables["suspectDeps"] = True
if "BrokenDeps" not in db.tempTables:
cu.execute("CREATE TEMPORARY TABLE BrokenDeps(depNum INTEGER)",
start_transaction=False)
db.tempTables["BrokenDeps"] = True
if "RemovedTroveIds" not in db.tempTables:
cu.execute("""
CREATE TEMPORARY TABLE RemovedTroveIds(
rowId %(PRIMARYKEY)s,
troveId INTEGER,
nodeId INTEGER
)""" % db.keywords, start_transaction=False)
cu.execute("CREATE INDEX RemovedTroveIdsIdx ON RemovedTroveIds(troveId)",
start_transaction=False)
db.tempTables["RemovedTroveIds"] = True
if "RemovedTroves" not in db.tempTables:
cu.execute("""
CREATE TEMPORARY TABLE RemovedTroves(
name VARCHAR(254),
version %(STRING)s,
flavor %(STRING)s,
nodeId INTEGER
)""" % db.keywords, start_transaction = False)
db.tempTables["RemovedTroves"] = True
db.commit()
def createDependencies(db, skipCommit=False):
commit = False
cu = db.cursor()
if "Dependencies" not in db.tables:
_createDepTable(db, cu, "Dependencies", isTemp=False)
commit = True
if "Requires" not in db.tables:
_createRequiresTable(db, cu, "Requires", isTemp=False)
commit = True
if "Provides" not in db.tables:
_createProvidesTable(db, cu, "Provides", isTemp=False)
commit = True
if commit:
if not skipCommit:
db.commit()
db.loadSchema()
def setupTempTables(db, cu=None, skipCommit=False):
if cu is None:
cu = db.cursor()
if "getFilesTbl" not in db.tempTables:
cu.execute("""
CREATE TEMPORARY TABLE getFilesTbl(
row %(PRIMARYKEY)s,
fileId BINARY
) %(TABLEOPTS)s""" % db.keywords, start_transaction=False)
db.tempTables["getFilesTbl"] = True
if not skipCommit:
db.commit()
def createSchema(db):
_createVersions(db)
createInstances(db)
_createTroveTroves(db)
createDBTroveFiles(db)
_createFlavors(db)
createDependencies(db)
createTroveInfo(db)
createDataStore(db)
createDatabaseAttributes(db)
# SCHEMA Migration
# redefine to enable stdout messaging for the migration process
class SchemaMigration(migration.SchemaMigration):
def message(self, msg = None):
if msg is None:
msg = self.msg
print "\r%s\r" %(' '*len(self.msg)),
self.msg = msg
sys.stdout.write(msg)
sys.stdout.flush()
class MigrateTo_5(SchemaMigration):
Version = 5
def canUpgrade(self):
return self.version in [2,3,4]
def migrate(self):
from conary.local import deptable
class FakeTrove:
def setRequires(self, req):
self.r = req
def setProvides(self, prov):
self.p = prov
def getRequires(self):
return self.r
def getProvides(self):
return self.p
def __init__(self):
self.r = deps.deps.DependencySet()
self.p = deps.deps.DependencySet()
if self.version == 2:
self.cu.execute(
"ALTER TABLE DBInstances ADD COLUMN pinned BOOLEAN")
instances = [ x[0] for x in
self.cu.execute("select instanceId from DBInstances") ]
dtbl = deptable.DependencyTables(self.db)
setupTempDepTables(self.db)
troves = []
for instanceId in instances:
trv = FakeTrove()
dtbl.get(self.cu, trv, instanceId)
troves.append(trv)
self.cu.execute("delete from dependencies")
self.cu.execute("delete from requires")
self.cu.execute("delete from provides")
for instanceId, trv in itertools.izip(instances, troves):
dtbl.add(self.cu, trv, instanceId)
return self.Version
class MigrateTo_6(SchemaMigration):
Version = 6
def migrate(self):
self.cu.execute(
"ALTER TABLE TroveTroves ADD COLUMN inPristine INTEGER")
self.cu.execute("UPDATE TroveTroves SET inPristine=?", True)
# erase unused versions
self.message("Removing unused version strings...")
self.cu.execute("""
DELETE FROM Versions WHERE versionId IN
( SELECT versions.versionid
FROM versions LEFT OUTER JOIN
( SELECT versionid AS usedversions FROM dbinstances
UNION
SELECT versionid AS usedversions FROM dbtrovefiles )
ON usedversions = versions.versionid
WHERE usedversions IS NULL )
""")
return self.Version
class MigrateTo_7(SchemaMigration):
Version = 7
def migrate(self):
self.cu.execute("""
DELETE FROM TroveTroves
WHERE TroveTroves.ROWID in (
SELECT Second.ROWID
FROM TroveTroves AS First
JOIN TroveTroves AS Second USING(instanceId, includedId)
WHERE First.ROWID < Second.ROWID
)""")
self.cu.execute("CREATE UNIQUE INDEX TroveTrovesInstIncIdx ON "
"TroveTroves(instanceId,includedId)")
return self.Version
class MigrateTo_8(SchemaMigration):
Version = 8
def migrate(self):
# we don't alter here because lots of indices have changed
# names; this is just easier
self.cu.execute('DROP INDEX InstancesNameIdx')
self.cu.execute('DROP INDEX InstancesIdx')
createInstances(self.db)
self.cu.execute("""INSERT INTO Instances
(instanceId, troveName, versionId, flavorId,
timeStamps, isPresent, pinned)
SELECT instanceId, troveName, versionId, flavorId,
timeStamps, isPresent, 0 FROM DBInstances
""")
_createFlavors(self.db)
self.cu.execute('INSERT INTO Flavors SELECT * FROM DBFlavors '
'WHERE flavor IS NOT NULL')
self.cu.execute('DROP TABLE DBFlavors')
return self.Version
class MigrateTo_9(SchemaMigration):
Version = 9
def migrate(self):
for klass, infoType in [
(trove.BuildDependencies, trove._TROVEINFO_TAG_BUILDDEPS),
(trove.LoadedTroves, trove._TROVEINFO_TAG_LOADEDTROVES) ]:
for instanceId, data in \
[ x for x in self.cu.execute(
"select instanceId, data from TroveInfo WHERE "
"infoType=?", infoType) ]:
obj = klass(data)
f = obj.freeze()
if f != data:
self.cu.execute("update troveinfo set data=? where "
"instanceId=? and infoType=?", f,
instanceId, infoType)
self.cu.execute("delete from troveinfo where "
"instanceId=? and infoType=?",
instanceId, trove._TROVEINFO_TAG_SIGS)
return self.Version
class MigrateTo_10(SchemaMigration):
Version = 10
def migrate(self):
self.cu.execute("SELECT COUNT(*) FROM DBTroveFiles")
total = self.cu.fetchone()[0]
self.cu.execute("SELECT instanceId, fileId, stream FROM DBTroveFiles")
changes = []
changedTroves = set()
for i, (instanceId, fileId, stream) in enumerate(self.cu):
i += 1
if i % 1000 == 0 or (i == total):
self.message("Reordering streams and recalculating "
"fileIds... %d/%d" %(i, total))
f = files.ThawFile(stream, fileId)
if not f.provides() and not f.requires():
# if there are no deps, skip
continue
newStream = f.freeze()
newFileId = f.fileId()
if newStream == stream and newFileId == fileId:
# if the stream didn't change, skip
continue
changes.append((newFileId, newStream, fileId))
changedTroves.add(instanceId)
# make the changes
for newFileId, newStream, fileId in changes:
self.cu.execute(
"UPDATE DBTroveFiles SET fileId=?, stream=? WHERE fileId=?",
(newFileId, newStream, fileId))
# delete signatures for the instances we changed
for instanceId in changedTroves:
self.cu.execute(
"DELETE FROM troveinfo WHERE instanceId=? AND infoType=?",
(instanceId, trove._TROVEINFO_TAG_SIGS))
return self.Version
# convert contrib.rpath.com -> contrib.rpath.org
class MigrateTo_11(SchemaMigration):
Version = 11
def migrate(self):
self.cu.execute('select count(*) from versions')
total = self.cu.fetchone()[0]
updates = []
self.cu.execute("select versionid, version from versions")
for i, (versionId, version) in enumerate(self.cu):
self.message("Renaming contrib.rpath.com to contrib.rpath.org... "
"%d/%d" %(i+1, total))
if not versionId:
continue
new = version.replace('contrib.rpath.com', 'contrib.rpath.org')
if version != new:
updates.append((versionId, new))
for versionId, version in updates:
self.cu.execute("update versions set version=? where versionid=?",
(version, versionId))
# erase signature troveinfo since the version changed
self.cu.execute("""
delete from TroveInfo
where infotype = 9
and instanceid in (
select instanceid
from instances
where instances.versionid = ? )""",
(versionId,))
return self.Version
# calculate path hashes for every trove
class MigrateTo_12(SchemaMigration):
Version = 12
def migrate(self):
instanceIds = [ x[0] for x in self.cu.execute(
"select instanceId from instances") ]
for i, instanceId in enumerate(instanceIds):
if i % 20 == 0:
self.message("Updating trove %d of %d" %(
i, len(instanceIds)))
ph = trove.PathHashes()
for path, in self.cu.execute(
"select path from dbtrovefiles where instanceid=?",
instanceId):
ph.addPath(path)
self.cu.execute("""
insert into troveinfo(instanceId, infoType, data)
values(?, ?, ?)""", instanceId,
trove._TROVEINFO_TAG_PATH_HASHES, ph.freeze())
return self.Version
class MigrateTo_13(SchemaMigration):
Version = 13
def migrate(self):
self.cu.execute("DELETE FROM TroveInfo WHERE infoType=?",
trove._TROVEINFO_TAG_SIGS)
self.cu.execute("DELETE FROM TroveInfo WHERE infoType=?",
trove._TROVEINFO_TAG_FLAGS)
self.cu.execute("DELETE FROM TroveInfo WHERE infoType=?",
trove._TROVEINFO_TAG_INSTALLBUCKET)
flags = trove.TroveFlagsStream()
flags.isCollection(set = True)
collectionStream = flags.freeze()
flags.isCollection(set = False)
notCollectionStream = flags.freeze()
self.cu.execute("""
INSERT INTO TroveInfo
SELECT instanceId, ?, ? FROM Instances
WHERE NOT (trovename LIKE '%:%' OR trovename LIKE 'fileset-%')
""", trove._TROVEINFO_TAG_FLAGS, collectionStream)
self.cu.execute("""
INSERT INTO TroveInfo
SELECT instanceId, ?, ? FROM Instances
WHERE (trovename LIKE '%:%' OR trovename LIKE 'fileset-%')
""", trove._TROVEINFO_TAG_FLAGS, notCollectionStream)
return self.Version
class MigrateTo_14(SchemaMigration):
Version = 14
def migrate(self):
# we need to rerun the MigrateTo_10 migration since we missed
# some trovefiles the first time around
class M10(MigrateTo_10):
# override sanity checks to force the migration to run
# out of order
def canUpgrade(self):
return self.version == 13
m10 = M10(self.db)
m10.migrate()
# We need to make sure that loadedTroves and buildDeps troveinfo
# isn't included in any commponent's trove.
self.cu.execute("""
DELETE FROM TroveInfo
WHERE
infotype IN (4, 5)
AND instanceid IN (SELECT instanceid
FROM Instances
WHERE trovename LIKE '%:%')""")
return self.Version
class MigrateTo_15(SchemaMigration):
Version = 15
def migrate(self):
# some indexes have changed - we need to update the local schema
if "TroveInfoIdx2" in self.db.tables["TroveInfo"]:
self.cu.execute("DROP INDEX TroveInfoIdx2")
self.cu.execute("CREATE INDEX TroveInfoTypeIdx ON TroveInfo(infoType, instanceId)")
if "TroveTrovesInstanceIdx" in self.db.tables["TroveTroves"]:
self.cu.execute("DROP INDEX TroveTrovesInstanceIdx")
if "TroveTrovesInstIncIdx" in self.db.tables["TroveTroves"]:
self.cu.execute("DROP INDEX TroveTrovesInstIncIdx")
if "TroveTrovesInstanceIncluded_uq" not in self.db.tables["TroveTroves"]:
self.cu.execute(
"CREATE UNIQUE INDEX TroveTrovesInstanceIncluded_uq ON "
"TroveTroves(instanceId,includedId)")
self.db.commit()
self.db.loadSchema()
return self.Version
class MigrateTo_16(SchemaMigration):
Version = 16
def migrate(self):
cu = self.cu
cu.execute("""
CREATE TABLE TroveTroves2(
instanceId INTEGER,
includedId INTEGER,
flags INTEGER,
inPristine BOOLEAN
)""")
cu.execute('''
INSERT INTO TroveTroves2
SELECT instanceId, includedId,
CASE WHEN byDefault THEN %d ELSE 0 END,
inPristine
FROM TroveTroves''' % TROVE_TROVES_BYDEFAULT)
cu.execute('DROP TABLE TroveTroves')
cu.execute('ALTER TABLE TroveTroves2 RENAME TO TroveTroves')
cu.execute("CREATE INDEX TroveTrovesIncludedIdx ON TroveTroves(includedId)")
# This index is used to enforce that TroveTroves only contains
# unique TroveTrove (instanceId, includedId) pairs.
cu.execute("CREATE UNIQUE INDEX TroveTrovesInstanceIncluded_uq ON "
"TroveTroves(instanceId,includedId)")
self.db.commit()
self.db.loadSchema()
return self.Version
class MigrateTo_17(SchemaMigration):
Version = 17
def migrate(self):
# whoops, path hashes weren't sorted, sigs are invalid.
rows = self.cu.execute("""
SELECT instanceId,data from TroveInfo WHERE infoType=?
""", trove._TROVEINFO_TAG_PATH_HASHES)
neededChanges = []
PathHashes = trove.PathHashes
for instanceId, data in rows:
frzn = PathHashes(data).freeze()
if frzn != data:
neededChanges.append((instanceId, frzn))
cu = self.cu
for instanceId, frzn in neededChanges:
cu.execute('''DELETE FROM TroveInfo
WHERE instanceId=? AND infoType=?''', instanceId,
trove._TROVEINFO_TAG_SIGS)
cu.execute('''UPDATE TroveInfo SET data=?
WHERE instanceId=? AND infoType=?
''', frzn, instanceId, trove._TROVEINFO_TAG_PATH_HASHES)
return self.Version
class MigrateTo_18(SchemaMigration):
Version = 18
def migrate(self):
cu = self.cu
cu.execute("""
CREATE TABLE NewInstances(
instanceId %(PRIMARYKEY)s,
troveName %(STRING)s,
versionId INTEGER,
flavorId INTEGER,
timeStamps %(STRING)s,
isPresent INTEGER,
pinned BOOLEAN
)""" % self.db.keywords)
cu.execute('INSERT INTO NewInstances SELECT * FROM Instances')
cu.execute('DROP TABLE Instances')
cu.execute('ALTER TABLE NewInstances RENAME TO Instances')
# recreate indexes
cu.execute("CREATE INDEX InstancesNameIdx ON Instances(troveName)")
cu.execute("CREATE UNIQUE INDEX InstancesIdx ON "
"Instances(troveName, versionId, flavorId)")
cu.execute('''DELETE FROM TroveInfo WHERE instanceId
NOT IN (SELECT instanceId FROM Instances)''')
# delete BuildDeps, Loaded troves, label path, and policy tups
# from components (they shouldn't have had them in the first place)
cu.execute('''DELETE FROM TroveInfo
WHERE infoType in (4,5,11,12) AND
instanceId IN (
SELECT instanceId FROM Instances
WHERE troveName LIKE '%:%')''')
return self.Version
class MigrateTo_19(SchemaMigration):
Version = 19
def migrate(self):
cu = self.cu
versionStream = streams.IntStream()
versionStream.set(0)
incompleteStream = streams.ByteStream()
incompleteStream.set(1)
for tag, data in [
(trove._TROVEINFO_TAG_TROVEVERSION, versionStream.freeze()),
(trove._TROVEINFO_TAG_INCOMPLETE, incompleteStream.freeze()) ]:
cu.execute("""
INSERT INTO TroveInfo
SELECT instanceId, ?, ? FROM Instances
""", (tag, data))
return self.Version
class MigrateTo_20(SchemaMigration):
Version = 20
def migrate(self):
import tempfile
import os
from conary import dbstore
# figure out where the database lives currently
assert(self.db.driver == 'sqlite')
dbPath = self.db.database
assert(isinstance(dbPath, str))
# make a new database file
fd, fn = tempfile.mkstemp(prefix=os.path.basename(dbPath) + '-new-',
dir=os.path.dirname(dbPath))
os.close(fd)
newdb = dbstore.connect(fn, driver='sqlite')
# create the schema in the new db
newdb.loadSchema()
createSchema(newdb)
# make sure we have a good view of the new schema
newdb.commit()
newdb.loadSchema()
cu = self.cu
# have to commit in order to attach
self.db.commit()
cu.execute("ATTACH '%s' AS newdb" %fn, start_transaction=False)
for t in newdb.tables.keys():
self.message('Converting database schema to version 20 '
'- current table: %s' %t)
cu.execute('INSERT OR REPLACE INTO newdb.%s '
'SELECT * FROM %s' % (t, t))
# fix up some potentially bad entries we know about
cu.execute("""UPDATE newdb.TroveInfo
SET data='1.0'
WHERE hex(data)='31' AND infotype=3""")
cu.execute("""UPDATE newdb.Dependencies
SET flag='1.0'
                          WHERE name LIKE 'conary:%' AND flag='1'""")
self.message('Converting database schema to version 20 '
'- committing')
self.db.commit()
self.message('')
newdb.close()
os.chmod(fn, 0644)
os.rename(dbPath, dbPath + '-pre-schema-update')
os.rename(fn, dbPath)
self.db.reopen()
self.db.loadSchema()
return self.Version
def _lockedSql(db, func, *args):
"""
Ensure write lock on database, otherwise concurrent access can result in
"schema has changed" errors.
"""
if not db.inTransaction():
db.cursor().execute('BEGIN IMMEDIATE')
return func(*args)
# silent update while we're at schema 20. We only need to create a
# index, so there is no need to do a full blown migration and stop
# conary from working until a schema migration is done
def optSchemaUpdate(db):
# drop any ANALYZE information, because it makes sqlite go
# very slowly.
cu = db.cursor()
cu.execute("select count(*) from sqlite_master where name='sqlite_stat1'")
count = cu.fetchall()[0][0]
if count != 0:
cu.execute('select count(*) from sqlite_stat1')
count = cu.fetchall()[0][0]
if count != 0:
_lockedSql(db, cu.execute, "DELETE FROM sqlite_stat1")
# Create DatabaseAttributes (if it doesn't exist yet)
if 'DatabaseAttributes' not in db.tables:
_lockedSql(db, createDatabaseAttributes, db)
    # do we have the index we need?
if "TroveInfoInstTypeIdx" not in db.tables["TroveInfo"]:
_lockedSql(db, db.createIndex, "TroveInfo", "TroveInfoInstTypeIdx", "infoType,instanceId")
if 'DBTroveFilesInstanceIdx' in db.tables['DBTroveFiles']:
_lockedSql(db, db.dropIndex, 'DBTroveFiles', 'DBTroveFilesInstanceIdx')
if 'DBTroveFilesInstanceIdx2' not in db.tables['DBTroveFiles']:
_lockedSql(db, db.createIndex, 'DBTroveFiles',
'DBTroveFilesInstanceIdx2', 'instanceId, pathId')
def _shareLock(db):
"""
Take a share lock on the database syslock when an optional migration might
run. If it conflicts due to an ongoing update then bail out.
"""
if db.database == ':memory:':
# Nothing to lock
return None, True
lockPath = os.path.join(os.path.dirname(db.database), 'syslock')
try:
lockFile = open(lockPath, 'r+')
fcntl.lockf(lockFile.fileno(), fcntl.LOCK_SH | fcntl.LOCK_NB)
except IOError as err:
if err.args[0] in (errno.EAGAIN, errno.EACCES, errno.EROFS):
# Busy or no write access; skip optional migrations
return None, False
elif err.args[0] == errno.ENOENT:
# Database has never been locked. Probably running in a testsuite,
# so proceed anyway.
return None, True
raise
return lockFile, True
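
# A standalone sketch of the non-blocking shared-lock pattern _shareLock()
# uses, with a caller-supplied path standing in for Conary's syslock file
# (fcntl and errno are already imported at the top of this module):
# LOCK_SH|LOCK_NB either succeeds immediately or raises IOError with
# EAGAIN/EACCES while a writer holds the exclusive lock.
def trySharedLock(path):
    lockFile = open(path, 'r+')
    try:
        fcntl.lockf(lockFile.fileno(), fcntl.LOCK_SH | fcntl.LOCK_NB)
    except IOError as err:
        lockFile.close()
        if err.args[0] in (errno.EAGAIN, errno.EACCES):
            return None  # busy; the caller should skip optional work
        raise
    return lockFile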
def checkVersion(db):
global VERSION
version = db.getVersion()
if version == VERSION:
# the actions performed by this function should be integrated
# in the next schema update, when we have a reason to block
# conary functionality... These schema changes *MUST* not be
# required for Read Only functionality
lockFile = None
try:
try:
lockFile, locked = _shareLock(db)
if locked:
optSchemaUpdate(db)
except (sqlerrors.ReadOnlyDatabase, sqlerrors.DatabaseLocked):
pass
finally:
if lockFile:
lockFile.close()
return version
if version > VERSION:
raise NewDatabaseSchema
if version == 0:
# assume we're setting up a new environment
if "DatabaseVersion" not in db.tables:
# if DatabaseVersion does not exist, but any other tables do exist,
# then the database version is too old to deal with it
if len(db.tables) > 0:
raise OldDatabaseSchema
version = db.setVersion(VERSION)
if version in (2, 3, 4):
version = MigrateTo_5(db)()
# instantiate and call appropriate migration objects in succession.
while version and version < VERSION:
fname = 'MigrateTo_' + str(version.major + 1)
migr = sys.modules[__name__].__dict__[fname](db)
version = migr()
return version
class OldDatabaseSchema(errors.DatabaseError):
def __str__(self):
return self.msg
def __init__(self, msg = None):
if msg:
self.msg = msg
else:
self.msg = "The Conary database on this system is too old. " \
"For information on how to\nconvert this database, " \
"please visit http://wiki.rpath.com/ConaryConversion."
class NewDatabaseSchema(errors.DatabaseError):
msg = """The conary database on this system is too new. You may have multiple versions of conary installed and be running the wrong one, or your conary may have been downgraded. Please visit http://wiki.rpath.com for information on how to get support."""
def __init__(self):
errors.DatabaseError.__init__(self, self.msg)
| sassoftware/conary | conary/local/schema.py | Python | apache-2.0 | 36,311 | 0.003883 |
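
# A sketch of how the next schema bump is assumed to be wired into the module
# above: raise VERSION to 21 and add a MigrateTo_21 class; checkVersion()
# finds each step by name ('MigrateTo_' + str(version.major + 1)) and runs
# the chain until the schema is current. The body below is a hypothetical
# placeholder, not a real migration.
class MigrateTo_21(SchemaMigration):
    Version = 21
    def migrate(self):
        # the real schema changes would go here, e.g.
        # self.cu.execute("CREATE INDEX ... ON ...")
        return self.Version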
def __bootstrap__():
global __bootstrap__, __loader__, __file__
import sys, pkg_resources, imp
__file__ = pkg_resources.resource_filename(__name__,'win32service.pyd')
__loader__ = None; del __bootstrap__, __loader__
imp.load_dynamic(__name__,__file__)
__bootstrap__()
| JulienMcJay/eclock | windows/Python27/Lib/site-packages/pywin32-218-py2.7-win32.egg/win32service.py | Python | gpl-2.0 | 283 | 0.035336 |
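
# The win32service.py stub above is the standard setuptools shim for a C
# extension shipped inside an egg: pkg_resources.resource_filename() extracts
# win32service.pyd to an on-disk cache, imp.load_dynamic() loads it under this
# module's name, and the stub then deletes its own bootstrap symbols.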
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from .layer_function_generator import generate_layer_fn, generate_layer_fn_noattr
from .. import core
from ..framework import convert_np_dtype_to_dtype_
__activations_noattr__ = [
'sigmoid',
'logsigmoid',
'exp',
'tanh',
'tanh_shrink',
'softshrink',
'sqrt',
'abs',
'ceil',
'floor',
'cos',
'sin',
'round',
'reciprocal',
'square',
'softplus',
'softsign',
]
__all__ = []
for _OP in set(__all__):
globals()[_OP] = generate_layer_fn(_OP)
# Hot fix for unit tests that use:
#   fluid.layers.scale(x=x, scale=10.0, out=out_var)
# e.g. test_program_code.py, test_dist_train.py
globals()['_scale'] = generate_layer_fn('scale')
globals()['_elementwise_div'] = generate_layer_fn('elementwise_div')
__all__ += __activations_noattr__
for _OP in set(__activations_noattr__):
globals()[_OP] = generate_layer_fn_noattr(_OP)
__all__ += ["uniform_random"]
_uniform_random_ = generate_layer_fn('uniform_random')
def uniform_random(shape, dtype=None, min=None, max=None, seed=None):
locals_var = locals().keys()
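    # Capture the argument names up front; locals() would otherwise grow as
    # new local variables are defined below.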
if not isinstance(dtype, core.VarDesc.VarType):
dtype = convert_np_dtype_to_dtype_(dtype)
kwargs = dict()
for name in locals_var:
val = locals()[name]
if val is not None:
kwargs[name] = val
return _uniform_random_(**kwargs)
uniform_random.__doc__ = _uniform_random_.__doc__ + """
Examples:
>>> result = fluid.layers.uniform_random(shape=[32, 784])
"""
__all__ += ['hard_shrink']
_hard_shrink_ = generate_layer_fn('hard_shrink')
def hard_shrink(x, threshold=None):
locals_var = locals().keys()
kwargs = dict()
for name in locals_var:
val = locals()[name]
if val is not None:
kwargs[name] = val
return _hard_shrink_(**kwargs)
hard_shrink.__doc__ = _hard_shrink_.__doc__ + """
Examples:
>>> data = fluid.layers.data(name="input", shape=[784])
>>> result = fluid.layers.hard_shrink(x=data, threshold=0.3)
"""
__all__ += ['cumsum']
_cum_sum_ = generate_layer_fn('cumsum')
def cumsum(x, axis=None, exclusive=None, reverse=None):
locals_var = locals().keys()
kwargs = dict()
for name in locals_var:
val = locals()[name]
if val is not None:
kwargs[name] = val
return _cum_sum_(**kwargs)
cumsum.__doc__ = _cum_sum_.__doc__ + """
Examples:
>>> data = fluid.layers.data(name="input", shape=[32, 784])
>>> result = fluid.layers.cumsum(data, axis=0)
"""
__all__ += ['thresholded_relu']
_thresholded_relu_ = generate_layer_fn('thresholded_relu')
def thresholded_relu(x, threshold=None):
locals_var = locals().keys()
kwargs = dict()
for name in locals_var:
val = locals()[name]
if val is not None:
kwargs[name] = val
    return _thresholded_relu_(**kwargs)
thresholded_relu.__doc__ = _thresholded_relu_.__doc__ + """
Examples:
>>> data = fluid.layers.data(name="input", shape=[1])
>>> result = fluid.layers.thresholded_relu(data, threshold=0.4)
"""
| reyoung/Paddle | python/paddle/fluid/layers/ops.py | Python | apache-2.0 | 3,701 | 0.00027 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Create a static WebGL library and run it in the browser."""
from __future__ import absolute_import, print_function
import os, shutil, SimpleHTTPServer, SocketServer
import tvm
from tvm.contrib import emscripten, util
import numpy as np
def try_static_webgl_library():
curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
# Change to lib/ which contains "libtvm_runtime.bc".
os.chdir(os.path.join(curr_path, "../../lib"))
# Create OpenGL module.
n = tvm.var("n")
A = tvm.placeholder((n,), name='A', dtype="float")
B = tvm.compute((n,), lambda *i: A[i], name="B")
s = tvm.create_schedule(B.op)
s[B].opengl()
target_host = "llvm -target=asmjs-unknown-emscripten -system-lib"
f = tvm.build(s, [A, B], name="identity", target="opengl",
target_host=target_host)
# Create a JS library that contains both the module and the tvm runtime.
path_dso = "identity_static.js"
f.export_library(path_dso, emscripten.create_js, options=[
"-s", "USE_GLFW=3",
"-s", "USE_WEBGL2=1",
"-lglfw",
])
# Create "tvm_runtime.js" and "identity_static.html" in lib/
shutil.copyfile(os.path.join(curr_path, "../../web/tvm_runtime.js"),
"tvm_runtime.js")
shutil.copyfile(os.path.join(curr_path, "test_static_webgl_library.html"),
"identity_static.html")
port = 8080
handler = SimpleHTTPServer.SimpleHTTPRequestHandler
httpd = SocketServer.TCPServer(("", port), handler)
print("Please open http://localhost:" + str(port) + "/identity_static.html")
httpd.serve_forever()
if __name__ == "__main__":
try_static_webgl_library()
| Huyuwei/tvm | tests/webgl/test_static_webgl_library.py | Python | apache-2.0 | 2,490 | 0.001606 |
import _plotly_utils.basevalidators
class XpadValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name="xpad", parent_name="scatter.marker.colorbar", **kwargs
):
super(XpadValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
min=kwargs.pop("min", 0),
**kwargs
)
| plotly/plotly.py | packages/python/plotly/plotly/validators/scatter/marker/colorbar/_xpad.py | Python | mit | 460 | 0.002174 |
import sys
import pkg_resources
from style.styled_string_builder import _StyledStringBuilder
try:
__version__ = pkg_resources.get_distribution('style').version
except Exception:
__version__ = 'unknown'
_enabled = sys.stdout.isatty()
if '--color' in sys.argv:
_enabled = True
elif '--no-color' in sys.argv:
_enabled = False
styled_string_builder = _StyledStringBuilder([], True)
styled_string_builder.enabled = _enabled
styled_string_builder.__version__ = __version__
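# Replace this module object in sys.modules so that `import style` yields the
# callable _StyledStringBuilder instance directly instead of a plain module.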
sys.modules[__name__] = styled_string_builder
| lmittmann/clr | style/__init__.py | Python | mit | 533 | 0 |
from django import http
import mock
from nose.tools import eq_
from olympia.amo.tests import TestCase
from olympia.addons import decorators as dec
from olympia.addons.models import Addon
class TestAddonView(TestCase):
def setUp(self):
super(TestAddonView, self).setUp()
self.addon = Addon.objects.create(slug='x', type=1)
self.func = mock.Mock()
self.func.return_value = mock.sentinel.OK
self.func.__name__ = 'mock_function'
self.view = dec.addon_view(self.func)
self.request = mock.Mock()
self.slug_path = '/addon/%s/reviews' % self.addon.slug
self.request.path = self.id_path = '/addon/%s/reviews' % self.addon.id
self.request.GET = {}
def test_301_by_id(self):
res = self.view(self.request, str(self.addon.id))
self.assert3xx(res, self.slug_path, 301)
def test_slug_replace_no_conflict(self):
self.request.path = '/addon/{id}/reviews/{id}345/path'.format(
id=self.addon.id)
res = self.view(self.request, str(self.addon.id))
self.assert3xx(res, '/addon/{slug}/reviews/{id}345/path'.format(
id=self.addon.id, slug=self.addon.slug), 301)
def test_301_with_querystring(self):
self.request.GET = mock.Mock()
self.request.GET.urlencode.return_value = 'q=1'
res = self.view(self.request, str(self.addon.id))
self.assert3xx(res, self.slug_path + '?q=1', 301)
def test_200_by_slug(self):
res = self.view(self.request, self.addon.slug)
eq_(res, mock.sentinel.OK)
def test_404_by_id(self):
with self.assertRaises(http.Http404):
self.view(self.request, str(self.addon.id * 2))
def test_404_by_slug(self):
with self.assertRaises(http.Http404):
self.view(self.request, self.addon.slug + 'xx')
def test_alternate_qs_301_by_id(self):
def qs():
return Addon.objects.filter(type=1)
view = dec.addon_view_factory(qs=qs)(self.func)
res = view(self.request, str(self.addon.id))
self.assert3xx(res, self.slug_path, 301)
def test_alternate_qs_200_by_slug(self):
def qs():
return Addon.objects.filter(type=1)
view = dec.addon_view_factory(qs=qs)(self.func)
res = view(self.request, self.addon.slug)
eq_(res, mock.sentinel.OK)
def test_alternate_qs_404_by_id(self):
def qs():
return Addon.objects.filter(type=2)
view = dec.addon_view_factory(qs=qs)(self.func)
with self.assertRaises(http.Http404):
view(self.request, str(self.addon.id))
def test_alternate_qs_404_by_slug(self):
def qs():
return Addon.objects.filter(type=2)
view = dec.addon_view_factory(qs=qs)(self.func)
with self.assertRaises(http.Http404):
view(self.request, self.addon.slug)
def test_addon_no_slug(self):
app = Addon.objects.create(type=1, name='xxxx')
res = self.view(self.request, app.slug)
eq_(res, mock.sentinel.OK)
def test_slug_isdigit(self):
app = Addon.objects.create(type=1, name='xxxx')
app.update(slug=str(app.id))
r = self.view(self.request, app.slug)
eq_(r, mock.sentinel.OK)
request, addon = self.func.call_args[0]
eq_(addon, app)
class TestAddonViewWithUnlisted(TestAddonView):
def setUp(self):
super(TestAddonViewWithUnlisted, self).setUp()
self.view = dec.addon_view_factory(
qs=Addon.with_unlisted.all)(self.func)
@mock.patch('olympia.access.acl.check_unlisted_addons_reviewer',
lambda r: False)
@mock.patch('olympia.access.acl.check_addon_ownership',
lambda *args, **kwargs: False)
def test_unlisted_addon(self):
"""Return a 404 for non authorized access."""
self.addon.update(is_listed=False)
with self.assertRaises(http.Http404):
self.view(self.request, self.addon.slug)
@mock.patch('olympia.access.acl.check_unlisted_addons_reviewer',
lambda r: False)
@mock.patch('olympia.access.acl.check_addon_ownership',
lambda *args, **kwargs: True)
def test_unlisted_addon_owner(self):
"""Addon owners have access."""
self.addon.update(is_listed=False)
assert self.view(self.request, self.addon.slug) == mock.sentinel.OK
request, addon = self.func.call_args[0]
assert addon == self.addon
@mock.patch('olympia.access.acl.check_unlisted_addons_reviewer',
lambda r: True)
@mock.patch('olympia.access.acl.check_addon_ownership',
lambda *args, **kwargs: False)
def test_unlisted_addon_unlisted_admin(self):
"""Unlisted addon reviewers have access."""
self.addon.update(is_listed=False)
assert self.view(self.request, self.addon.slug) == mock.sentinel.OK
request, addon = self.func.call_args[0]
assert addon == self.addon
| jpetto/olympia | src/olympia/addons/tests/test_decorators.py | Python | bsd-3-clause | 5,023 | 0 |
'''
Created on 3 cze 2014
@author: Przemek
'''
from src.items.bytes import Bytes
from src.parser.measurable import Measurable
class StringIdItem(Measurable):
'''
    A dex string_id_item: a single 4-byte offset (string_data_off) into the
    string data section.
'''
def __init__(self, parent):
'''
Constructor
'''
Measurable.__init__(self, parent)
        self._data = Bytes(self, 4)
| PrzemekBurczyk/dalvik-compiler | src/items/string_id_item.py | Python | mit | 347 | 0.005764 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob.dec
from nova import test
from nova import context
from nova import flags
from nova.api.openstack.limits import RateLimitingMiddleware
from nova.api.openstack.common import limited
from nova.tests.api.openstack import fakes
from webob import Request
FLAGS = flags.FLAGS
@webob.dec.wsgify
def simple_wsgi(req):
return ""
class RateLimitingMiddlewareTest(test.TestCase):
def test_get_action_name(self):
middleware = RateLimitingMiddleware(simple_wsgi)
def verify(method, url, action_name):
req = Request.blank(url)
req.method = method
action = middleware.get_action_name(req)
self.assertEqual(action, action_name)
verify('PUT', '/servers/4', 'PUT')
verify('DELETE', '/servers/4', 'DELETE')
verify('POST', '/images/4', 'POST')
verify('POST', '/servers/4', 'POST servers')
verify('GET', '/foo?a=4&changes-since=never&b=5', 'GET changes-since')
verify('GET', '/foo?a=4&monkeys-since=never&b=5', None)
verify('GET', '/servers/4', None)
verify('HEAD', '/servers/4', None)
def exhaust(self, middleware, method, url, username, times):
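        # Issue `times` requests that should succeed, then verify the next one
        # is rate limited with a 413 and a Retry-After header.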
req = Request.blank(url, dict(REQUEST_METHOD=method),
headers={'X-Auth-User': username})
req.environ['nova.context'] = context.RequestContext(username,
username)
for i in range(times):
resp = req.get_response(middleware)
self.assertEqual(resp.status_int, 200)
resp = req.get_response(middleware)
self.assertEqual(resp.status_int, 413)
self.assertTrue('Retry-After' in resp.headers)
def test_single_action(self):
middleware = RateLimitingMiddleware(simple_wsgi)
self.exhaust(middleware, 'DELETE', '/servers/4', 'usr1', 100)
self.exhaust(middleware, 'DELETE', '/servers/4', 'usr2', 100)
def test_POST_servers_action_implies_POST_action(self):
middleware = RateLimitingMiddleware(simple_wsgi)
self.exhaust(middleware, 'POST', '/servers/4', 'usr1', 10)
self.exhaust(middleware, 'POST', '/images/4', 'usr2', 10)
self.assertTrue(set(middleware.limiter._levels) == \
set(['usr1:POST', 'usr1:POST servers', 'usr2:POST']))
def test_POST_servers_action_correctly_ratelimited(self):
middleware = RateLimitingMiddleware(simple_wsgi)
# Use up all of our "POST" allowance for the minute, 5 times
for i in range(5):
self.exhaust(middleware, 'POST', '/servers/4', 'usr1', 10)
# Reset the 'POST' action counter.
del middleware.limiter._levels['usr1:POST']
# All 50 daily "POST servers" actions should be all used up
self.exhaust(middleware, 'POST', '/servers/4', 'usr1', 0)
def test_proxy_ctor_works(self):
middleware = RateLimitingMiddleware(simple_wsgi)
self.assertEqual(middleware.limiter.__class__.__name__, "Limiter")
middleware = RateLimitingMiddleware(simple_wsgi, service_host='foobar')
self.assertEqual(middleware.limiter.__class__.__name__, "WSGIAppProxy")
| superstack/nova | nova/tests/api/openstack/__init__.py | Python | apache-2.0 | 3,850 | 0.000519 |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Modules used for ETL - Create User
# Modules required:
import os
import xmlrpclib, sys, csv, ConfigParser
from datetime import datetime
# Set up parameters (for connection to Open ERP Database) *********************
config = ConfigParser.ConfigParser()
file_config = os.path.expanduser('~/ETL/generalfood/openerp.cfg')
config.read([file_config])
dbname = config.get('dbaccess','dbname')
user = config.get('dbaccess','user')
pwd = config.get('dbaccess','pwd')
server = config.get('dbaccess','server')
port = config.get('dbaccess','port') # use getint() here if a numeric port is needed
separator = eval(config.get('dbaccess','separator'))
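# NOTE: eval() executes whatever the config value contains; the file must be trusted.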
# XMLRPC connection for autentication (UID) and proxy
sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/common' % (server, port), allow_none=True)
uid = sock.login(dbname ,user ,pwd)
sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (server, port), allow_none=True)
if len(sys.argv) != 2:
print "Use: errata_corrige parameters\n parameters: partner"
sys.exit()
if sys.argv[1] == 'partner':
    result = sock.execute(dbname, uid, pwd, "quality.claim", "correct_parent_partner")
print "Partner updated"
| Micronaet/micronaet-quality | quality/etl/errata_corrige.py | Python | agpl-3.0 | 1,215 | 0.014815 |
'''
3D Rotating Monkey Head
========================
This example demonstrates using OpenGL to display a rotating monkey head. This
includes loading a Blender OBJ file, shaders written in OpenGL's Shading
Language (GLSL), and using scheduled callbacks.
The monkey.obj file is an OBJ file output from the Blender free 3D creation
software. The file is text, listing vertices and faces and is loaded
using a class in the file objloader.py. The file simple.glsl is
a simple vertex and fragment shader written in GLSL.
'''
from kivy.app import App
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.uix.widget import Widget
from kivy.resources import resource_find
from kivy.graphics.transformation import Matrix
from kivy.graphics.opengl import *
from kivy.graphics import *
from objloader import ObjFile
class Renderer(Widget):
def __init__(self, **kwargs):
self.canvas = RenderContext(compute_normal_mat=True)
self.canvas.shader.source = resource_find('simple.glsl')
self.scene = ObjFile(resource_find("monkey.obj"))
super(Renderer, self).__init__(**kwargs)
with self.canvas:
self.cb = Callback(self.setup_gl_context)
PushMatrix()
self.setup_scene()
PopMatrix()
self.cb = Callback(self.reset_gl_context)
Clock.schedule_interval(self.update_glsl, 1 / 60.)
def setup_gl_context(self, *args):
glEnable(GL_DEPTH_TEST)
def reset_gl_context(self, *args):
glDisable(GL_DEPTH_TEST)
def update_glsl(self, delta):
asp = self.width / float(self.height)
proj = Matrix().view_clip(-asp, asp, -1, 1, 1, 100, 1)
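        # view_clip(left, right, bottom, top, near, far, perspective=1) builds
        # a perspective frustum matched to the widget's current aspect ratio.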
self.canvas['projection_mat'] = proj
self.canvas['diffuse_light'] = (1.0, 1.0, 0.8)
self.canvas['ambient_light'] = (0.1, 0.1, 0.1)
self.rot.angle += delta * 100
def setup_scene(self):
Color(1, 1, 1, 1)
PushMatrix()
Translate(0, 0, -3)
self.rot = Rotate(1, 0, 1, 0)
m = list(self.scene.objects.values())[0]
UpdateNormalMatrix()
self.mesh = Mesh(
vertices=m.vertices,
indices=m.indices,
fmt=m.vertex_format,
mode='triangles',
)
PopMatrix()
class RendererApp(App):
def build(self):
return Renderer()
if __name__ == "__main__":
RendererApp().run()
| Cheaterman/kivy | examples/3Drendering/main.py | Python | mit | 2,405 | 0 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility functions used in XLNet model."""
from __future__ import absolute_import
from __future__ import division
# from __future__ import google_type_annotations
from __future__ import print_function
import json
import os
import tensorflow as tf
def create_run_config(is_training, is_finetune, flags):
"""Helper function for creating RunConfig."""
kwargs = dict(
is_training=is_training,
use_tpu=flags.use_tpu,
dropout=flags.dropout,
dropout_att=flags.dropout_att,
init_method=flags.init_method,
init_range=flags.init_range,
init_std=flags.init_std,
clamp_len=flags.clamp_len)
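  # Memory/caching options (mem_len, reuse_len, bi_data, same_length) only
  # apply when pretraining; finetuning runs without them.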
if not is_finetune:
kwargs.update(dict(
mem_len=flags.mem_len,
reuse_len=flags.reuse_len,
bi_data=flags.bi_data,
clamp_len=flags.clamp_len,
same_length=flags.same_length))
return RunConfig(**kwargs)
# TODO(hongkuny): refactor XLNetConfig and RunConfig.
class XLNetConfig(object):
"""Configs for XLNet model.
XLNetConfig contains hyperparameters that are specific to a model checkpoint;
i.e., these hyperparameters should be the same between
pretraining and finetuning.
The following hyperparameters are defined:
n_layer: int, the number of layers.
d_model: int, the hidden size.
n_head: int, the number of attention heads.
d_head: int, the dimension size of each attention head.
d_inner: int, the hidden size in feed-forward layers.
ff_activation: str, "relu" or "gelu".
untie_r: bool, whether to untie the biases in attention.
n_token: int, the vocab size.
"""
def __init__(self, FLAGS=None, json_path=None, args_dict=None):
"""Constructing an XLNetConfig.
One of FLAGS or json_path should be provided.
Args:
FLAGS: An FLAGS instance.
json_path: A path to a json config file.
args_dict: A dict for args.
"""
assert FLAGS is not None or json_path is not None or args_dict is not None
self.keys = ['n_layer', 'd_model', 'n_head', 'd_head', 'd_inner',
'ff_activation', 'untie_r', 'n_token']
if FLAGS is not None:
self.init_from_flags(FLAGS)
if json_path is not None:
self.init_from_json(json_path)
if args_dict is not None:
self.init_from_dict(args_dict)
def init_from_dict(self, args_dict):
"""Constructs a `BertConfig` from a Python dictionary of parameters."""
for key in self.keys:
setattr(self, key, args_dict[key])
def init_from_flags(self, flags):
for key in self.keys:
setattr(self, key, getattr(flags, key))
def init_from_json(self, json_path):
with tf.io.gfile.GFile(json_path) as f:
json_data = json.load(f)
self.init_from_dict(json_data)
def to_json(self, json_path):
"""Save XLNetConfig to a json file."""
json_data = {}
for key in self.keys:
json_data[key] = getattr(self, key)
json_dir = os.path.dirname(json_path)
if not tf.io.gfile.exists(json_dir):
tf.io.gfile.makedirs(json_dir)
with tf.io.gfile.GFile(json_path, 'w') as f:
json.dump(json_data, f, indent=4, sort_keys=True)
class RunConfig(object):
"""Class of RunConfig.
RunConfig contains hyperparameters that could be different
between pretraining and finetuning.
These hyperparameters can also be changed from run to run.
We store them separately from XLNetConfig for flexibility.
"""
def __init__(self,
is_training,
use_tpu,
dropout,
dropout_att,
init_method='normal',
init_range=0.1,
init_std=0.02,
mem_len=None,
reuse_len=None,
bi_data=False,
clamp_len=-1,
same_length=False,
use_cls_mask=True):
"""Initializes RunConfig.
Args:
is_training: bool, whether in training mode.
use_tpu: bool, whether TPUs are used.
dropout: float, dropout rate.
dropout_att: float, dropout rate on attention probabilities.
init_method: str, the initialization scheme, either "normal" or "uniform".
init_range: float, initialize the parameters with a uniform distribution
in [-init_range, init_range]. Only effective when init="uniform".
init_std: float, initialize the parameters with a normal distribution
with mean 0 and stddev init_std. Only effective when init="normal".
mem_len: int, the number of tokens to cache.
reuse_len: int, the number of tokens in the currect batch to be cached
and reused in the future.
bi_data: bool, whether to use bidirectional input pipeline.
Usually set to True during pretraining and False during finetuning.
clamp_len: int, clamp all relative distances larger than clamp_len.
-1 means no clamping.
same_length: bool, whether to use the same attention length
for each token.
use_cls_mask: bool, whether to introduce cls mask.
"""
self.init_method = init_method
self.init_range = init_range
self.init_std = init_std
self.is_training = is_training
self.dropout = dropout
self.dropout_att = dropout_att
self.use_tpu = use_tpu
self.mem_len = mem_len
self.reuse_len = reuse_len
self.bi_data = bi_data
self.clamp_len = clamp_len
self.same_length = same_length
self.use_cls_mask = use_cls_mask
| alexgorban/models | official/nlp/xlnet/xlnet_config.py | Python | apache-2.0 | 6,110 | 0.003764 |
#!/usr/bin/env python3
import sys
from mltm.cli import add_entry, show_entries
if __name__ == '__main__':
n = len(sys.argv[1:])
if n == 0:
show_entries()
elif sys.argv[1] == 'add':
add_entry()
else:
show_entries(sys.argv[1])
| sanchopanca/my-long-term-memory | app.py | Python | apache-2.0 | 269 | 0 |
import os
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'BIOMD0000000102.xml')
with open(sbmlFilePath,'r') as f:
sbmlString = f.read()
def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
if module_exists('libsbml'):
import libsbml
    sbml = libsbml.readSBMLFromString(sbmlString)
| biomodels/BIOMD0000000102 | BIOMD0000000102/model.py | Python | cc0-1.0 | 427 | 0.009368 |
import difflib
import json
import connexion
from flask import Response, abort, jsonify
from auslib.blobs.base import BlobValidationError, createBlob
from auslib.db import OutdatedDataError, ReadOnlyError
from auslib.global_state import dbo
from auslib.web.admin.views.base import AdminView, requirelogin, serialize_signoff_requirements
from auslib.web.admin.views.problem import problem
from auslib.web.admin.views.scheduled_changes import (
EnactScheduledChangeView,
ScheduledChangeHistoryView,
ScheduledChangesView,
ScheduledChangeView,
SignoffsView,
)
from auslib.web.common.releases import release_list, serialize_releases
__all__ = ["SingleReleaseView", "SingleLocaleView"]
def createRelease(release, product, changed_by, transaction, releaseData):
blob = createBlob(releaseData)
dbo.releases.insert(changed_by=changed_by, transaction=transaction, name=release, product=product, data=blob)
return dbo.releases.getReleases(name=release, transaction=transaction)[0]
# TODO: certain cases here can return a 400 while still modifying the database
# https://bugzilla.mozilla.org/show_bug.cgi?id=1246993 has more details
def changeRelease(release, changed_by, transaction, existsCallback, commitCallback, log):
"""Generic function to change an aspect of a release. It relies on a
PartialReleaseForm existing and does some upfront work and checks before
doing anything. It will, for the named release and any found in the
'copyTo' field of the PartialReleaseForm:
- Create the release if it doesn't already exist.
- return a 400 Response if the release exists and old_data_version doesn't.
- return a 400 Response if the product name in the form doesn't match the existing one.
- update the version column of the release table if the one in the form doesn't match it.
- if the release already exists, 'existsCallback' will be called. If
that function returns True, a 201 Response will be returned upon
successful completion. If that function returns False, a 200 Response
will be returned instead.
@type release: string
@param release: The primary release to update. Additional releases found
in the 'copyTo' field of the PartialReleaseForm will also be
updated.
@type changed_by: string
@param changed_by: The username making the change.
@type transaction: AUSTransaction object
@param transaction: The transaction object to be used for all database
operations.
@type existsCallback: callable
@param existsCallback: The callable to call to determine whether to
consider this a "new" change or not. It must
receive 3 positional arguments:
- the name of the release
- the product name from the PartialReleaseForm
- the version from the PartialReleaseForm
@type commitCallback: callable
@param commitCallback: The callable to call after all prerequisite checks
and updates are done. It must receive 6 positional
arguments:
- the name of the release
- the product name from the PartialReleaseForm
- the version from the PartialReleaseForm
- the data from the PartialReleaseForm
- the most recent version of the data for the
release from the database
- the old_data_version from the PartialReleaseForm
"""
new = True
product = connexion.request.get_json().get("product")
incomingData = json.loads(connexion.request.get_json().get("data"))
copyTo = list()
if connexion.request.get_json().get("copyTo"):
copyTo = json.loads(connexion.request.get_json().get("copyTo"))
alias = list()
if connexion.request.get_json().get("alias"):
alias = json.loads(connexion.request.get_json().get("alias"))
old_data_version = connexion.request.get_json().get("data_version")
# schema_version is an attribute at the root level of a blob.
# Endpoints that receive an entire blob can find it there.
# Those that don't have to pass it as a form element instead.
if connexion.request.get_json().get("schema_version"):
schema_version = connexion.request.get_json().get("schema_version")
elif incomingData.get("schema_version"):
schema_version = incomingData.get("schema_version")
else:
return problem(400, "Bad Request", "schema_version is required")
if connexion.request.get_json().get("hashFunction"):
hashFunction = connexion.request.get_json().get("hashFunction")
elif incomingData.get("hashFunction"):
hashFunction = incomingData.get("hashFunction")
else:
hashFunction = None
allReleases = [release]
if copyTo:
allReleases += copyTo
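    # Apply the change to the release named in the URL plus any copyTo targets.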
for rel in allReleases:
try:
releaseInfo = dbo.releases.getReleases(name=rel, transaction=transaction)[0]
if existsCallback(rel, product):
new = False
# "release" is the one named in the URL (as opposed to the
# ones that can be provided in copyTo), and we treat it as
# the "primary" one
if rel == release:
# Make sure that old_data_version is provided, because we need to verify it when updating.
if not old_data_version:
msg = "Release exists, data_version must be provided"
log.warning("Bad input: %s", rel)
return problem(400, "Bad Request", msg)
# If the product we're given doesn't match the one in the DB, panic.
if product != releaseInfo["product"]:
msg = "Product name '%s' doesn't match the one on the release object ('%s') for release '%s'" % (product, releaseInfo["product"], rel)
log.warning("Bad input: %s", rel)
return problem(400, "Bad Request", msg)
if "hashFunction" in releaseInfo["data"] and hashFunction and hashFunction != releaseInfo["data"]["hashFunction"]:
msg = "hashFunction '{0}' doesn't match the one on the release " "object ('{1}') for release '{2}'".format(
hashFunction, releaseInfo["data"]["hashFunction"], rel
)
log.warning("Bad input: %s", rel)
return problem(400, "Bad Request", msg)
# If this isn't the release in the URL...
else:
# Use the data_version we just grabbed from the dbo.
old_data_version = releaseInfo["data_version"]
except IndexError:
# If the release doesn't already exist, create it, and set old_data_version appropriately.
newReleaseData = dict(name=rel, schema_version=schema_version)
if hashFunction:
newReleaseData["hashFunction"] = hashFunction
try:
releaseInfo = createRelease(rel, product, changed_by, transaction, newReleaseData)
except BlobValidationError as e:
msg = "Couldn't create release: %s" % e
log.warning("Bad input: %s", rel)
return problem(400, "Bad Request", msg, ext={"exception": e.errors})
except ValueError as e:
msg = "Couldn't create release: %s" % e
log.warning("Bad input: %s", rel)
return problem(400, "Bad Request", msg, ext={"exception": e.args})
old_data_version = 1
extraArgs = {}
if alias:
extraArgs["alias"] = alias
try:
commitCallback(rel, product, incomingData, releaseInfo["data"], old_data_version, extraArgs)
except BlobValidationError as e:
msg = "Couldn't update release: %s" % e
log.warning("Bad input: %s", rel)
return problem(400, "Bad Request", msg, ext={"exception": e.errors})
except ReadOnlyError as e:
msg = "Couldn't update release: %s" % e
log.warning("Bad input: %s", rel)
return problem(403, "Forbidden", msg, ext={"exception": e.args})
except (ValueError, OutdatedDataError) as e:
msg = "Couldn't update release: %s" % e
log.warning("Bad input: %s", rel)
return problem(400, "Bad Request", msg, ext={"exception": e.args})
new_data_version = dbo.releases.getReleases(name=release, transaction=transaction)[0]["data_version"]
if new:
status = 201
else:
status = 200
return Response(status=status, response=json.dumps(dict(new_data_version=new_data_version)))
class SingleLocaleView(AdminView):
"""/releases/[release]/builds/[platform]/[locale]"""
@requirelogin
def _put(self, release, platform, locale, changed_by, transaction):
"""Something important to note about this method is that using the
"copyTo" field of the form, updates can be made to more than just
the release named in the URL. However, the release in the URL is
still considered the primary one, and used to make decisions about
what to set the status code to, and what data_version applies to.
In an ideal world we would probably require a data_version for the
releases named in copyTo as well."""
def exists(rel, product):
if rel == release:
return dbo.releases.localeExists(name=rel, platform=platform, locale=locale, transaction=transaction)
return False
def commit(rel, product, localeData, releaseData, old_data_version, extraArgs):
return dbo.releases.addLocaleToRelease(
name=rel,
product=product,
platform=platform,
locale=locale,
data=localeData,
alias=extraArgs.get("alias"),
old_data_version=old_data_version,
changed_by=changed_by,
transaction=transaction,
)
return changeRelease(release, changed_by, transaction, exists, commit, self.log)
class SingleReleaseView(AdminView):
@requirelogin
def _put(self, release, changed_by, transaction):
if dbo.releases.getReleases(name=release, limit=1):
if not connexion.request.get_json().get("data_version"):
return problem(400, "Bad Request", "data_version field is missing")
try:
blob = createBlob(connexion.request.get_json().get("blob"))
dbo.releases.update(
where={"name": release},
what={"data": blob, "product": connexion.request.get_json().get("product")},
changed_by=changed_by,
old_data_version=connexion.request.get_json().get("data_version"),
transaction=transaction,
)
except BlobValidationError as e:
msg = "Couldn't update release: %s" % e
self.log.warning("Bad input: %s", msg)
return problem(400, "Bad Request", "Couldn't update release", ext={"exception": e.errors})
except ReadOnlyError as e:
msg = "Couldn't update release: %s" % e
self.log.warning("Bad input: %s", msg)
return problem(403, "Forbidden", "Couldn't update release. Release is marked read only", ext={"exception": e.args})
except ValueError as e:
msg = "Couldn't update release: %s" % e
self.log.warning("Bad input: %s", msg)
return problem(400, "Bad Request", "Couldn't update release", ext={"exception": e.args})
# the data_version might jump by more than 1 if outdated blobs are
# merged
data_version = dbo.releases.getReleases(name=release, transaction=transaction)[0]["data_version"]
return jsonify(new_data_version=data_version)
else:
try:
blob = createBlob(connexion.request.get_json().get("blob"))
dbo.releases.insert(
changed_by=changed_by, transaction=transaction, name=release, product=connexion.request.get_json().get("product"), data=blob
)
except BlobValidationError as e:
msg = "Couldn't update release: %s" % e
self.log.warning("Bad input: %s", msg)
return problem(400, "Bad Request", "Couldn't update release", ext={"exception": e.errors})
except ValueError as e:
msg = "Couldn't update release: %s" % e
self.log.warning("Bad input: %s", msg)
return problem(400, "Bad Request", "Couldn't update release", ext={"exception": e.args})
return Response(status=201)
@requirelogin
def _post(self, release, changed_by, transaction):
def exists(rel, product):
if rel == release:
return True
return False
def commit(rel, product, newReleaseData, releaseData, old_data_version, extraArgs):
releaseData.update(newReleaseData)
blob = createBlob(releaseData)
return dbo.releases.update(
where={"name": rel}, what={"data": blob, "product": product}, changed_by=changed_by, old_data_version=old_data_version, transaction=transaction
)
return changeRelease(release, changed_by, transaction, exists, commit, self.log)
@requirelogin
def _delete(self, release, changed_by, transaction):
releases = dbo.releases.getReleaseInfo(names=[release], nameOnly=True, limit=1)
if not releases:
return problem(404, "Not Found", "Release: %s not found" % release)
release = releases[0]
# query argument i.e. data_version is also required.
# All input value validations already defined in swagger specification and carried out by connexion.
try:
old_data_version = int(connexion.request.args.get("data_version"))
dbo.releases.delete(where={"name": release["name"]}, changed_by=changed_by, old_data_version=old_data_version, transaction=transaction)
except ReadOnlyError as e:
msg = "Couldn't delete release: %s" % e
self.log.warning("Bad input: %s", msg)
return problem(403, "Forbidden", "Couldn't delete %s. Release is marked read only" % release["name"], ext={"exception": e.args})
return Response(status=200)
class ReleaseReadOnlyView(AdminView):
"""/releases/:release/read_only"""
def get(self, release):
try:
is_release_read_only = dbo.releases.isReadOnly(name=release, limit=1)
except KeyError as e:
return problem(404, "Not Found", json.dumps(e.args))
return jsonify(read_only=is_release_read_only)
@requirelogin
def _put(self, release, changed_by, transaction):
releases = dbo.releases.getReleaseInfo(names=[release], nameOnly=True, limit=1)
if not releases:
return problem(404, "Not Found", "Release: %s not found" % release)
data_version = connexion.request.get_json().get("data_version")
is_release_read_only = dbo.releases.isReadOnly(release)
if connexion.request.get_json().get("read_only"):
if not is_release_read_only:
dbo.releases.update(
where={"name": release}, what={"read_only": True}, changed_by=changed_by, old_data_version=data_version, transaction=transaction
)
data_version += 1
else:
dbo.releases.update(
where={"name": release}, what={"read_only": False}, changed_by=changed_by, old_data_version=data_version, transaction=transaction
)
data_version += 1
return Response(status=201, response=json.dumps(dict(new_data_version=data_version)))
class ReleasesAPIView(AdminView):
"""/releases"""
def get(self, **kwargs):
releases = release_list(connexion.request)
if not connexion.request.args.get("names_only"):
requirements = dbo.releases.getPotentialRequiredSignoffs(releases)
for release in releases:
release["required_signoffs"] = serialize_signoff_requirements(requirements[release["name"]])
return serialize_releases(connexion.request, releases)
@requirelogin
def _post(self, changed_by, transaction):
if dbo.releases.getReleaseInfo(names=[connexion.request.get_json().get("name")], transaction=transaction, nameOnly=True, limit=1):
return problem(
400,
"Bad Request",
"Release: %s already exists" % connexion.request.get_json().get("name"),
ext={"exception": "Database already contains the release"},
)
try:
blob = createBlob(connexion.request.get_json().get("blob"))
name = dbo.releases.insert(
changed_by=changed_by,
transaction=transaction,
name=connexion.request.get_json().get("name"),
product=connexion.request.get_json().get("product"),
data=blob,
)
except BlobValidationError as e:
msg = "Couldn't create release: %s" % e
self.log.warning("Bad input: %s", msg)
return problem(400, "Bad Request", "Couldn't create release", ext={"exception": e.errors})
except ValueError as e:
msg = "Couldn't create release: %s" % e
self.log.warning("Bad input: %s", msg)
return problem(400, "Bad Request", "Couldn't create release", ext={"exception": e.args})
release = dbo.releases.getReleases(name=name, transaction=transaction, limit=1)[0]
return Response(status=201, response=json.dumps(dict(new_data_version=release["data_version"])))
class SingleReleaseColumnView(AdminView):
""" /releases/columns/:column"""
def get(self, column):
releases = dbo.releases.getReleaseInfo()
column_values = []
if column not in releases[0].keys():
return problem(404, "Not Found", "Requested column does not exist")
for release in releases:
for key, value in release.items():
if key == column and value is not None:
column_values.append(value)
column_values = list(set(column_values))
ret = {"count": len(column_values), column: column_values}
return jsonify(ret)
class ReleaseScheduledChangesView(ScheduledChangesView):
"""/scheduled_changes/releases"""
def __init__(self):
super(ReleaseScheduledChangesView, self).__init__("releases", dbo.releases)
def get(self):
where = {}
name = connexion.request.args.get("name")
if name:
where["base_name"] = name
return super(ReleaseScheduledChangesView, self).get(where)
@requirelogin
def _post(self, transaction, changed_by):
if connexion.request.get_json().get("when", None) is None:
return problem(400, "Bad Request", "'when' cannot be set to null when scheduling a new change " "for a Release")
change_type = connexion.request.get_json().get("change_type")
what = {}
for field in connexion.request.get_json():
if field == "csrf_token":
continue
what[field] = connexion.request.get_json()[field]
if change_type == "update":
if not what.get("data_version", None):
return problem(400, "Bad Request", "Missing field", ext={"exception": "data_version is missing"})
if what.get("data", None):
what["data"] = createBlob(what.get("data"))
elif change_type == "insert":
if not what.get("product", None):
return problem(400, "Bad Request", "Missing field", ext={"exception": "product is missing"})
if what.get("data", None):
what["data"] = createBlob(what.get("data"))
else:
return problem(400, "Bad Request", "Missing field", ext={"exception": "Missing blob 'data' value"})
elif change_type == "delete":
if not what.get("data_version", None):
return problem(400, "Bad Request", "Missing field", ext={"exception": "data_version is missing"})
return super(ReleaseScheduledChangesView, self)._post(what, transaction, changed_by, change_type)
class ReleaseScheduledChangeView(ScheduledChangeView):
"""/scheduled_changes/releases/<int:sc_id>"""
def __init__(self):
super(ReleaseScheduledChangeView, self).__init__("releases", dbo.releases)
@requirelogin
def _post(self, sc_id, transaction, changed_by):
# TODO: modify UI and clients to stop sending 'change_type' in request body
sc_release = self.sc_table.select(where={"sc_id": sc_id}, transaction=transaction, columns=["change_type"])
if sc_release:
change_type = sc_release[0]["change_type"]
else:
return problem(404, "Not Found", "Unknown sc_id", ext={"exception": "No scheduled change for release found for given sc_id"})
what = {}
for field in connexion.request.get_json():
# Only data may be changed when editing an existing Scheduled Change for
# an existing Release. Name cannot be changed because it is a PK field, and product
# cannot be changed because it almost never makes sense to (and can be done
# by deleting/recreating instead).
# Any Release field may be changed when editing an Scheduled Change for a new Release
if (
(change_type == "delete" and field not in ["when", "data_version"])
or (change_type == "update" and field not in ["when", "data", "data_version"])
or (change_type == "insert" and field not in ["when", "name", "product", "data"])
):
continue
what[field] = connexion.request.get_json()[field]
if change_type in ["update", "delete"] and not what.get("data_version", None):
return problem(400, "Bad Request", "Missing field", ext={"exception": "data_version is missing"})
elif change_type == "insert" and "data" in what and not what.get("data", None):
# edit scheduled change for new release
return problem(400, "Bad Request", "Null/Empty Value", ext={"exception": "data cannot be set to null when scheduling insertion of a new release"})
if what.get("data", None):
what["data"] = createBlob(what.get("data"))
return super(ReleaseScheduledChangeView, self)._post(sc_id, what, transaction, changed_by, connexion.request.get_json().get("sc_data_version", None))
@requirelogin
def _delete(self, sc_id, transaction, changed_by):
return super(ReleaseScheduledChangeView, self)._delete(sc_id, transaction, changed_by)
class EnactReleaseScheduledChangeView(EnactScheduledChangeView):
"""/scheduled_changes/releases/<int:sc_id>/enact"""
def __init__(self):
super(EnactReleaseScheduledChangeView, self).__init__("releases", dbo.releases)
@requirelogin
def _post(self, sc_id, transaction, changed_by):
return super(EnactReleaseScheduledChangeView, self)._post(sc_id, transaction, changed_by)
class ReleaseScheduledChangeSignoffsView(SignoffsView):
"""/scheduled_changes/releases/<int:sc_id>/signoffs"""
def __init__(self):
super(ReleaseScheduledChangeSignoffsView, self).__init__("releases", dbo.releases)
class ReleaseScheduledChangeHistoryView(ScheduledChangeHistoryView):
"""/scheduled_changes/releases/<int:sc_id>/revisions"""
def __init__(self):
super(ReleaseScheduledChangeHistoryView, self).__init__("releases", dbo.releases)
@requirelogin
def _post(self, sc_id, transaction, changed_by):
return super(ReleaseScheduledChangeHistoryView, self)._post(sc_id, transaction, changed_by)
class ScheduledReleaseFieldView(AdminView):
def __init__(self):
self.table = dbo.releases.scheduled_changes
def get_value(self, sc_id, field=None):
data = self.table.select(where={"sc_id": sc_id}, transaction=None)[0]
if not data:
abort(400, "Bad sc_id")
if not field:
return data
if field not in data:
raise KeyError("Bad field")
return data[field]
class ScheduledReleaseDiffView(ScheduledReleaseFieldView):
"""/diff/:sc_id"""
def get_release(self, sc):
data = dbo.releases.select(where={"name": sc["base_name"], "product": sc["base_product"]}, limit=1)[0]
if not data:
abort(400, "Bad sc_id")
return data
def get(self, sc_id):
sc = self.get_value(sc_id)
release = self.get_release(sc)
if "data" not in release:
return problem(400, "Bad Request", "Bad field")
previous = json.dumps(release["data"], indent=2, sort_keys=True)
value = json.dumps(sc["base_{}".format("data")], indent=2, sort_keys=True)
result = difflib.unified_diff(
previous.splitlines(),
value.splitlines(),
fromfile="Current Version (Data Version {})".format(release["data_version"]),
tofile="Scheduled Update (sc_id {})".format(sc["sc_id"]),
lineterm="",
)
return Response("\n".join(result), content_type="text/plain")
| testbhearsum/balrog | src/auslib/web/admin/views/releases.py | Python | mpl-2.0 | 26,017 | 0.003306 |
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import unittest
from tempest.test_discover import plugins
def load_tests(loader, tests, pattern):
ext_plugins = plugins.TempestTestPluginManager()
suite = unittest.TestSuite()
base_path = os.path.split(os.path.dirname(os.path.abspath(__file__)))[0]
base_path = os.path.split(base_path)[0]
# Load local tempest tests
for test_dir in ['api', 'scenario']:
full_test_dir = os.path.join(base_path, 'tempest', test_dir)
if not pattern:
suite.addTests(loader.discover(full_test_dir,
top_level_dir=base_path))
else:
suite.addTests(loader.discover(full_test_dir, pattern=pattern,
top_level_dir=base_path))
plugin_load_tests = ext_plugins.get_plugin_load_tests_tuple()
if not plugin_load_tests:
return suite
# Load any installed plugin tests
for plugin in plugin_load_tests:
test_dir, top_path = plugin_load_tests[plugin]
if not pattern:
suite.addTests(loader.discover(test_dir, top_level_dir=top_path))
else:
suite.addTests(loader.discover(test_dir, pattern=pattern,
top_level_dir=top_path))
return suite
| openstack/tempest | tempest/test_discover/test_discover.py | Python | apache-2.0 | 1,891 | 0 |
import Adafruit_BBIO.PWM as PWM
class Servo:
def __init__(self, pin):
self.servo_pin = pin
self.duty_min = 3
self.duty_max = 14.5
self.duty_span = self.duty_max - self.duty_min
def StartServo(self):
print("Starting servo")
print(self.servo_pin)
PWM.start(self.servo_pin, (100 - self.duty_min), 60.0, 1)
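        # Adafruit_BBIO signature: PWM.start(channel, duty, freq, polarity);
        # polarity=1 inverts the output, which is why duty cycles throughout
        # this class are written as (100 - duty).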
self.current_angle = 90.0
self.SetAngle(self.current_angle)
def SetAngle(self, angle):
angle_f = float(angle)
duty = 100 - ((angle_f / 180) * self.duty_span + self.duty_min)
PWM.set_duty_cycle(self.servo_pin, duty)
def IncreaseAngle(self, angle):
self.current_angle += angle
self.SetAngle(self.current_angle)
def DecreaseAngle(self, angle):
self.current_angle -= angle
self.SetAngle(self.current_angle)
def StopServo(self):
PWM.stop(self.servo_pin)
| sahdman/Plane | Servo.py | Python | gpl-3.0 | 804 | 0.039801 |
from __future__ import absolute_import
from Components.VariableValue import VariableValue
from Components.Renderer.Renderer import Renderer
from enigma import eSlider
class Progress(VariableValue, Renderer):
def __init__(self):
Renderer.__init__(self)
VariableValue.__init__(self)
self.__start = 0
self.__end = 100
GUI_WIDGET = eSlider
def changed(self, what):
if what[0] == self.CHANGED_CLEAR:
(self.range, self.value) = ((0, 1), 0)
return
range = self.source.range or 100
value = self.source.value
if value is None:
value = 0
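		# eSlider range values are 32-bit ints, so clamp to avoid overflow.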
if range > 2**31-1:
range = 2**31-1
if value > range:
value = range
if value < 0:
value = 0
(self.range, self.value) = ((0, range), value)
def postWidgetCreate(self, instance):
instance.setRange(self.__start, self.__end)
def setRange(self, range):
(self.__start, self.__end) = range
if self.instance is not None:
self.instance.setRange(self.__start, self.__end)
def getRange(self):
return self.__start, self.__end
range = property(getRange, setRange)
| atvcaptain/enigma2 | lib/python/Components/Renderer/Progress.py | Python | gpl-2.0 | 1,048 | 0.02958 |
from distutils.core import setup
setup(name="card",
author="Benoit Michau",
author_email="michau.benoit@gmail.com",
url="http://michau.benoit.free.fr/codes/smartcard/",
description="A library to manipulate smartcards used in telecommunications systems (SIM, USIM)",
long_description=open("README.txt", "r").read(),
version="0.1.0",
license="GPLv2",
packages=["card"])
| 1mentat/card | setup.py | Python | gpl-2.0 | 417 | 0.002398 |
# Test methods with long descriptive names can omit docstrings
# pylint: disable=missing-docstring
from unittest.mock import MagicMock
import numpy as np
from AnyQt.QtCore import QRectF
from Orange.data import Table, Domain, ContinuousVariable, DiscreteVariable
from Orange.widgets.tests.base import WidgetTest, WidgetOutputsTestMixin
from Orange.widgets.visualize.owscatterplot import \
OWScatterPlot, ScatterPlotVizRank
class TestOWScatterPlot(WidgetTest, WidgetOutputsTestMixin):
@classmethod
def setUpClass(cls):
super().setUpClass()
WidgetOutputsTestMixin.init(cls)
cls.signal_name = "Data"
cls.signal_data = cls.data
def setUp(self):
self.widget = self.create_widget(OWScatterPlot)
def test_set_data(self):
# Connect iris to scatter plot
self.send_signal("Data", self.data)
# First two attribute should be selected as x an y
self.assertEqual(self.widget.attr_x, self.data.domain[0])
self.assertEqual(self.widget.attr_y, self.data.domain[1])
# Class var should be selected as color
self.assertIs(self.widget.graph.attr_color, self.data.domain.class_var)
# Change which attributes are displayed
self.widget.attr_x = self.data.domain[2]
self.widget.attr_y = self.data.domain[3]
# Disconnect the data
self.send_signal("Data", None)
# removing data should have cleared attributes
self.assertEqual(self.widget.attr_x, None)
self.assertEqual(self.widget.attr_y, None)
self.assertEqual(self.widget.graph.attr_color, None)
# and remove the legend
self.assertEqual(self.widget.graph.legend, None)
# Connect iris again
# same attributes that were used last time should be selected
self.send_signal("Data", self.data)
self.assertIs(self.widget.attr_x, self.data.domain[2])
self.assertIs(self.widget.attr_y, self.data.domain[3])
def test_score_heuristics(self):
domain = Domain([ContinuousVariable(c) for c in "abcd"],
DiscreteVariable("c", values="ab"))
a = np.arange(10).reshape((10, 1))
data = Table(domain, np.hstack([a, a, a, a]), a >= 5)
self.send_signal("Data", data)
vizrank = ScatterPlotVizRank(self.widget)
self.assertEqual([x.name for x in vizrank.score_heuristic()],
list("abcd"))
def test_optional_combos(self):
domain = self.data.domain
d1 = Domain(domain.attributes[:2], domain.class_var,
[domain.attributes[2]])
t1 = Table(d1, self.data)
self.send_signal("Data", t1)
self.widget.graph.attr_size = domain.attributes[2]
d2 = Domain(domain.attributes[:2], domain.class_var,
[domain.attributes[3]])
t2 = Table(d2, self.data)
self.send_signal("Data", t2)
def _select_data(self):
self.widget.graph.select_by_rectangle(QRectF(4, 3, 3, 1))
return self.widget.graph.get_selection()
def test_error_message(self):
"""Check if error message appears and then disappears when
data is removed from input"""
data = self.data.copy()
data.X[:, 0] = np.nan
self.send_signal("Data", data)
self.assertTrue(self.widget.Warning.missing_coords.is_shown())
self.send_signal("Data", None)
self.assertFalse(self.widget.Warning.missing_coords.is_shown())
def test_report_on_empty(self):
self.widget.report_plot = MagicMock()
self.widget.report_caption = MagicMock()
self.widget.report_items = MagicMock()
self.widget.send_report() # Essentially, don't crash
self.widget.report_plot.assert_not_called()
self.widget.report_caption.assert_not_called()
self.widget.report_items.assert_not_called()
| cheral/orange3 | Orange/widgets/visualize/tests/test_owscatterplot.py | Python | bsd-2-clause | 3,898 | 0.000257 |
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GLX import _types as _cs
# End users want this...
from OpenGL.raw.GLX._types import *
from OpenGL.raw.GLX import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GLX_OML_sync_control'
def _f( function ):
return _p.createFunction( function,_p.PLATFORM.GLX,'GLX_OML_sync_control',error_checker=_errors._error_checker)
@_f
@_p.types(_cs.Bool,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,ctypes.POINTER(_cs.int32_t),ctypes.POINTER(_cs.int32_t))
def glXGetMscRateOML(dpy,drawable,numerator,denominator):pass
@_f
@_p.types(_cs.Bool,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t))
def glXGetSyncValuesOML(dpy,drawable,ust,msc,sbc):pass
@_f
@_p.types(_cs.int64_t,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,_cs.int64_t,_cs.int64_t,_cs.int64_t)
def glXSwapBuffersMscOML(dpy,drawable,target_msc,divisor,remainder):pass
@_f
@_p.types(_cs.Bool,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,_cs.int64_t,_cs.int64_t,_cs.int64_t,ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t))
def glXWaitForMscOML(dpy,drawable,target_msc,divisor,remainder,ust,msc,sbc):pass
@_f
@_p.types(_cs.Bool,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,_cs.int64_t,ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t))
def glXWaitForSbcOML(dpy,drawable,target_sbc,ust,msc,sbc):pass
| stack-of-tasks/rbdlpy | tutorial/lib/python2.7/site-packages/OpenGL/raw/GLX/OML/sync_control.py | Python | lgpl-3.0 | 1,569 | 0.045252 |
import conv
import tools
from ..api.clss import api
from ..sql.clss import sql
from pandas import DataFrame
import time as tm
class data(object):
def __init__(self):
self.a = api()
self.s = sql()
self.jobs = []
self.trd = DataFrame()
self.prc = DataFrame()
def add_trades(self, exchange, symbol, limit='', since='',
auto_since='no', ping_limit=1.0):
job = {'exchange':exchange,'symbol':symbol}
self.a.add_job(exchange, symbol, 'trades', limit=limit, since=since,
auto_since=auto_since, ping_limit=ping_limit)
self.jobs.append(job)
def get_trades(self, exchange='', symbol='', start=''):
trd = self.s.select('trades',exchange=exchange,
symbol=symbol,start=start)
self.trd = self.trd.append(trd)
self.trd = self.trd.drop_duplicates(['tid','exchange'])
def run_trades(self, exchange, symbol):
self.trd = self.trd.append(self.a.run(exchange,symbol,'trades'))
self.trd = self.trd.drop_duplicates(['tid','exchange'])
def run_loop(self, time, to_sql=60, log='no'):
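        # Poll every registered job until `time` seconds have elapsed,
        # flushing new rows to SQL every `to_sql` seconds.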
dump = tm.time() + to_sql
end = tm.time() + time
while tm.time() < end:
for job in self.jobs:
self.run_trades(job['exchange'], job['symbol'])
if tm.time() > dump:
dump = tm.time() + to_sql
self.to_sql(log)
def get_price(self, exchange='', symbol='',
freq='', start=''):
prc = self.s.select('price',exchange=exchange,symbol=symbol,
freq=freq, start=start)
self.prc = self.prc.append(prc)
self.prc = self.prc.drop_duplicates(['timestamp','exchange',
'symbol','freq'])
return prc
def run_price(self, exchange, symbol, freq, label='left',
from_sql='no', start=''):
if from_sql == 'yes':
self.get_trades(exchange, symbol, start=start)
# get_trades already applied exchange, symbol checks
trd = self.trd
else:
trd = self.trd
            if exchange != '':
                trd = self.trd[self.trd.exchange==exchange]
            if symbol != '':
                trd = self.trd[self.trd.symbol==symbol]
trd = tools.date_index(trd)
if len(trd.index) > 0:
prc = conv.olhcv(trd, freq, label=label)
self.prc = self.prc.append(prc)
self.prc = self.prc.drop_duplicates(['timestamp','exchange',
'symbol','freq'])
def to_sql(self, log='no'):
if 'sent' in self.trd:
            trd = self.trd[self.trd['sent']!='yes']
else:
trd = self.trd
if 'sent' in self.prc:
            prc = self.prc[self.prc['sent']!='yes']
else:
prc = self.prc
self.s.insert('trades', trd)
self.s.insert('price', prc)
if log == 'yes':
print trd
print prc
self.trd['sent'] = 'yes'
self.prc['sent'] = 'yes'
| rosspalmer/bitQuant | bitquant/data/clss.py | Python | mit | 3,127 | 0.011193 |
#! /usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import sys
import taskit
long_description = '''TaskIt -- A light-weight task management library.
TaskIt is a light-weight library to turn a function into a full-featured,
threadable task. It is completely X-Python Compatible and has no external
dependencies. The simple version is completely self-contained, whereas the
distributed version has a simple, obvious way to connect with the backends.'''
def main():
setup(script_args=sys.argv[1:] if len(sys.argv) > 1 else ['install'],
name='taskit',
version=taskit.__version__,
description='TaskIt -- A light-weight task management library.',
long_description=long_description,
author='Daniel Foerster/pydsigner',
author_email='pydsigner@gmail.com',
packages=['taskit'],
package_data={'taskit': ['doc/*.md']},
license='LGPLv3',
url='http://github.com/pydsigner/taskit',
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python',])
if __name__ == '__main__':
main()
| pydsigner/taskit | setup.py | Python | lgpl-3.0 | 1,455 | 0.003436 |
"""
spring2.py
The rk4_two() routine in this program does a two step integration using
an array method. The current x and xprime values are kept in a global
list named 'xxp', which is passed to the helper functions as 'val':
val[0] = current position; val[1] = current velocity
The results are compared with analytically calculated values.
"""
from pylab import *
def accn(t, val):
force = -spring_const * val[0] - damping * val[1]
return force/mass
def vel(t, val):
return val[1]
def rk4_two(t, h): # Time and Step value
global xxp # x and xprime values in a 'xxp'
    k1 = [0,0]                          # initialize five 2-element work lists.
k2 = [0,0]
k3 = [0,0]
k4 = [0,0]
tmp= [0,0]
k1[0] = vel(t,xxp)
k1[1] = accn(t,xxp)
for i in range(2): # value of functions at t + h/2
tmp[i] = xxp[i] + k1[i] * h/2
k2[0] = vel(t + h/2, tmp)
k2[1] = accn(t + h/2, tmp)
for i in range(2): # value of functions at t + h/2
tmp[i] = xxp[i] + k2[i] * h/2
k3[0] = vel(t + h/2, tmp)
k3[1] = accn(t + h/2, tmp)
for i in range(2): # value of functions at t + h
tmp[i] = xxp[i] + k3[i] * h
k4[0] = vel(t+h, tmp)
k4[1] = accn(t+h, tmp)
    for i in range(2):                  # combine the four slopes for the update
xxp[i] = xxp[i] + ( k1[i] + \
2.0*k2[i] + 2.0*k3[i] + k4[i]) * h/ 6.0
t = 0.0                                 # Starting time
h = 0.01 # Runge-Kutta step size, time increment
xxp = [2.0, 0.0] # initial position & velocity
spring_const = 100.0 # spring constant
mass = 2.0 # mass of the oscillating object
damping = 0.0
tm = [0.0] # Lists to store time, position & velocity
x = [xxp[0]]
xp = [xxp[1]]
xth = [xxp[0]]
while t < 5:
rk4_two(t,h) # Do one step RK integration
t = t + h
tm.append(t)
xp.append(xxp[1])
x.append(xxp[0])
th = 2.0 * cos(sqrt(spring_const/mass)* (t))
xth.append(th)
plot(tm,x)
plot(tm,xth,'+')
show()
| wavicles/pycode-browser | Code/Physics/spring2.py | Python | gpl-3.0 | 1,784 | 0.040919 |
#!/usr/bin/env python
# reflect input bytes to output, printing as it goes
import serial, sys, optparse, time
parser = optparse.OptionParser("davis_log")
parser.add_option("--baudrate", type='int', default=57600, help='baud rate')
opts, args = parser.parse_args()
if len(args) != 2:
print("usage: reflector.py <DEVICE> <logfile>")
sys.exit(1)
device = args[0]
logfile = args[1]
port = serial.Serial(device, opts.baudrate, timeout=5, dsrdtr=False, rtscts=False, xonxoff=False)
log = open(logfile, mode="a")
while True:
line = port.readline()
line = line.rstrip()
out = "%s %.2f\n" % (line, time.time())
log.write(out);
log.flush()
sys.stdout.write(out)
sys.stdout.flush()
| tridge/DavisSi1000 | Firmware/tools/davis_log.py | Python | bsd-2-clause | 715 | 0.004196 |
# -*- coding: utf-8 -*-
'''
File encryption for stash
Uses AES in CTR mode.
usage: crypt.py [-h] [-k KEY] [-d] infile [outfile]
positional arguments:
infile File to encrypt/decrypt.
outfile Output file.
optional arguments:
-h, --help show this help message and exit
-k KEY, --key KEY Encrypt/Decrypt Key.
-d, --decrypt Flag to decrypt.
'''
from __future__ import print_function
import argparse
import base64
import os
_stash = globals()['_stash']
try:
import pyaes
except ImportError:
print('Installing Required packages...')
_stash('pip install pyaes')
import pyaes
class Crypt(object):
def __init__(self, in_filename, out_filename=None):
self.in_filename = in_filename
self.out_filename = out_filename
def aes_encrypt(self, key=None, chunksize=64 * 1024):
self.out_filename = self.out_filename or self.in_filename + '.enc'
if key is None:
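            # no key supplied: derive a printable 32-char key
            # (base64-encode 32 random bytes, keep the first 32 chars)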
key = base64.b64encode(os.urandom(32))[:32]
aes = pyaes.AESModeOfOperationCTR(key)
with open(self.in_filename, 'rb') as infile:
with open(self.out_filename, 'wb') as outfile:
pyaes.encrypt_stream(aes, infile, outfile)
return key
def aes_decrypt(self, key, chunksize=64 * 1024):
self.out_filename = self.out_filename or os.path.splitext(self.in_filename)[0]
aes = pyaes.AESModeOfOperationCTR(key)
with open(self.in_filename, 'rb') as infile:
with open(self.out_filename, 'wb') as outfile:
pyaes.decrypt_stream(aes, infile, outfile)
if __name__ == '__main__':
ap = argparse.ArgumentParser()
ap.add_argument(
'-k',
'--key',
action='store',
default=None,
help='Encrypt/Decrypt Key.',
)
ap.add_argument(
'-d',
'--decrypt',
action='store_true',
default=False,
help='Flag to decrypt.',
)
#ap.add_argument('-t','--type',action='store',choices={'aes','rsa'},default='aes')
ap.add_argument('infile', action='store', help='File to encrypt/decrypt.')
ap.add_argument('outfile', action='store', nargs='?', help='Output file.')
args = ap.parse_args()
crypt = Crypt(args.infile, args.outfile)
if args.decrypt:
crypt.aes_decrypt(args.key.encode())
else:
nk = crypt.aes_encrypt(args.key)
if args.key is None:
print("Key: %s" % nk.decode())
| ywangd/stash | bin/crypt.py | Python | mit | 2,462 | 0.001219 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
from winsys._compat import unittest
import uuid
import winerror
import win32api
import win32con
import win32evtlog
import win32security
import pywintypes
from winsys.tests import utils as testutils
from winsys import event_logs, registry, utils
LOG_NAME = event_logs.DEFAULT_LOG_NAME
GUID = "_winsys-%s" % uuid.uuid1()
#
# Utility functions
#
def yield_logs(computer=None, log_name=LOG_NAME):
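    """Yield records from log_name on the given computer, newest first."""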
hLog = win32evtlog.OpenEventLog(computer, log_name)
try:
while True:
entries = win32evtlog.ReadEventLog(
hLog,
win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ,
0
)
if entries:
for entry in entries:
yield entry
else:
break
finally:
win32evtlog.CloseEventLog(hLog)
#
# TESTS
#
@unittest.skipUnless(testutils.i_am_admin(), "These tests must be run as Administrator")
class TestEventLogs(unittest.TestCase):
#
# Fixtures
#
def setUp(self):
event_logs.EventSource.create(GUID, LOG_NAME)
self.registry_root = registry.registry(r"HKLM\SYSTEM\CurrentControlSet\Services\Eventlog")
def tearDown(self):
event_logs.event_source(r"%s\%s" %(LOG_NAME, GUID)).delete()
#
# Event Source
#
def test_create_source(self):
log_name = "System"
guid = "_winsys-test_create_source-%s" % uuid.uuid1()
try:
source = event_logs.EventSource.create(guid, log_name)
self.assertTrue(self.registry_root + log_name + guid)
except:
raise
else:
source.delete()
self.assertFalse(bool(self.registry_root + log_name + guid))
def test_create_source_at_default(self):
guid = "_winsys-test_create_source_at_default-%s" % uuid.uuid1()
try:
source = event_logs.EventSource.create(guid)
self.assertTrue(self.registry_root + event_logs.DEFAULT_LOG_NAME + guid)
except:
raise
else:
source.delete()
self.assertFalse(bool(self.registry_root + event_logs.DEFAULT_LOG_NAME + guid))
def test_event_sources(self):
log_name = "System"
self.assertEqual(
set(s.name for s in event_logs.event_sources(log_name)),
set(r.name for r in self.registry_root + log_name)
)
self.assertTrue(all(isinstance(s, event_logs.EventSource) for s in event_logs.event_sources(log_name)))
def test_event_source_from_event_source(self):
for s in event_logs.event_sources():
self.assertTrue(isinstance(s, event_logs.EventSource))
self.assertTrue(event_logs.event_source(s) is s)
break
def test_event_source_from_none(self):
self.assertTrue(event_logs.event_source(None) is None)
def test_event_source_from_bad_string(self):
with self.assertRaises(event_logs.x_event_logs):
event_logs.event_source("")
def test_event_source_from_good_string(self):
self.assertTrue(
isinstance(
event_logs.event_source(r"%s\%s" %(LOG_NAME, GUID)),
event_logs.EventSource
)
)
def test_event_source_from_good_string_default_log(self):
self.assertTrue(
isinstance(
event_logs.event_source(GUID),
event_logs.EventSource
)
)
def test_event_source_as_string(self):
self.assertTrue(event_logs.event_source(GUID).as_string())
def test_event_source_log_event(self):
data = str(GUID).encode("utf8")
event_logs.event_source(GUID).log_event(data=data)
for event in yield_logs():
if event.SourceName == GUID and event.Data == data:
self.assertTrue(True)
break
else:
self.assertTrue(False)
#
# Event logs
#
def test_event_logs(self):
self.assertEqual(
set(s.name for s in event_logs.event_logs()),
set(r.name for r in self.registry_root.keys())
)
self.assertTrue(all(isinstance(s, event_logs.EventLog) for s in event_logs.event_logs()))
def test_event_log_from_event_log(self):
for l in event_logs.event_logs():
self.assertTrue(isinstance(l, event_logs.EventLog))
self.assertTrue(event_logs.event_log(l) is l)
break
def test_event_log_from_none(self):
self.assertTrue(event_logs.event_log(None) is None)
def test_event_log_from_bad_string(self):
with self.assertRaises(event_logs.x_event_logs):
            event_logs.event_log("")
def test_event_log_from_good_string(self):
self.assertTrue(
isinstance(
event_logs.event_log(LOG_NAME),
event_logs.EventLog
)
)
def test_event_log_clear_no_save(self):
log_name = "Internet Explorer"
source_name = "_winsys-%s" % uuid.uuid1()
source = event_logs.EventSource.create(source_name, log_name)
log = event_logs.event_log(log_name)
hLog = win32evtlog.OpenEventLog(None, log_name)
try:
log.log_event(source, message="hello")
            self.assertNotEqual(win32evtlog.GetNumberOfEventLogRecords(hLog), 0)
log.clear()
self.assertEqual(win32evtlog.GetNumberOfEventLogRecords(hLog), 0)
finally:
win32evtlog.CloseEventLog(hLog)
source.delete()
def test_event_log_clear_with_save(self):
log_name = "Internet Explorer"
source_name = "_winsys-%s" % uuid.uuid1()
source = event_logs.EventSource.create(source_name, log_name)
log = event_logs.event_log(log_name)
hLog = win32evtlog.OpenEventLog(None, log_name)
try:
log.log_event(source, message="hello")
            self.assertNotEqual(win32evtlog.GetNumberOfEventLogRecords(hLog), 0)
log.clear()
self.assertEqual(win32evtlog.GetNumberOfEventLogRecords(hLog), 0)
finally:
win32evtlog.CloseEventLog(hLog)
source.delete()
#
# Module-level functions
#
def test_log_event(self):
data = str(GUID).encode("utf8")
event_logs.log_event("%s\\%s" %(LOG_NAME, GUID), data=data)
for event in yield_logs():
if event.SourceName == GUID and event.Data == data:
self.assertTrue(True)
break
else:
self.assertTrue(False)
if __name__ == "__main__":
unittest.main()
if sys.stdout.isatty(): raw_input("Press enter...")
| operepo/ope | laptop_credential/winsys/tests/test_event_logs.py | Python | mit | 6,794 | 0.002944 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# from __future__ import unicode_literals
import codecs
import sys
reload(sys)
sys.setdefaultencoding('utf8')
# ranges of ordinals of unicode ideographic characters
ranges = [
{"from": ord(u"\u3300"), "to": ord(u"\u33ff")}, # compatibility ideographs
{"from": ord(u"\ufe30"), "to": ord(u"\ufe4f")}, # compatibility ideographs
{"from": ord(u"\uf900"), "to": ord(u"\ufaff")}, # compatibility ideographs
# {"from": ord(u"\U0002f800"), "to": ord(u"\U0002fa1f")}, # compatibility ideographs
{"from": ord(u"\u30a0"), "to": ord(u"\u30ff")}, # Japanese Kana
{"from": ord(u"\u2e80"), "to": ord(u"\u2eff")}, # cjk radicals supplement
{"from": ord(u"\u4e00"), "to": ord(u"\u9fff")},
{"from": ord(u"\u3400"), "to": ord(u"\u4dbf")},
# {"from": ord(u"\U00020000"), "to": ord(u"\U0002a6df")},
# {"from": ord(u"\U0002a700"), "to": ord(u"\U0002b73f")},
# {"from": ord(u"\U0002b740"), "to": ord(u"\U0002b81f")},
# {"from": ord(u"\U0002b820"), "to": ord(u"\U0002ceaf")} # included as of Unicode 8.0
]
def is_cjk(char):
    return any(r["from"] <= ord(char) <= r["to"] for r in ranges)
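# Illustrative examples (assume the Python 2 setup above):
#   is_cjk(u"\u4e2d")  # -> True  (a CJK ideograph in the 4e00-9fff range)
#   is_cjk(u"a")       # -> False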
def load_csv_to_dict(filepath, delimiter=','):
u"""Load a .csv file to dictionary
Args:
- filepath: path to the csv file
- delimiter: delimiter in the csv file
Input: csv file
key,val
...
Return:
dictionary
"""
table = {}
with codecs.open(filepath,
'r', encoding='utf-8') as f:
for line in f:
key, val = line.strip().split(delimiter)
table[key] = val
return table
| jntkym/rappers | utils.py | Python | mit | 1,664 | 0.001202 |
# -*- coding:utf-8 -*-
import os
import random
import sys
import importlib
importlib.reload(sys)
UNK_ID = 3
train_encode_file = 'data/middle_data/train.enc'
train_decode_file = 'data/middle_data/train.dec'
test_encode_file = 'data/middle_data/test.enc'
test_decode_file = 'data/middle_data/test.dec'
train_encode_vocabulary_file = 'data/voca_data/train_encode_vocabulary'
train_decode_vocabulary_file = 'data/voca_data/train_decode_vocabulary'
print("对话转向量...")
# 把对话字符串转为向量形式
def convert_to_vector(input_file, vocabulary_file, output_file):
tmp_vocab = []
with open(vocabulary_file, "r", encoding="utf8") as f:
tmp_vocab.extend(f.readlines())
tmp_vocab = [line.strip() for line in tmp_vocab]
vocab = dict([(x, y) for (y, x) in enumerate(tmp_vocab)])
# {'硕': 3142, 'v': 577, 'I': 4789, '\ue796': 4515, '拖': 1333, '疤': 2201 ...}
output_f = open(output_file, 'w', encoding="utf8")
with open(input_file, 'r', encoding="utf8") as f:
for line in f:
line_vec = []
for words in line.strip():
line_vec.append(vocab.get(words, UNK_ID))
output_f.write(" ".join([str(num) for num in line_vec]) + "\n")
output_f.close()
convert_to_vector(train_encode_file, train_encode_vocabulary_file, 'data/vector_data/train_encode.vec')
convert_to_vector(train_decode_file, train_decode_vocabulary_file, 'data/vector_data/train_decode.vec')
convert_to_vector(test_encode_file, train_encode_vocabulary_file, 'data/vector_data/test_encode.vec')
convert_to_vector(test_decode_file, train_decode_vocabulary_file, 'data/vector_data/test_decode.vec')
| xinruobingqing/robotChat | vector.py | Python | apache-2.0 | 1,599 | 0.012203 |
from __future__ import unicode_literals
import pytest
@pytest.fixture
def doc(EN):
return EN('This is a sentence. This is another sentence. And a third.')
@pytest.mark.models
def test_sent_spans(doc):
sents = list(doc.sents)
assert sents[0].start == 0
assert sents[0].end == 5
assert len(sents) == 3
assert sum(len(sent) for sent in sents) == len(doc)
@pytest.mark.models
def test_root(doc):
np = doc[2:4]
assert len(np) == 2
assert np.orth_ == 'a sentence'
assert np.root.orth_ == 'sentence'
assert np.root.head.orth_ == 'is'
| lukw00/spaCy | tests/spans/test_span.py | Python | mit | 579 | 0.001727 |
#!/usr/bin/env python
# encoding: utf-8
import sys
import subprocess
from flask_script import Manager
from flask_script.commands import ShowUrls
from flask_migrate import MigrateCommand
from application import create_app
from application.extensions import db
from utils.commands import GEventServer, ProfileServer
manager = Manager(create_app)
manager.add_option('-c', '--config', dest='mode', required=False)
manager.add_command("showurls", ShowUrls())
manager.add_command("gevent", GEventServer())
manager.add_command("profile", ProfileServer())
manager.add_command('db', MigrateCommand)
@manager.option('-c', '--config', help='environment config')
def simple_run(config):
app = create_app(config)
app.run(host="0.0.0.0", port=9192, debug=True)
@manager.command
def lint():
"""Runs code linter."""
lint = subprocess.call(['flake8', '--ignore=E402,F403,E501', 'application/',
'manage.py', 'tests/']) == 0
if lint:
print('OK')
sys.exit(lint)
@manager.command
def test():
"""Runs unit tests."""
tests = subprocess.call(['python', '-c', 'import tests; tests.run()'])
sys.exit(tests)
@manager.command
def create_db():
"""create tables"""
db.create_all()
if __name__ == "__main__":
manager.run()
| luke0922/celery_learning | manage.py | Python | apache-2.0 | 1,290 | 0.000775 |
#
# Copyright (c) 2004 Conectiva, Inc.
#
# Written by Gustavo Niemeyer <niemeyer@conectiva.com>
#
# This file is part of Smart Package Manager.
#
# Smart Package Manager is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Smart Package Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Smart Package Manager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from smart.const import ENFORCE, OPTIONAL, INSTALL, REMOVE, RECURSIONLIMIT
from smart.cache import PreRequires
from smart import *
import os, sys
MAXSORTERDEPTH = RECURSIONLIMIT-50
class LoopError(Error): pass
class ElementGroup(object):
def __init__(self):
self._relations = {} # (pred, succ) -> True
def getRelations(self):
return self._relations.keys()
def addPredecessor(self, succ, pred):
self._relations[(pred, succ)] = True
def addSuccessor(self, pred, succ):
self._relations[(pred, succ)] = True
class ElementOrGroup(ElementGroup): pass
class ElementAndGroup(ElementGroup): pass
class ElementSorter(object):
def __init__(self):
self._successors = {} # pred -> {(succ, kind): True}
self._predcount = {} # succ -> n
self._groups = {} # (pred, succ, kind) -> [group, ...]
self._disabled = {} # (pred, succ, kind) -> True
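    # Minimal usage sketch (illustrative only; "a", "b", "c" are hypothetical
    # elements, not part of the original module):
    #   sorter = ElementSorter()
    #   sorter.addSuccessor("a", "b")             # "a" must precede "b"
    #   sorter.addSuccessor("b", "c", OPTIONAL)   # breakable if a loop forms
    #   order = sorter.getSorted()                # -> ["a", "b", "c"]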
def reset(self):
self._successors.clear()
self._groups.clear()
def _getLoop(self, start, end=None):
if end is None:
end = start
successors = self._successors
path = [start]
done = {}
loop = {}
while path:
head = path[-1]
dct = successors.get(head)
if dct:
for succ, kind in dct:
if (head, succ, kind) not in self._disabled:
if succ in loop or succ == end:
loop.update(dict.fromkeys(path, True))
loop[end] = True # If end != start
elif succ not in done:
done[succ] = True
path.append(succ)
break
else:
path.pop()
else:
path.pop()
return loop
def _checkLoop(self, start, end=None):
if end is None:
end = start
successors = self._successors
queue = [start]
done = {}
while queue:
elem = queue.pop()
dct = successors.get(elem)
if dct:
for succ, kind in dct:
if (elem, succ, kind) not in self._disabled:
if succ == end:
return True
elif succ not in done:
done[succ] = True
queue.append(succ)
return False
def getLoops(self):
successors = self._successors
predcount = self._predcount
loops = {}
for elem in successors:
if predcount.get(elem) and elem not in loops:
loop = self._getLoop(elem)
if loop:
loops.update(loop)
return loops
def getLoopPaths(self, loops):
if not loops:
return []
successors = self._successors
paths = []
done = {}
for elem in loops:
if elem not in done:
path = [elem]
while path:
head = path[-1]
dct = successors.get(head)
if dct:
for succ, kind in dct:
if (succ in loops and
(head, succ, kind) not in self._disabled):
done[succ] = True
if succ == elem:
paths.append(path+[elem])
else:
headsucc = (head, succ)
if headsucc not in done:
done[headsucc] = True
path.append(succ)
break
else:
path.pop()
else:
path.pop()
return paths
def _breakLoops(self, elem, loops, rellock, reclock, depth=0):
if depth > MAXSORTERDEPTH:
return False
result = True
dct = self._successors.get(elem)
if dct:
for succ, kind in dct.keys():
# Should we care about this relation?
if succ not in loops:
continue
tup = (elem, succ, kind)
if tup in self._disabled:
continue
# Check if the loop for this specific relation is still alive.
if not self._checkLoop(succ, elem):
continue
# Some upper frame is already checking this. Protect
# from infinite recursion.
if tup in reclock:
result = False
break
# If this relation is locked, our only chance is breaking
# it forward.
if tup in rellock:
reclock[tup] = True
loop = self._getLoop(succ, elem)
broke = self._breakLoops(succ, loop, rellock,
reclock, depth+1)
del reclock[tup]
if not broke:
result = False
continue
# If this relation is optional, break it now.
if kind is OPTIONAL:
self._breakRelation(*tup)
continue
# We have an enforced relation. Let's check if we
# have OR groups that could satisfy it.
groups = self._groups.get(tup)
if groups:
# Any enforced AND groups tell us we can't
# break this relation.
for group in groups:
if type(group) is ElementAndGroup:
groups = None
break
if groups:
# Check if we can remove the relation from all groups.
reenable = {}
for group in groups:
reenable[group] = []
active = 0
for gpred, gsucc in group._relations:
gtup = (gpred, gsucc, kind)
if gtup in self._disabled:
if gtup not in rellock:
reenable[group].append(gtup)
else:
active += 1
if active > 1: break
if active > 1:
del reenable[group]
elif not reenable[group]:
break
else:
# These relations must not be reenabled in
# the loop breaking steps below.
relations = self._breakRelation(*tup)
for rtup in relations:
rellock[rtup] = True
# Reenable the necessary relations, if possible.
# Every group must have at least one active relation
# so that we can disable our own relation.
for group in reenable:
succeeded = False
# Check if some iteration of _breakLoop() below
# already reenabled one relation with success.
for gtup in reenable[group]:
if gtup not in self._disabled:
succeeded = True
break
if succeeded:
continue
# Nope. Let's try to do that here.
for gtup in reenable[group]:
erelations = self._enableRelation(*gtup)
for etup in erelations:
rellock[etup] = True
for epred, esucc, ekind in erelations:
eloop = self._getLoop(esucc, epred)
if (eloop and not
self._breakLoops(esucc, eloop, rellock,
reclock, depth+1)):
break
else:
succeeded = True
for etup in erelations:
del rellock[etup]
if succeeded:
break
self._breakRelation(*gtup)
if not succeeded:
break
else:
# Done!
for rtup in relations:
del rellock[rtup]
continue
# Some OR group failed to exchange the relation,
# so we can't break our own relation.
for rtup in self._enableRelation(*tup):
del rellock[rtup]
# Our last chance is breaking it forward.
reclock[tup] = True
loop = self._getLoop(succ, elem)
broke = self._breakLoops(succ, loop, rellock, reclock, depth+1)
del reclock[tup]
if not broke:
result = False
return result
def _breakRelation(self, pred, succ, kind):
tup = (pred, succ, kind)
self._disabled[tup] = True
relations = {tup: True}
groups = self._groups.get(tup)
if groups:
for group in groups:
if type(group) is ElementAndGroup:
for gpred, gsucc in group._relations:
gtup = (gpred, gsucc, kind)
self._disabled[gtup] = True
relations[gtup] = True
return relations
def _enableRelation(self, pred, succ, kind):
tup = (pred, succ, kind)
del self._disabled[tup]
relations = {tup: True}
groups = self._groups.get(tup)
if groups:
for group in groups:
if type(group) is ElementAndGroup:
for gpred, gsucc in group._relations:
if gpred != pred or gsucc != succ:
gtup = (gpred, gsucc, kind)
del self._disabled[gtup]
relations[gtup] = True
return relations
def getSuccessors(self, elem):
succs = {}
for succ, kind in self._successors[elem]:
            if (elem, succ, kind) not in self._disabled:
succs[succ] = True
return succs
def getPredecessors(self, elem):
preds = {}
for pred in self._successors:
for succ, kind in self._successors[pred]:
if succ == elem and (pred, succ, kind) not in self._disabled:
preds[pred] = True
return preds
def getAllSuccessors(self, elem):
succs = {}
queue = [elem]
while queue:
elem = queue.pop()
for succ, kind in self._successors[elem]:
                if (succ not in succs and
(elem, succ, kind) not in self._disabled):
succs[succ] = True
queue.append(succ)
return succs
def getAllPredecessors(self, elem):
preds = {}
queue = [elem]
while queue:
elem = queue.pop()
for pred in self._successors:
for succ, kind in self._successors[pred]:
                if (succ == elem and pred not in preds and
                        (pred, succ, kind) not in self._disabled):
                    preds[pred] = True
                    queue.append(pred)
return preds
def breakLoops(self):
successors = self._successors
result = True
loops = self.getLoops()
if loops:
for elem in successors:
if elem in loops:
if not self._breakLoops(elem, loops, {}, {}):
result = False
return result
def addElement(self, elem):
if elem not in self._successors:
self._successors[elem] = ()
def addPredecessor(self, succ, pred, kind=ENFORCE):
self.addSuccessor(pred, succ, kind)
def addSuccessor(self, pred, succ, kind=ENFORCE):
successors = self._successors
predcount = self._predcount
if succ not in successors:
successors[succ] = ()
dct = successors.get(pred)
if not dct:
successors[pred] = {(succ, kind): True}
if succ not in predcount:
predcount[succ] = 1
else:
predcount[succ] += 1
elif (succ, kind) not in dct:
dct[(succ, kind)] = True
if succ not in predcount:
predcount[succ] = 1
else:
predcount[succ] += 1
groups = self._groups.get((pred, succ, kind))
if groups:
group = ElementAndGroup()
group.addPredecessor(succ, pred)
groups.append(group)
def addGroup(self, group, kind=ENFORCE):
if not group._relations:
return
if len(group._relations) == 1:
pred, succ = iter(group._relations).next()
self.addSuccessor(pred, succ, kind)
return
successors = self._successors
predcount = self._predcount
for pred, succ in group._relations:
groups = self._groups.get((pred, succ, kind))
if not groups:
groups = self._groups[(pred, succ, kind)] = []
dct = successors.get(pred)
if dct and (succ, kind) in dct:
                agroup = ElementAndGroup()
                agroup.addSuccessor(pred, succ)
                groups.append(agroup)
groups.append(group)
if succ not in successors:
successors[succ] = ()
dct = successors.get(pred)
if not dct:
successors[pred] = {(succ, kind): True}
if succ not in predcount:
predcount[succ] = 1
else:
predcount[succ] += 1
elif (succ, kind) not in dct:
dct[(succ, kind)] = True
if succ not in predcount:
predcount[succ] = 1
else:
predcount[succ] += 1
def getSorted(self):
successors = self._successors
predcount = self._predcount.copy()
self._profile(1)
brokeall = self.breakLoops()
self._profile(2)
if not brokeall:
raise LoopError, _("Unbreakable loops found while sorting")
for pred, succ, kind in self._disabled:
predcount[succ] -= 1
result = [x for x in successors if not predcount.get(x)]
for elem in result:
dct = successors.get(elem)
if dct:
for succ, kind in dct:
if (elem, succ, kind) in self._disabled:
continue
left = predcount.get(succ)
if left is None:
continue
if left-1 == 0:
del predcount[succ]
result.append(succ)
else:
predcount[succ] -= 1
self._profile(3)
if len(result) != len(successors):
raise Error, _("Internal error: there are still loops (%d != %d)!")\
% (len(result), len(successors))
return result
def _profile(self, id):
if sysconf.get("sorter-profile"):
import time
if id == 1:
successors = self._successors
enforce = 0
optional = 0
ngroups = 0
for pred in self._successors:
for succ, kind in successors[pred]:
groups = self._groups.get((pred, succ, kind))
if groups:
ngroups += len(groups)
if kind is ENFORCE:
enforce += 1
else:
optional += 1
print "Number of elements:", len(successors)
print "Number of relations:", enforce+optional
print "Number of relation groups:", ngroups
print "Number of enforced relations:", enforce
print "Number of optional relations:", optional
self._profile_start = time.clock()
elif id == 2:
print "Number of disabled relations:", len(self._disabled)
print "Break delay: %.2fs" % (time.clock()-self._profile_start)
self._profile_start = time.clock()
elif id == 3:
print "Sort delay: %.2fs" % (time.clock()-self._profile_start)
class ChangeSetSorter(ElementSorter):
def __init__(self, changeset=None):
ElementSorter.__init__(self)
if changeset:
self.setChangeSet(changeset)
def setChangeSet(self, changeset):
self.reset()
for pkg in changeset:
op = changeset[pkg]
elem = (pkg, op)
self.addElement(elem)
# Packages being installed or removed must go in
# before their dependencies are removed, or after
# their dependencies are reinstalled.
for req in pkg.requires:
group = ElementOrGroup()
for prv in req.providedby:
for prvpkg in prv.packages:
if prvpkg is pkg:
continue
if changeset.get(prvpkg) is INSTALL:
group.addSuccessor((prvpkg, INSTALL), elem)
elif prvpkg.installed:
if changeset.get(prvpkg) is not REMOVE:
break
group.addSuccessor(elem, (prvpkg, REMOVE))
else:
continue
break
else:
relations = group.getRelations()
if relations:
# Should Requires of PreRequires become PreRequires
# as well?
if isinstance(req, PreRequires):
kind = ENFORCE
else:
kind = OPTIONAL
self.addGroup(group, kind)
if op is INSTALL:
# Upgraded packages being removed must go in
# before this package's installation. Notice that
# depending on the package manager, these remove
# entries will probably be ripped out and dealt
# by the package manager itself during upgrades.
upgpkgs = [upgpkg for prv in pkg.provides
for upg in prv.upgradedby
for upgpkg in upg.packages]
upgpkgs.extend([prvpkg for upg in pkg.upgrades
for prv in upg.providedby
for prvpkg in prv.packages])
for upgpkg in upgpkgs:
if upgpkg is pkg:
continue
if changeset.get(upgpkg) is REMOVE:
self.addSuccessor((upgpkg, REMOVE), elem, ENFORCE)
# Conflicted packages being removed must go in
# before this package's installation.
cnfpkgs = [prvpkg for cnf in pkg.conflicts
for prv in cnf.providedby
for prvpkg in prv.packages
if prvpkg is not pkg]
cnfpkgs.extend([cnfpkg for prv in pkg.provides
for cnf in prv.conflictedby
for cnfpkg in cnf.packages
if cnfpkg is not pkg])
for cnfpkg in cnfpkgs:
if cnfpkg is pkg:
continue
if changeset.get(cnfpkg) is REMOVE:
self.addSuccessor((cnfpkg, REMOVE), elem, ENFORCE)
assert len(self._successors) == len(changeset)
| dmacvicar/spacewalk | client/solaris/smartpm/smart/sorter.py | Python | gpl-2.0 | 22,108 | 0.001357 |
#! /usr/bin/env python
"""
# control_get_firmware.py: get firmware version of Gemalto readers
# Copyright (C) 2009-2012 Ludovic Rousseau
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, see <http://www.gnu.org/licenses/>.
from smartcard.System import readers
from smartcard.pcsc.PCSCPart10 import (SCARD_SHARE_DIRECT,
SCARD_LEAVE_CARD, SCARD_CTL_CODE, getTlvProperties)
for reader in readers():
cardConnection = reader.createConnection()
cardConnection.connect(mode=SCARD_SHARE_DIRECT,
disposition=SCARD_LEAVE_CARD)
print "Reader:", reader
# properties returned by IOCTL_FEATURE_GET_TLV_PROPERTIES
properties = getTlvProperties(cardConnection)
# Gemalto devices supports a control code to get firmware
key = 'PCSCv2_PART10_PROPERTY_wIdVendor'
if key in properties:
if properties[key] == 0x08E6:
get_firmware = [0x02]
IOCTL_SMARTCARD_VENDOR_IFD_EXCHANGE = SCARD_CTL_CODE(1)
res = cardConnection.control(IOCTL_SMARTCARD_VENDOR_IFD_EXCHANGE,
get_firmware)
print " Firmware:", "".join([chr(x) for x in res])
else:
print " Not a Gemalto reader"
key = 'PCSCv2_PART10_PROPERTY_sFirmwareID'
if key in properties:
firmware = properties[key]
print " Firmware:", firmware
else:
print " %s not supported" % key
else:
print " %s not supported" % key
| sixtyfive/pcsc-ctapi-wrapper | PCSC/UnitaryTests/control_get_firmware.py | Python | lgpl-2.1 | 2,053 | 0.001461 |
from corpsearchsystem import CorpSearchSystem
from modules.editdistance.normalized import \
NormalizedEditDistanceQueryToHandle,\
NormalizedEditDistanceQueryToDisplayName
from modules.editdistance.lengths import \
LengthOfQuery, LengthOfHandle, LengthOfDisplayName
from modules.editdistance.stopwords import \
NormalizedEditDistanceStopwordsQueryToHandle,\
NormalizedEditDistanceStopwordsQueryToDisplayName
from modules.description.counts import OccurrencesOfQueryInDescCaseInsensitive
from modules.description.cosinesimilarity import \
CosineSimilarityDescriptionAndQuery, \
CosineSimilarityDescriptionAndDDG
from modules.languagemodels.bigram import \
DescriptionLanguageModel, \
PostContentLanguageModel
Baseline = CorpSearchSystem('Baseline', [
NormalizedEditDistanceStopwordsQueryToHandle,
NormalizedEditDistanceStopwordsQueryToDisplayName
])
# Baseline plus x.
PlusLengths = CorpSearchSystem('+ Lengths', [
NormalizedEditDistanceStopwordsQueryToHandle,
NormalizedEditDistanceStopwordsQueryToDisplayName,
LengthOfQuery,
LengthOfHandle,
LengthOfDisplayName
])
PlusQueryOccurrences = CorpSearchSystem('+ Query Occurrences', [
NormalizedEditDistanceStopwordsQueryToHandle,
NormalizedEditDistanceStopwordsQueryToDisplayName,
OccurrencesOfQueryInDescCaseInsensitive
])
PlusDescriptionCosineSimilarity = CorpSearchSystem(
'+ Description-Query Cosine Similarity', [
NormalizedEditDistanceStopwordsQueryToHandle,
NormalizedEditDistanceStopwordsQueryToDisplayName,
CosineSimilarityDescriptionAndQuery
])
PlusDescriptionDDGCosineSimilarity = CorpSearchSystem(
'+ Description-DDG Cosine Similarity', [
NormalizedEditDistanceStopwordsQueryToHandle,
NormalizedEditDistanceStopwordsQueryToDisplayName,
CosineSimilarityDescriptionAndDDG
])
PlusDescriptionLanguageModels = CorpSearchSystem(
'+ Description Language Models', [
NormalizedEditDistanceStopwordsQueryToHandle,
NormalizedEditDistanceStopwordsQueryToDisplayName,
DescriptionLanguageModel
])
PlusPostContentLanguageModels = CorpSearchSystem(
'+ Post Content Language Models', [
NormalizedEditDistanceStopwordsQueryToHandle,
NormalizedEditDistanceStopwordsQueryToDisplayName,
PostContentLanguageModel
])
# End Baseline plus x.
Final = CorpSearchSystem('Production System', [
NormalizedEditDistanceStopwordsQueryToHandle,
NormalizedEditDistanceStopwordsQueryToDisplayName,
LengthOfQuery,
LengthOfHandle,
LengthOfDisplayName,
OccurrencesOfQueryInDescCaseInsensitive,
CosineSimilarityDescriptionAndQuery,
CosineSimilarityDescriptionAndDDG,
DescriptionLanguageModel,
PostContentLanguageModel
])
| WING-NUS/corpSearch | system/systems.py | Python | lgpl-3.0 | 2,889 | 0 |
#!/usr/bin/env python
from __future__ import division
from math import *
import numpy as np
from numpy.fft import fft, fftshift, fftfreq
def fermi(x, smearing):
"""Return Fermi function"""
return 1./(1. + np.exp(x/smearing))
def step(x, delta, smearing):
"""Return smoothed step-function Fermi(x-Delta)-Fermi(x)"""
return fermi(x-delta,smearing) - fermi(x,smearing)
def build_wire(x, heights, delta, smearing):
"""Return sum_(n=-N,N) alpha_n step(x-n*Delta)"""
N_module = heights.size
wire = np.zeros(x.size)
for n in np.arange(N_module):
wire = ( wire + heights[n]*
step(x - (n-N_module/2.)*delta, delta, smearing) )
return wire
def powerspectrum(data, dx):
"""Return power-spectrum of input signal"""
powerspec = np.abs(fftshift(fft(data))*dx)**2
freq = 2.*pi*fftfreq(data.size, dx)
return freq, powerspec
def AGS(k, heights, delta, smearing):
"""Return roughness-height power spectrum W(k)"""
N_module = np.arange(heights.size)
# Take correlation of alphas into account:
omega = ([ np.exp(-1j*n*k*delta)*heights[n] for n in N_module ])
omega = np.sum(omega, axis=0)
omega = np.abs(omega)**2 / heights.size
return (1./delta * (2.*pi*smearing*
np.sinh(k*pi*smearing)**(-1)*np.sin(k*delta/2.))**2) * omega
def SGS(k, heights, delta, smearing):
"""Return roughness-height power spectrum S(k)"""
N_module = np.arange(heights.size)
# a[n-1] and a[n+1] = 0 for n=N and n=0
a = np.concatenate([ [0],heights,[0] ])
# Take correlation of alphas into account:
omega = ([ np.exp(-1j*n*k*delta)*(a[n]*(a[n]-a[n+1])*
np.exp(-1j*k*delta) + a[n]*(a[n]-a[n-1])) for n in N_module ])
omega = np.sum(omega, axis=0)
omega = np.abs(omega)**2 / heights.size
return ( 1./delta / 72. * (k*pi*(1.+k**2*smearing**2)*
np.sinh(k*pi*smearing)**(-1))**2 ) * omega
def transmission(n, d, L, sigma, k, heights, delta, smearing):
"""
Return transmission T based on analytical expressions for W and S
"""
# only symmetric wire geometry considered yet
    invLbAGS = (4.*sigma**2 / d**6 * (n*pi)**4 /
        k**2 * AGS(2*k,heights,delta,smearing))
    invLbSGS = (0.5*(sigma/d*pi*n)**4 / k**2 *
        (1./3. + 1./(pi*n)**2)**2 * SGS(2*k,heights,delta,smearing))
return np.exp(-L*(invLbAGS+invLbSGS)), invLbAGS, invLbSGS
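# Illustrative call (hypothetical parameter values, not from the original):
#   heights = np.random.uniform(-1.0, 1.0, 200)   # random module heights
#   k = np.linspace(0.1, 3.0, 500)                # wavenumber grid
#   T, invLbAGS, invLbSGS = transmission(n=1, d=1.0, L=100.0, sigma=0.01,
#                                        k=k, heights=heights,
#                                        delta=1.0, smearing=0.05)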
| ottodietz/rough-q1d | q1d/q1d_step.py | Python | gpl-3.0 | 2,411 | 0.014932 |
from cred.models import CredChangeQ
from django.conf import settings
from django.utils import timezone
def base_template_reqs(request):
cntx = {
'pageurl': request.path,
'LDAP_ENABLED': settings.LDAP_ENABLED,
'GOAUTH2_ENABLED': settings.GOAUTH2_ENABLED,
'EXPORT_ENABLED': not settings.RATTIC_DISABLE_EXPORT,
'TEMPLATE_DEBUG': settings.TEMPLATE_DEBUG,
'ALLOWPWCHANGE': not (settings.LDAP_ENABLED
and not settings.AUTH_LDAP_ALLOW_PASSWORD_CHANGE),
'rattic_icon': 'rattic/img/rattic_icon_normal.png',
'rattic_logo': 'rattic/img/rattic_logo_normal.svg',
}
if settings.HELP_SYSTEM_FILES:
cntx['helplinks'] = True
else:
cntx['helplinks'] = False
if request.user.is_authenticated():
cntx['changeqcount'] = CredChangeQ.objects.for_user(request.user).count()
return cntx
def logo_selector(request):
cntx = {}
tz = timezone.get_current_timezone()
time = tz.normalize(timezone.now())
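    # swap in the sleeping logo between 21:00 and 05:59 local time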
if ((time.hour > 20 and time.hour < 24) or
(time.hour >= 0 and time.hour < 6)):
cntx['rattic_icon'] = 'rattic/img/rattic_icon_sleeping.png'
cntx['rattic_logo'] = 'rattic/img/rattic_logo_sleeping.svg'
return cntx
| ipernet/RatticWeb | ratticweb/context_processors.py | Python | gpl-2.0 | 1,262 | 0.001585 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
import json
from model_mommy import mommy
from io import BytesIO
from PIL import Image
from tiny_rest.tests import Client
import status
from blog.models import Post, Comment
User = get_user_model()
class TestPostAPIView(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create_user(
'user', 'user@email.com', '123456'
)
self.list_url = reverse('blog:post_api')
self.posts = mommy.make(Post, user=self.user, _quantity=20)
self.detail_url = reverse('blog:post_api', args=[self.posts[0].pk])
self.file_obj = BytesIO()
image = Image.new('RGBA', size=(50, 50), color=(256, 0, 0))
image.save(self.file_obj, 'PNG')
self.file_obj.name = 'test.jpg'
self.file_obj.seek(0)
self.client.login(username='user', password='123456')
def tearDown(self):
for post in Post.objects.all():
post.image.delete()
post.delete()
def test_list(self):
self.client.logout()
response = self.client.get(self.list_url)
data = json.loads(response.content.decode())
self.assertEqual(len(data['data']), 10)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response = self.client.get(self.list_url, {'page': 'invalid'})
data = json.loads(response.content.decode())
self.assertEqual(len(data['data']), 10)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response = self.client.get(self.list_url, {'page': 100})
data = json.loads(response.content.decode())
self.assertEqual(len(data['data']), 10)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_detail(self):
self.client.logout()
response = self.client.get(self.detail_url)
data = json.loads(response.content.decode())
self.assertEqual(data['id'], self.posts[0].pk)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.posts[0].delete()
response = self.client.get(self.detail_url)
data = json.loads(response.content.decode())
self.assertEqual(data['error'], 'Resource Not Found')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_create(self):
response = self.client.post(self.list_url, {})
data = json.loads(response.content.decode())
self.assertEqual(data['error']['body'][0], 'This field is required.')
self.assertEqual(data['error']['image'][0], 'This field is required.')
self.assertEqual(data['error']['slug'][0], 'This field is required.')
self.assertEqual(data['error']['title'][0], 'This field is required.')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
response = self.client.post(
self.list_url,
{
'title': 'my post',
'slug': 'my-post',
'body': 'my body',
'image': self.file_obj
}
)
data = json.loads(response.content.decode())
self.assertEqual(data['title'], 'my post')
self.assertEqual(data['slug'], 'my-post')
self.assertEqual(data['body'], 'my body')
self.assertEqual(data['user']['id'], self.user.pk)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_update(self):
response = self.client.put(self.detail_url, {})
data = json.loads(response.content.decode())
self.assertEqual(data['error']['body'][0], 'This field is required.')
self.assertEqual(data['error']['slug'][0], 'This field is required.')
self.assertEqual(data['error']['title'][0], 'This field is required.')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
response = self.client.put(
self.detail_url,
{
'title': 'my post',
'slug': 'my-post',
'body': 'my body',
'image': self.file_obj
},
)
data = json.loads(response.content.decode())
self.assertEqual(data['title'], 'my post')
self.assertEqual(data['slug'], 'my-post')
self.assertEqual(data['body'], 'my body')
self.assertEqual(data['user']['id'], self.user.pk)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_partial_update(self):
post = self.posts[0]
response = self.client.patch(
self.detail_url,
{
'title': 'my post',
},
)
data = json.loads(response.content.decode())
self.assertEqual(data['title'], 'my post')
self.assertEqual(data['slug'], post.slug)
self.assertEqual(data['body'], post.body)
self.assertEqual(data['user']['id'], post.user.pk)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_destroy(self):
post = self.posts[0]
response = self.client.delete(self.detail_url)
self.assertFalse(Post.objects.filter(pk=post.pk).exists())
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
response = self.client.delete(self.detail_url)
data = json.loads(response.content.decode())
self.assertEqual(data['error'], 'Resource Not Found')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
class TestCommentAPIView(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create_user(
'user', 'user@email.com', '123456'
)
self.post = mommy.make(Post, user=self.user)
self.comments = mommy.make(
Comment, post=self.post, user=self.user, _quantity=20
)
self.list_url = reverse('blog:comment_api', args=[self.post.pk])
self.detail_url = reverse(
'blog:comment_api', args=[self.post.pk, self.comments[0].pk]
)
self.client.login(username='user', password='123456')
def test_list(self):
self.client.logout()
response = self.client.get(self.list_url)
data = json.loads(response.content.decode())
self.assertEqual(len(data['data']), 10)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response = self.client.get(self.list_url, {'page': 'invalid'})
data = json.loads(response.content.decode())
self.assertEqual(len(data['data']), 10)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response = self.client.get(self.list_url, {'page': 100})
data = json.loads(response.content.decode())
self.assertEqual(len(data['data']), 10)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_detail(self):
self.client.logout()
response = self.client.get(self.detail_url)
data = json.loads(response.content.decode())
self.assertEqual(data['id'], self.comments[0].pk)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.comments[0].delete()
response = self.client.get(self.detail_url)
data = json.loads(response.content.decode())
self.assertEqual(data['error'], 'Resource Not Found')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_create(self):
response = self.client.post(self.list_url, {})
data = json.loads(response.content.decode())
self.assertEqual(
data['error']['comment'][0], 'This field is required.'
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
response = self.client.post(
self.list_url,
{
'comment': 'my comment'
}
)
data = json.loads(response.content.decode())
self.assertEqual(data['post'], self.post.pk)
self.assertEqual(data['comment'], 'my comment')
self.assertEqual(data['user']['id'], self.user.pk)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_update(self):
response = self.client.put(self.detail_url, {})
data = json.loads(response.content.decode())
self.assertEqual(
data['error']['comment'][0], 'This field is required.'
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
response = self.client.put(
self.detail_url,
{
'comment': 'my comment'
},
)
data = json.loads(response.content.decode())
self.assertEqual(data['post'], self.post.pk)
self.assertEqual(data['comment'], 'my comment')
self.assertEqual(data['user']['id'], self.user.pk)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_partial_update(self):
comment = self.comments[0]
response = self.client.patch(
self.detail_url,
{
'comment': 'my comment',
},
)
data = json.loads(response.content.decode())
self.assertEqual(data['id'], comment.pk)
self.assertEqual(data['post'], self.post.pk)
self.assertEqual(data['comment'], 'my comment')
self.assertEqual(data['user']['id'], self.user.pk)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_destroy(self):
comment = self.comments[0]
response = self.client.delete(self.detail_url)
self.assertFalse(Comment.objects.filter(pk=comment.pk).exists())
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
response = self.client.delete(self.detail_url)
data = json.loads(response.content.decode())
self.assertEqual(data['error'], 'Resource Not Found')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
| allisson/django-tiny-rest | testproject/blog/tests/test_views.py | Python | mit | 10,142 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-18 06:21
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('CareerTinder', '0004_auto_20160918_0152'),
]
operations = [
migrations.RenameField(
model_name='hiree',
old_name='first_name',
new_name='email',
),
migrations.RenameField(
model_name='hiree',
old_name='last_name',
new_name='name',
),
]
| sarojaerabelli/HVGS | CareerTinderServer/CareerTinder/migrations/0005_auto_20160918_0221.py | Python | mit | 573 | 0 |
import pygame
import time
import scripts
"""
Score class
Handles all the score area
package: ianna
"""
class IannaScore():
def __init__ (self, buffer, screen, game_entities):
self.score_image = pygame.image.load('artwork/marcador.png').convert()
self.font = pygame.image.load('artwork/font.png').convert()
self.chars = []
self.buffer = buffer
self.screen = screen
self.game_entities = game_entities
self.weapons = []
self.weapons.append(pygame.image.load('artwork/marcador_armas_sword.png').convert())
self.weapons.append(pygame.image.load('artwork/marcador_armas_eclipse.png').convert())
self.weapons.append(pygame.image.load('artwork/marcador_armas_axe.png').convert())
self.weapons.append(pygame.image.load('artwork/marcador_armas_blade.png').convert())
self.first_object_in_inventory = 0
# We have 64 chars, in ASCII order starting by BLANK (32)
# There are some special chars, look at the font!
for tile_x in range (0,32):
rect = (tile_x*8, 0, 8, 8)
self.chars.append(self.font.subsurface(rect))
for tile_x in range (0,32):
rect = (tile_x*8, 8, 8, 8)
self.chars.append(self.font.subsurface(rect))
def clean_text_area(self):
for y in range(0,3):
for x in range(0,30):
self.buffer.blit(self.chars[0],(8+x*8,168+y*8))
def print_string(self,string):
fpsClock = pygame.time.Clock()
y=0
x=0
i=0
while i < len(string):
word = ""
# Find the word
while string[i] != ',' and string[i] != '.' and string[i] != ' ':
word = word + string[i]
i = i + 1
# Add the punctuation character
word = word + string[i]
i = i + 1
# Now print it
if x + len(word) > 30:
y = y + 1
x = 0
if y == 3: # We need to wait until the player presses any key
self.buffer.blit(self.chars[32],(240,184))
pygame.transform.scale(self.buffer,(256*3,192*3),self.screen)
pygame.display.flip()
self.wait_for_keypress()
y = 0
self.clean_text_area()
j = 0
while j < len(word):
char = ord(word[j]) - 32
self.buffer.blit(self.chars[char],(8+x*8,168+y*8))
x = x + 1
j = j + 1
pygame.transform.scale(self.buffer,(256*3,192*3),self.screen)
pygame.display.flip()
            fpsClock.tick(25) # run at 25 fps
self.buffer.blit(self.chars[32],(240,184))
pygame.transform.scale(self.buffer,(256*3,192*3),self.screen)
pygame.display.flip()
self.wait_for_keypress()
def print_char(self,char,x,y):
char = ord(str(char)) - 32
self.buffer.blit(self.chars[char],(x,y))
def wait_for_keypress(self):
'''
Silly function, just wait for a keypress to happen
In the Spectrum version, it should be way better
'''
keypressed = False
keyreleased = False
key = None
while (not keypressed) and (not keyreleased):
events = pygame.event.get()
for event in events:
if event.type == pygame.KEYDOWN: # keypressed, wait until it is released
key = event.key
keypressed = True
if event.type == pygame.KEYUP: # keypressed, wait until it is released
if key == event.key:
keyreleased = True
def print_meter(self,x,value, color):
'''
Display an entity health, on X
'''
y=191
value = value*23/100
rect = [x+2,y-value,5,value]
pygame.draw.rect(self.buffer,color,rect)
def print_inventory(self,player):
'''
Display the inventory
'''
currentx = 24
x = 0
if player.current_object > self.first_object_in_inventory + 2:
self.first_object_in_inventory = self.first_object_in_inventory + 1
elif player.current_object < self.first_object_in_inventory:
self.first_object_in_inventory = self.first_object_in_inventory - 1
for item in player.inventory[self.first_object_in_inventory:]:
if x == 3:
break
self.buffer.blit(player.map.tile_table[self.tiles_per_pickable_object[item]], (currentx,168))
currentx = currentx + 24
x = x + 1
# Use a marker for the current selected object
self.buffer.blit(self.chars[63],(24+(player.current_object-self.first_object_in_inventory)*24,184))
def draw(self):
self.buffer.set_clip(pygame.Rect(0,160,256,192)) # set clipping area for game, should then set clipping for score area
self.buffer.blit(self.score_image,(0,160))
# Print barbarian energy
self.print_meter(168,(self.game_entities[0].energy*100) / self.game_entities[0].get_entity_max_energy(),(255,0,0))
# Print barbarian level
self.print_meter(176,(self.game_entities[0].experience*100) / self.game_entities[0].get_player_max_exp(),(0,255,255))
# Print current weapon
self.buffer.blit(self.weapons[self.game_entities[0].weapon-1],(112,168))
if self.game_entities[1] and self.game_entities[1].enemy_type != "OBJECT_ENEMY_ROCK":
entity = self.game_entities[1]
energy = (entity.energy*100) / entity.enemy_energy[entity.enemy_type][entity.level]
self.print_meter(192,energy,(0,255,0))
# Print energy in numbers
if entity.energy > 99:
print "WARNING: enemy energy is > 100"
else:
self.print_char(entity.energy/10,200,176)
self.print_char(entity.energy%10,208,176)
self.print_char(entity.level,208,184)
if self.game_entities[2] and self.game_entities[2].enemy_type not in ('OBJECT_ENEMY_ROCK','OBJECT_ENEMY_SECONDARY'):
entity = self.game_entities[2]
energy = (entity.energy*100) / entity.enemy_energy[entity.enemy_type][entity.level]
self.print_meter(216,energy,(0,255,0))
if entity.energy > 99:
print "WARNING: enemy energy is > 100"
else:
self.print_char(entity.energy/10,224,176)
self.print_char(entity.energy%10,232,176)
self.print_char(entity.level,232,184)
self.print_inventory(self.game_entities[0])
# Remember to copy this from scripts.py when new objects are created
tiles_per_pickable_object = { "OBJECT_KEY_GREEN": 217,
"OBJECT_KEY_BLUE": 218,
"OBJECT_KEY_YELLOW": 219,
"OBJECT_BREAD": 220,
"OBJECT_MEAT": 221,
"OBJECT_HEALTH": 222,
"OBJECT_KEY_RED": 223,
"OBJECT_KEY_WHITE": 224,
"OBJECT_KEY_PURPLE": 225,
}
| fjpena/sword-of-ianna-zx | python_src/ianna_score.py | Python | apache-2.0 | 6,072 | 0.049407 |
from sympy.core import pi, oo, symbols, Function, Rational, Integer, GoldenRatio, EulerGamma, Catalan, Lambda, Dummy
from sympy.functions import Piecewise, sin, cos, Abs, exp, ceiling, sqrt
from sympy.utilities.pytest import raises
from sympy.printing.jscode import JavascriptCodePrinter
from sympy.utilities.lambdify import implemented_function
from sympy.tensor import IndexedBase, Idx
# import test
from sympy import jscode
x, y, z = symbols('x,y,z')
g = Function('g')
def test_printmethod():
assert jscode(Abs(x)) == "Math.abs(x)"
def test_jscode_sqrt():
assert jscode(sqrt(x)) == "Math.sqrt(x)"
assert jscode(x**0.5) == "Math.sqrt(x)"
assert jscode(sqrt(x)) == "Math.sqrt(x)"
def test_jscode_Pow():
assert jscode(x**3) == "Math.pow(x, 3)"
assert jscode(x**(y**3)) == "Math.pow(x, Math.pow(y, 3))"
assert jscode(1/(g(x)*3.5)**(x - y**x)/(x**2 + y)) == \
"Math.pow(3.5*g(x), -x + Math.pow(y, x))/(Math.pow(x, 2) + y)"
assert jscode(x**-1.0) == '1/x'
def test_jscode_constants_mathh():
assert jscode(exp(1)) == "Math.E"
assert jscode(pi) == "Math.PI"
assert jscode(oo) == "Number.POSITIVE_INFINITY"
assert jscode(-oo) == "Number.NEGATIVE_INFINITY"
def test_jscode_constants_other():
assert jscode(2*GoldenRatio) == "var GoldenRatio = 1.61803398874989;\n2*GoldenRatio"
assert jscode(2*Catalan) == "var Catalan = 0.915965594177219;\n2*Catalan"
assert jscode(2*EulerGamma) == "var EulerGamma = 0.577215664901533;\n2*EulerGamma"
def test_jscode_Rational():
assert jscode(Rational(3,7)) == "3/7"
assert jscode(Rational(18,9)) == "2"
assert jscode(Rational(3,-7)) == "-3/7"
assert jscode(Rational(-3,-7)) == "3/7"
def test_jscode_Integer():
assert jscode(Integer(67)) == "67"
assert jscode(Integer(-1)) == "-1"
def test_jscode_functions():
assert jscode(sin(x) ** cos(x)) == "Math.pow(Math.sin(x), Math.cos(x))"
def test_jscode_inline_function():
x = symbols('x')
g = implemented_function('g', Lambda(x, 2*x))
assert jscode(g(x)) == "2*x"
g = implemented_function('g', Lambda(x, 2*x/Catalan))
assert jscode(g(x)) == "var Catalan = %s;\n2*x/Catalan" %Catalan.n()
A = IndexedBase('A')
i = Idx('i', symbols('n', integer=True))
g = implemented_function('g', Lambda(x, x*(1 + x)*(2 + x)))
assert jscode(g(A[i]), assign_to=A[i]) == (
"for (var i=0; i<n; i++){\n"
" A[i] = (1 + A[i])*(2 + A[i])*A[i];\n"
"}"
)
def test_jscode_exceptions():
assert jscode(ceiling(x)) == "Math.ceil(x)"
assert jscode(Abs(x)) == "Math.abs(x)"
def test_jscode_boolean():
assert jscode(x & y) == "x && y"
assert jscode(x | y) == "x || y"
assert jscode(~x) == "!x"
assert jscode(x & y & z) == "x && y && z"
assert jscode(x | y | z) == "x || y || z"
assert jscode((x & y) | z) == "z || x && y"
assert jscode((x | y) & z) == "z && (x || y)"
def test_jscode_Piecewise():
p = jscode(Piecewise((x, x<1), (x**2, True)))
s = \
"""\
if (x < 1) {
x
}
else {
Math.pow(x, 2)
}\
"""
assert p == s
def test_jscode_Piecewise_deep():
p = jscode(2*Piecewise((x, x<1),(x**2, True)))
s = \
"""\
2*if (x < 1) {
x
}
else {
Math.pow(x, 2)
}\
"""
assert p == s
def test_jscode_settings():
raises(TypeError, lambda : jscode(sin(x),method="garbage"))
def test_jscode_Indexed():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
i,j,k,n,m,o = symbols('i j k n m o', integer=True)
p = JavascriptCodePrinter()
p._not_c = set()
x = IndexedBase('x')[Idx(j, n)]
assert p._print_Indexed(x) == 'x[j]'
A = IndexedBase('A')[Idx(i, m), Idx(j, n)]
assert p._print_Indexed(A) == 'A[%s]'% str(j + n*i)
B = IndexedBase('B')[Idx(i, m), Idx(j, n), Idx(k, o)]
assert p._print_Indexed(B) == 'B[%s]'% str(k + i*n*o + j*o)
assert p._not_c == set()
def test_jscode_loops_matrix_vector():
n,m = symbols('n m', integer=True)
A = IndexedBase('A')
x = IndexedBase('x')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
s = (
'for (var i=0; i<m; i++){\n'
' y[i] = 0;\n'
'}\n'
'for (var i=0; i<m; i++){\n'
' for (var j=0; j<n; j++){\n'
' y[i] = y[i] + A[i*n + j]*x[j];\n'
' }\n'
'}'
)
c = jscode(A[i, j]*x[j], assign_to=y[i])
assert c == s
def test_dummy_loops():
# the following line could also be
# [Dummy(s, integer=True) for s in 'im']
# or [Dummy(integer=True) for s in 'im']
i, m = symbols('i m', integer=True, cls=Dummy)
x = IndexedBase('x')
y = IndexedBase('y')
i = Idx(i, m)
expected = (
'for (var i_%(icount)i=0; i_%(icount)i<m_%(mcount)i; i_%(icount)i++){\n'
' y[i_%(icount)i] = x[i_%(icount)i];\n'
'}'
) % {'icount': i.label.dummy_index, 'mcount': m.dummy_index}
code = jscode(x[i], assign_to=y[i])
assert code == expected
def test_jscode_loops_add():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m = symbols('n m', integer=True)
A = IndexedBase('A')
x = IndexedBase('x')
y = IndexedBase('y')
z = IndexedBase('z')
i = Idx('i', m)
j = Idx('j', n)
s = (
'for (var i=0; i<m; i++){\n'
' y[i] = x[i] + z[i];\n'
'}\n'
'for (var i=0; i<m; i++){\n'
' for (var j=0; j<n; j++){\n'
' y[i] = y[i] + A[i*n + j]*x[j];\n'
' }\n'
'}'
)
c = jscode(A[i, j]*x[j] + x[i] + z[i], assign_to=y[i])
assert c == s
def test_jscode_loops_multiple_contractions():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m, o, p = symbols('n m o p', integer=True)
a = IndexedBase('a')
b = IndexedBase('b')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
k = Idx('k', o)
l = Idx('l', p)
s = (
'for (var i=0; i<m; i++){\n'
' y[i] = 0;\n'
'}\n'
'for (var i=0; i<m; i++){\n'
' for (var j=0; j<n; j++){\n'
' for (var k=0; k<o; k++){\n'
' for (var l=0; l<p; l++){\n'
' y[i] = y[i] + b[j*o*p + k*p + l]*a[i*n*o*p + j*o*p + k*p + l];\n'
' }\n'
' }\n'
' }\n'
'}'
)
c = jscode(b[j, k, l]*a[i, j, k, l], assign_to=y[i])
assert c == s
def test_jscode_loops_addfactor():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m, o, p = symbols('n m o p', integer=True)
a = IndexedBase('a')
b = IndexedBase('b')
c = IndexedBase('c')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
k = Idx('k', o)
l = Idx('l', p)
s = (
'for (var i=0; i<m; i++){\n'
' y[i] = 0;\n'
'}\n'
'for (var i=0; i<m; i++){\n'
' for (var j=0; j<n; j++){\n'
' for (var k=0; k<o; k++){\n'
' for (var l=0; l<p; l++){\n'
' y[i] = (a[i*n*o*p + j*o*p + k*p + l] + b[i*n*o*p + j*o*p + k*p + l])*c[j*o*p + k*p + l] + y[i];\n'
' }\n'
' }\n'
' }\n'
'}'
)
c = jscode((a[i, j, k, l] + b[i, j, k, l])*c[j, k, l], assign_to=y[i])
assert c == s
def test_jscode_loops_multiple_terms():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m, o, p = symbols('n m o p', integer=True)
a = IndexedBase('a')
b = IndexedBase('b')
c = IndexedBase('c')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
k = Idx('k', o)
s0 = (
'for (var i=0; i<m; i++){\n'
' y[i] = 0;\n'
'}\n'
)
s1 = (
'for (var i=0; i<m; i++){\n'
' for (var j=0; j<n; j++){\n'
' for (var k=0; k<o; k++){\n'
' y[i] = b[j]*b[k]*c[i*n*o + j*o + k] + y[i];\n'
' }\n'
' }\n'
'}\n'
)
s2 = (
'for (var i=0; i<m; i++){\n'
' for (var k=0; k<o; k++){\n'
' y[i] = b[k]*a[i*o + k] + y[i];\n'
' }\n'
'}\n'
)
s3 = (
'for (var i=0; i<m; i++){\n'
' for (var j=0; j<n; j++){\n'
' y[i] = b[j]*a[i*n + j] + y[i];\n'
' }\n'
'}\n'
)
c = jscode(b[j]*a[i, j] + b[k]*a[i, k] + b[j]*b[k]*c[i, j, k], assign_to=y[i])
assert (c == s0 + s1 + s2 + s3[:-1] or
c == s0 + s1 + s3 + s2[:-1] or
c == s0 + s2 + s1 + s3[:-1] or
c == s0 + s2 + s3 + s1[:-1] or
c == s0 + s3 + s1 + s2[:-1] or
c == s0 + s3 + s2 + s1[:-1])
| srjoglekar246/sympy | sympy/printing/tests/test_jscode.py | Python | bsd-3-clause | 8,432 | 0.010911 |
import logging
from functools import reduce  # reduce is not a builtin on Python 3; available in functools since 2.6
log = logging.getLogger(__name__)
def has_bin(arg):
"""
    Helper function that checks whether arg contains binary data
    :param arg: list | tuple | bytearray | dict | file-like object
:return: (bool)
"""
if type(arg) is list or type(arg) is tuple:
return reduce(lambda has_binary, item: has_binary or has_bin(item), arg, False)
if type(arg) is bytearray or hasattr(arg, 'read'):
return True
if type(arg) is dict:
return reduce(lambda has_binary, item: has_binary or has_bin(item), [v for k, v in arg.items()], False)
return False
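# Minimal usage sketch (illustrative values, not part of the original module):
#   has_bin([1, 'a', bytearray(b'\x00')])  -> True
#   has_bin({'payload': open('f', 'rb')})  -> True (file-like objects count)
#   has_bin(('plain', 'strings'))          -> False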
| shuoli84/gevent_socketio2 | socketio/__init__.py | Python | mit | 583 | 0.003431 |
import codecs
import os
import re
from setuptools import Command, find_packages, setup
here = os.path.abspath(os.path.dirname(__file__))
version = "0.0.0"
changes = os.path.join(here, "CHANGES.rst")
match = r"^#*\s*(?P<version>[0-9]+\.[0-9]+(\.[0-9]+)?)$"
with codecs.open(changes, encoding="utf-8") as changes_file:
    for line in changes_file:
res = re.match(match, line)
if res:
version = res.group("version")
break
# Get the long description
with codecs.open(os.path.join(here, "README.rst"), encoding="utf-8") as f:
long_description = f.read()
# Get changelog
with codecs.open(os.path.join(here, "CHANGES.rst"), encoding="utf-8") as f:
changelog = f.read()
install_requirements = ["simple-rest-client>=1.0.0"]
tests_requirements = ["pytest", "pytest-cov", "coveralls"]
class VersionCommand(Command):
description = "print library version"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
print(version)
setup(
name="vindi",
version=version,
description="Integração com API da Vindi (Python 3.6+)",
long_description=long_description,
url="https://github.com/allisson/python-vindi",
author="Allisson Azevedo",
author_email="allisson@gmail.com",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Software Development :: Libraries",
],
keywords="rest client http vindi",
packages=find_packages(exclude=["docs", "tests*"]),
setup_requires=["pytest-runner"],
install_requires=install_requirements,
tests_require=tests_requirements,
cmdclass={"version": VersionCommand},
)
| allisson/python-vindi | setup.py | Python | mit | 1,845 | 0 |
# -*- coding: utf-8 -*-
# © 2016 FactorLibre - Hugo Santos <hugo.santos@factorlibre.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import computed_purchase_order
| factorlibre/odoo-addons-cpo | purchase_compute_order_product_filter_season/models/__init__.py | Python | agpl-3.0 | 197 | 0 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bisect
import numpy as np
import bpy
from bpy.props import EnumProperty, FloatProperty, BoolProperty
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.data_structure import updateNode, dataCorrect, repeat_last
# spline function modifed from
# from looptools 4.5.2 done by Bart Crouch
# calculates natural cubic splines through all given knots
def cubic_spline(locs, tknots):
knots = list(range(len(locs)))
n = len(knots)
if n < 2:
return False
x = tknots[:]
result = []
for j in range(3):
a = []
for i in locs:
a.append(i[j])
h = []
for i in range(n-1):
if x[i+1] - x[i] == 0:
h.append(1e-8)
else:
h.append(x[i+1] - x[i])
q = [False]
for i in range(1, n-1):
q.append(3/h[i]*(a[i+1]-a[i]) - 3/h[i-1]*(a[i]-a[i-1]))
l = [1.0]
u = [0.0]
z = [0.0]
for i in range(1, n-1):
l.append(2*(x[i+1]-x[i-1]) - h[i-1]*u[i-1])
if l[i] == 0:
l[i] = 1e-8
u.append(h[i] / l[i])
z.append((q[i] - h[i-1] * z[i-1]) / l[i])
l.append(1.0)
z.append(0.0)
b = [False for i in range(n-1)]
c = [False for i in range(n)]
d = [False for i in range(n-1)]
c[n-1] = 0.0
for i in range(n-2, -1, -1):
c[i] = z[i] - u[i]*c[i+1]
b[i] = (a[i+1]-a[i])/h[i] - h[i]*(c[i+1]+2*c[i])/3
d[i] = (c[i+1]-c[i]) / (3*h[i])
for i in range(n-1):
result.append([a[i], b[i], c[i], d[i], x[i]])
splines = []
for i in range(len(knots)-1):
splines.append([result[i], result[i+n-1], result[i+(n-1)*2]])
return(splines)
def eval_spline(splines, tknots, t_in):
out = []
for t in t_in:
n = bisect.bisect(tknots, t, lo=0, hi=len(tknots))-1
if n > len(splines)-1:
n = len(splines)-1
if n < 0:
n = 0
pt = []
for i in range(3):
ax, bx, cx, dx, tx = splines[n][i]
x = ax + bx*(t-tx) + cx*(t-tx)**2 + dx*(t-tx)**3
pt.append(x)
out.append(pt)
return out
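# Illustrative usage (assumption: locs is a list of XYZ triples and tknots the
# matching normalized knot parameters, as built in SvInterpolationNodeMK2.interpol):
#   splines = cubic_spline(locs, tknots)
#   points = eval_spline(splines, tknots, [0.0, 0.25, 0.5, 1.0])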
class SvInterpolationNodeMK2(bpy.types.Node, SverchCustomTreeNode):
'''Vector Interpolate'''
bl_idname = 'SvInterpolationNodeMK2'
bl_label = 'Vector Interpolation mk2'
bl_icon = 'OUTLINER_OB_EMPTY'
t_in_x = FloatProperty(name="tU",
default=.5, min=0, max=1, precision=5,
update=updateNode)
t_in_y = FloatProperty(name="tV",
default=.5, min=0, max=1, precision=5,
update=updateNode)
defgrid = BoolProperty(name='default_grid', default=True,
update=updateNode)
regimes = [('P', 'Pattern', "Pattern", 0),
('G', 'Grid', "Grid", 1)]
regime = EnumProperty(name='regime',
default='G', items=regimes,
update=updateNode)
directions = [('UV', 'UV', "Two directions", 0),
('U', 'U', "One direction", 1)]
direction = EnumProperty(name='Direction',
default='U', items=directions,
update=updateNode)
modes = [('SPL', 'Cubic', "Cubic Spline", 0),
('LIN', 'Linear', "Linear Interpolation", 1)]
mode = EnumProperty(name='Mode',
default="SPL", items=modes,
update=updateNode)
def sv_init(self, context):
self.inputs.new('VerticesSocket', 'Vertices')
self.inputs.new('StringsSocket', 'IntervalX').prop_name = 't_in_x'
self.inputs.new('StringsSocket', 'IntervalY').prop_name = 't_in_y'
self.outputs.new('VerticesSocket', 'Vertices')
def draw_buttons(self, context, layout):
#pass
col = layout.column(align=True)
row = col.row(align=True)
row.prop(self, 'mode', expand=True)
row = col.row(align=True)
row.prop(self, 'regime', expand=True)
if self.regime == 'G':
row = col.row(align=True)
row.prop(self, 'direction', expand=True)
col.prop(self, 'defgrid')
def interpol(self, verts, t_ins):
verts_out = []
for v, t_in in zip(verts, repeat_last(t_ins)):
pts = np.array(v).T
tmp = np.apply_along_axis(np.linalg.norm, 0, pts[:, :-1]-pts[:, 1:])
t = np.insert(tmp, 0, 0).cumsum()
t = t/t[-1]
t_corr = [min(1, max(t_c, 0)) for t_c in t_in]
# this should also be numpy
if self.mode == 'LIN':
out = [np.interp(t_corr, t, pts[i]) for i in range(3)]
verts_out.append(list(zip(*out)))
else: # SPL
spl = cubic_spline(v, t)
out = eval_spline(spl, t, t_corr)
verts_out.append(out)
return verts_out
def process(self):
if not any(s.is_linked for s in self.outputs):
return
if self.inputs['Vertices'].is_linked:
verts = self.inputs['Vertices'].sv_get()
verts = dataCorrect(verts)
t_ins_x = self.inputs['IntervalX'].sv_get()
t_ins_y = self.inputs['IntervalY'].sv_get()
if self.regime == 'P' and self.direction == 'U':
self.direction = 'UV'
if self.defgrid:
t_ins_x = [[i/10 for i in range(11)]]
t_ins_y = [[i/10 for i in range(11)]]
if self.regime == 'G':
vertsX = self.interpol(verts, t_ins_x)
if self.direction == 'UV':
verts_T = np.swapaxes(np.array(vertsX),0,1).tolist()
verts_out = self.interpol(verts_T, t_ins_y)
else:
verts_out = vertsX
else:
verts_out_ = []
for x,y in zip(t_ins_x[0],t_ins_y[0]):
vertsX = self.interpol(verts, [[x]])
verts_T = np.swapaxes(np.array(vertsX),0,1).tolist()
vertsY = self.interpol(verts_T, [[y]])
verts_out_.extend(vertsY)
verts_out = [[i[0] for i in verts_out_]]
self.outputs['Vertices'].sv_set(verts_out)
def register():
bpy.utils.register_class(SvInterpolationNodeMK2)
def unregister():
bpy.utils.unregister_class(SvInterpolationNodeMK2)
| elfnor/sverchok | nodes/vector/interpolation_mk2.py | Python | gpl-3.0 | 7,301 | 0.003013 |
from functions import global_functions
def debug_step_here():
"""
    Creates a breakpoint when executed in debug-mode
    :return: None
"""
if global_functions.get_config_key("debug"):
try:
input("Press any key to continue")
        except SyntaxError:
            # Python 2's input() evaluates the typed text, so an empty line
            # raises SyntaxError; swallow it and continue
            pass
def print_out_var(variable):
"""
Prints out a variable when executed in debug-mode
    :param variable: the value to print
    :return: None
"""
if global_functions.get_config_key("debug"):
print(variable)
debug_step_here()
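# Minimal usage sketch (illustrative; assumes the "debug" config key is set):
#   print_out_var(some_value)  # prints the value, then waits for a key press
#   debug_step_here()          # waits for a key press before continuing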
| Fahrenholz/maat-analyzer | functions/debugging_functions.py | Python | gpl-3.0 | 537 | 0.005587 |
import os.path
import urllib.parse
import requests
import rfc6266
import settings
import utilities
from markdown import Extension
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE
class ImageDownloadPattern(ImagePattern):
def handleMatch(self, match):
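        # Build the <img> element via the stock ImagePattern, then download
        # any remote src into settings.get("folder"), rewrite src to the bare
        # local filename and normalize the image with utilities.fix_image.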
el = super(ImageDownloadPattern, self).handleMatch(match)
urlparts = urllib.parse.urlparse(el.attrib["src"])
if urlparts.netloc:
response = requests.get(urlparts.geturl())
response.raise_for_status()
filename = rfc6266.parse_requests_response(response).filename_unsafe
with open(os.path.join(settings.get("folder"), filename), "wb") as f:
f.write(response.content)
el.attrib["src"] = filename
utilities.fix_image(os.path.join(settings.get("folder"), filename), settings.get("features")["width"])
return el
class ImageDownload(Extension):
def extendMarkdown(self, md, md_globals):
md.inlinePatterns['image_link'] = ImageDownloadPattern(IMAGE_LINK_RE, md)
def makeExtension(configs={}):
return ImageDownload(configs=configs)
| Tigge/trello-to-web | markdown_imaged.py | Python | mit | 1,141 | 0.003506 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for rsyslog state checks."""
from grr.lib import flags
from grr.lib import test_lib
from grr.lib.checks import checks_test_lib
from grr.parsers import config_file
class RsyslogCheckTests(checks_test_lib.HostCheckTest):
"""Test the rsyslog checks."""
@classmethod
def setUpClass(cls):
cls.LoadCheck("rsyslog.yaml")
cls.parser = config_file.RsyslogParser()
def testLoggingAuthRemoteOK(self):
chk_id = "CIS-LOGGING-AUTH-REMOTE"
test_data = {
"/etc/rsyslog.conf":
"*.* @@tcp.example.com.:514;RSYSLOG_ForwardFormat"
}
host_data = self.GenFileData("LinuxRsyslogConfigs", test_data, self.parser)
results = self.RunChecks(host_data)
self.assertCheckUndetected(chk_id, results)
def testLoggingAuthRemoteFail(self):
chk_id = "CIS-LOGGING-AUTH-REMOTE"
test_data = {"/etc/rsyslog.conf": "*.* /var/log/messages"}
host_data = self.GenFileData("LinuxRsyslogConfigs", test_data, self.parser)
sym = "Missing attribute: No remote destination for auth logs."
found = ["Expected state was not found"]
results = self.RunChecks(host_data)
self.assertCheckDetectedAnom(chk_id, results, sym, found)
def testLoggingFilePermissions(self):
chk_id = "CIS-LOGGING-FILE-PERMISSIONS"
ro = self.CreateStat("/test/ro", 0, 0, 0o0100640)
rw = self.CreateStat("/test/rw", 0, 0, 0o0100666)
sym = "Found: Log configurations can be modified by non-privileged users."
found = ["/test/rw user: 0, group: 0, mode: -rw-rw-rw-"]
results = self.GenResults(["LinuxRsyslogConfigs"], [[ro, rw]])
self.assertCheckDetectedAnom(chk_id, results, sym, found)
def main(argv):
test_lib.GrrTestProgram(argv=argv)
if __name__ == "__main__":
flags.StartMain(main)
| pidydx/grr | grr/checks/rsyslog_test.py | Python | apache-2.0 | 1,801 | 0.004442 |
import logging
# This is a minimal configuration to get you started with the Text mode.
# If you want to connect Errbot to chat services, checkout
# the options in the more complete config-template.py from here:
# https://raw.githubusercontent.com/errbotio/errbot/master/errbot/config-template.py
BACKEND = 'Text' # Errbot will start in text mode (console only mode) and will answer commands from there.
BOT_DATA_DIR = r'D:\Work\Python\learn-python\cases\errbot\data'
BOT_EXTRA_PLUGIN_DIR = r'D:\Work\Python\learn-python\cases\errbot\plugins'
BOT_LOG_FILE = r'D:\Work\Python\learn-python\cases\errbot\errbot.log'
BOT_LOG_LEVEL = logging.DEBUG
BOT_ADMINS = ('CHANGE ME', ) # !! Don't leave that to "CHANGE ME" if you connect your errbot to a chat system !! | PeytonXu/learn-python | cases/errbot/config.py | Python | mit | 761 | 0.011827 |
#!/usr/bin/env python
##
# This file is part of the uSherpa Python Library project
#
# Copyright (C) 2012 Stefan Wendler <sw@kaltpost.de>
#
# The uSherpa Python Library is free software; you can redistribute
# it and/or modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# uSherpa Python Library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with the JSherpa firmware; if not, write to the Free
# Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# 02111-1307 USA.
##
'''
uSherpa Python Library setup-script. To install this library use:
sudo python setup.py install
'''
from distutils.core import setup
setup(name='pysherpa',
version='0.1',
description='uSherpa Python Library',
long_description='Client library for Python to use MCU running uSherpa Firmware. Depends on pyserial.',
author='Stefan Wendler',
author_email='sw@usherpa.org',
url='http://www.usherpa.org/',
license='LGPL 2.1',
packages=['usherpa'],
platforms=['Linux'],
package_dir = {'': 'src'},
requires = ['serial(>=2.4)']
)
| wendlers/usherpa-pysherpa | setup.py | Python | lgpl-2.1 | 1,459 | 0.023304 |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import string
import re
import sass
import json
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from django.conf import settings
from django.contrib.gis.geos import Polygon
from django.core.exceptions import ValidationError
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from stormwater.models import PolygonalMapFeature
from treemap.models import User, Species, StaticPage, Instance, Boundary
from treemap.plugin import get_viewable_instances_filter
from treemap.lib.user import get_audits, get_audits_params
from treemap.lib import COLOR_RE
from treemap.lib.perms import map_feature_is_creatable
from treemap.units import get_unit_abbreviation, get_units
from treemap.util import leaf_models_of_class
_SCSS_VAR_NAME_RE = re.compile('^[_a-zA-Z][-_a-zA-Z0-9]*$')
def edits(request, instance):
"""
Request a variety of different audit types.
Params:
- models
Comma separated list of models (only Tree and Plot are supported)
- model_id
        The ID of a specific model. If specified, models must also
be defined and have only one model
- user
Filter by a specific user
- exclude (default: true)
Set to false to ignore edits that are currently pending
- page_size
Size of each page to return (up to PAGE_MAX)
- page
The page to return
"""
params = get_audits_params(request)
user_id = request.GET.get('user', None)
user = None
if user_id is not None:
user = User.objects.get(pk=user_id)
return get_audits(request.user, instance, request.GET.copy(), user,
**params)
def index(request, instance):
return HttpResponseRedirect(reverse('map', kwargs={
'instance_url_name': instance.url_name}))
def get_map_view_context(request, instance):
if request.user and not request.user.is_anonymous():
iuser = request.user.get_effective_instance_user(instance)
resource_classes = [resource for resource in instance.resource_classes
if map_feature_is_creatable(iuser, resource)]
else:
resource_classes = []
context = {
'fields_for_add_tree': [
(_('Tree Height'), 'Tree.height')
],
'resource_classes': resource_classes,
'only_one_resource_class': len(resource_classes) == 1,
'polygon_area_units': get_unit_abbreviation(
get_units(instance, 'greenInfrastructure', 'area')),
'q': request.GET.get('q'),
}
add_map_info_to_context(context, instance)
return context
def add_map_info_to_context(context, instance):
all_polygon_types = {c.map_feature_type
for c in leaf_models_of_class(PolygonalMapFeature)}
my_polygon_types = set(instance.map_feature_types) & all_polygon_types
context['has_polygons'] = len(my_polygon_types) > 0
context['has_boundaries'] = instance.boundaries.exists()
def static_page(request, instance, page):
static_page = StaticPage.get_or_new(instance, page)
return {'content': static_page.content,
'title': static_page.name}
def boundary_to_geojson(request, instance, boundary_id):
boundary = get_object_or_404(Boundary.all_objects, pk=boundary_id)
geom = boundary.geom
# Leaflet prefers to work with lat/lng so we do the transformation
# here, since it way easier than doing it client-side
geom.transform('4326')
return HttpResponse(geom.geojson)
def add_anonymous_boundary(request):
request_dict = json.loads(request.body)
srid = request_dict.get('srid', 4326)
polygon = Polygon(request_dict.get('polygon', []), srid=srid)
if srid != 3857:
polygon.transform(3857)
b = Boundary.anonymous(polygon)
b.save()
return {'id': b.id}
def boundary_autocomplete(request, instance):
max_items = request.GET.get('max_items', None)
boundaries = instance.boundaries \
.filter(searchable=True) \
.order_by('sort_order', 'name')[:max_items]
return [{'name': boundary.name,
'category': boundary.category,
'id': boundary.pk,
'value': boundary.name,
'tokens': boundary.name.split(),
'sortOrder': boundary.sort_order}
for boundary in boundaries]
def species_list(request, instance):
max_items = request.GET.get('max_items', None)
species_qs = instance.scope_model(Species)\
.order_by('common_name')\
.values('common_name', 'genus', 'species', 'cultivar',
'other_part_of_name', 'id')
if max_items:
species_qs = species_qs[:max_items]
# Split names by space so that "el" will match common_name="Delaware Elm"
def tokenize(species):
names = (species['common_name'],
species['genus'],
species['species'],
species['cultivar'],
species['other_part_of_name'])
tokens = set()
for name in names:
if name:
tokens = tokens.union(name.split())
# Names are sometimes in quotes, which should be stripped
return {token.strip(string.punctuation) for token in tokens}
def annotate_species_dict(sdict):
sci_name = Species.get_scientific_name(sdict['genus'],
sdict['species'],
sdict['cultivar'],
sdict['other_part_of_name'])
display_name = "%s [%s]" % (sdict['common_name'],
sci_name)
        tokens = tokenize(sdict)
sdict.update({
'scientific_name': sci_name,
'value': display_name,
'tokens': tokens})
return sdict
return [annotate_species_dict(species) for species in species_qs]
def compile_scss(request):
"""
Reads key value pairs from the query parameters and adds them as scss
variables with color values, then imports the main entry point to our scss
file.
Any variables provided will be put in the scss file, but only those which
override variables with '!default' in our normal .scss files should have
any effect
"""
# Webpack and libsass have different opinions on how url(...) works
scss = "$staticUrl: '/static/';\n"
# We can probably be a bit looser with what we allow here in the future if
# we need to, but we must do some checking so that libsass doesn't explode
for key, value in request.GET.items():
if _SCSS_VAR_NAME_RE.match(key) and COLOR_RE.match(value):
scss += '$%s: #%s;\n' % (key, value)
elif key == 'url':
# Ignore the cache-buster query parameter
continue
else:
raise ValidationError("Invalid SCSS values %s: %s" % (key, value))
scss += '@import "%s";' % settings.SCSS_ENTRY
scss = scss.encode('utf-8')
return sass.compile(string=scss, include_paths=[settings.SCSS_ROOT])
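# Illustrative compile_scss input (assumed query string): "?primary-color=ff0000"
# produces the scss source
#   $staticUrl: '/static/';
#   $primary-color: #ff0000;
#   @import "<settings.SCSS_ENTRY>";
# which libsass then compiles against settings.SCSS_ROOT.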
def public_instances_geojson(request):
def instance_geojson(instance):
return {
'type': 'Feature',
'geometry': {
'type': 'Point',
'coordinates': [instance.center_lat_lng.x,
instance.center_lat_lng.y]
},
'properties': {
'name': instance.name,
'url': reverse(
'instance_index_view',
kwargs={'instance_url_name': instance.url_name}),
'plot_count': instance.plot_count()
}
}
instances = (Instance.objects
.filter(is_public=True)
.filter(get_viewable_instances_filter()))
return [instance_geojson(instance) for instance in instances]
def error_page(status_code):
template = '%s.html' % status_code
def inner_fn(request):
reasons = {
404: _('URL or resource not found'),
500: _('An unhandled error occured'),
503: _('Resource is temporarily unavailable')
}
# API requests with an unhandled error should return JSON, not HTML
if ((request.path.startswith('/api/') or
'application/json' in request.META.get('HTTP_ACCEPT', ''))):
response = HttpResponse(json.dumps(
{'status': 'Failure', 'reason': reasons[status_code]}),
content_type='application/json')
else:
response = render(request, template)
response.status_code = status_code
return response
return inner_fn
| maurizi/otm-core | opentreemap/treemap/views/misc.py | Python | agpl-3.0 | 8,998 | 0 |
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "ur_description"
PROJECT_SPACE_DIR = "/home/us-robot/catkin_ws/install"
PROJECT_VERSION = "1.2.0"
| robotic-ultrasound-image-system/ur5 | build/ur5-master/universal_robot-kinetic-devel/ur_description/catkin_generated/pkg.installspace.context.pc.py | Python | apache-2.0 | 380 | 0 |
#! /usr/bin/env python
# Copyright (C) 2005
# ASTRON (Netherlands Institute for Radio Astronomy)
# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
#
# This file is part of the LOFAR software suite.
# The LOFAR software suite is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The LOFAR software suite is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
#
# $Id$
# makeClass.py: Script to make default class files in a Package/srcdir in the
# LOFAR development tree. normal class files, main program and templates
# are covered
#
# Usage:
# ./makeClass [-h] [-t list [-d] | -m] [ClassName]
# Args:
# ClassName The name of the Class that will be created
# h,--help usage
# t,--templated list This is an automated templated class,
# list can contain a comma seperated list
# with the template parameters. Example:
# makeClass -t T,U className
# d,--diy Do it yourself (manual template instanciation)
#   d,--diy            Do it yourself (manual template instantiation)
# m,--main This is a main program for a class
#
# Revisions:
#
# 26-01-2005 Initial Release.
#
# import all packages we need
#
import os
import sys
import getopt
import re
from datetime import date
def openFile(name,mode):
try:
file = open (name,mode)
except IOError, message:
sys.exit("Error opening file: %s" % message)
return file
def replacePackageAndClassName(readFile,writeFile,packageName,
className,subDirName):
aLine=readFile.readline()
year=`date.today().year`
while aLine != "":
#set start of copyright year
if aLine.find("%YEAR%") > -1:
aLine = str.replace(aLine,"%YEAR%",year)
# replace SUB with Subdir when needed
if aLine.find("%SUB%") > -1:
if subDirName != "":
aLine = str.replace(aLine,"%SUB%",subDirName+"/")
else:
aLine = str.replace(aLine,"%SUB%",subDirName)
# replace SUBUPPER with Subdir in uppercase when needed
if aLine.find("%SUBUPPER%") > -1:
if subDirName != "":
aLine = str.replace(aLine,"%SUBUPPER%",subDirName.upper()+"_")
else:
aLine = str.replace(aLine,"%SUBUPPER%",subDirName.upper())
# replace PACKAGE with real name
if aLine.find("%PACKAGE%") > -1:
aLine = str.replace(aLine,"%PACKAGE%",packageName)
# replace PACKAGEUPPER with uppercase Package name
if aLine.find("%PACKAGEUPPER%") > -1:
aLine = str.replace(aLine,"%PACKAGEUPPER%",packageName.upper())
# replace CLASS with real name
if aLine.find("%CLASS%") > -1:
aLine = str.replace(aLine,"%CLASS%",className)
# replace CLASSUPPER with uppercase classname
if aLine.find("%CLASSUPPER%") > -1:
aLine = str.replace(aLine,"%CLASSUPPER%",className.upper())
writeFile.write(aLine)
aLine=readFile.readline()
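# Illustrative substitution (assumed template fragment): with packageName
# "Tools", subDirName "" and className "Foo", a template line such as
#   #ifndef %PACKAGEUPPER%_%SUBUPPER%%CLASSUPPER%_H
# comes out as "#ifndef TOOLS_FOO_H".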
def addTemplates(type,readFile,writeFile,className,packageName,templateList,autoTemplate,subDirName):
aLine=readFile.readline()
year=`date.today().year`
while aLine != "":
#set start of copyright year
if aLine.find("%YEAR%") > -1:
aLine = str.replace(aLine,"%YEAR%",year)
# replace SUB with Subdir when needed
if aLine.find("%SUB%") > -1:
if subDirName != "":
aLine = str.replace(aLine,"%SUB%",subDirName+"/")
else:
aLine = str.replace(aLine,"%SUB%",subDirName)
# replace SUBUPPER with Subdir in uppercase when needed
if aLine.find("%SUBUPPER%") > -1:
if subDirName != "":
aLine = str.replace(aLine,"%SUBUPPER%",subDirName.upper()+"_")
else:
aLine = str.replace(aLine,"%SUBUPPER%",subDirName.upper())
# replace PACKAGE with real name
if aLine.find("%PACKAGE%") > -1:
aLine= str.replace(aLine,"%PACKAGE%",packageName)
# replace PACKAGEUPPER with uppercase Package name
if aLine.find("%PACKAGEUPPER%") > -1:
aLine = str.replace(aLine,"%PACKAGEUPPER%",packageName.upper())
# replace CLASS with real name
if aLine.find("%CLASS%") > -1:
aLine = str.replace(aLine,"%CLASS%",className)
# replace CLASSUPPER with uppercase classname
if aLine.find("%CLASSUPPER%") > -1:
aLine = str.replace(aLine,"%CLASSUPPER%",className.upper())
tmpltype = "<"
tmplparm = "<"
i=0
while i < len(templateList):
if i > 0:
tmpltype += ", "
tmplparm += ","
tmpltype += "typename " + templateList[i]
tmplparm += templateList[i]
i+=1
tmpltype += ">"
tmplparm += ">"
# replace TEMPLATETYPE and TEMPLATEPARAM
if aLine.find("%TEMPLATETYPE%") > -1:
aLine = str.replace(aLine,"%TEMPLATETYPE%",tmpltype)
if aLine.find("%TEMPLATEPARAM%") > -1:
aLine = str.replace(aLine,"%TEMPLATEPARAM%",tmplparm)
# Check if !diy, template and .h file, if so include tcc in header file
if aLine.find("%INCLUDETCC%") > -1:
incstr = ""
if autoTemplate == 1:
if subDirName != "":
incstr = "#include <"+packageName+"/"+subDirName+"/"+className+".tcc>"
else:
incstr = "#include <"+packageName+"/"+className+".tcc>"
aLine = str.replace(aLine,"%INCLUDETCC%",incstr)
writeFile.write(aLine)
aLine=readFile.readline()
def makeDefaultClass(lofarDir,className,packageName,srcDir,incDir,subDirName):
# default.h file
readFile=openFile(lofarDir+"/LCS/Tools/src/templates/header.h_template","r")
incHDir=incDir
if subDirName != "":
incHDir = incDir+"/"+subDirName
writeFile=openFile(incHDir+"/"+className+".h","w")
replacePackageAndClassName(readFile,writeFile,packageName,className,subDirName)
writeFile.close()
readFile.close()
addToMakefile("h",packageName,className,incDir,subDirName)
#default.cc file
readFile=openFile(lofarDir+"/LCS/Tools/src/templates/header.cc_template","r")
writeFile=openFile(className+".cc","w")
replacePackageAndClassName(readFile,writeFile,packageName,className,subDirName)
writeFile.close()
readFile.close()
addToMakefile("cc",packageName,className,srcDir,subDirName)
def makeTemplatedClass(lofarDir,className,packageName,templateList,autoTemplate,srcDir,incDir,subDirName):
#default h file
readFile=openFile(lofarDir+"/LCS/Tools/src/templates/templated_header.h_template","r")
incHDir=incDir
if subDirName != "":
incHDir = incDir+"/"+subDirName
writeFile=openFile(incHDir+"/"+className+".h","w")
addTemplates("h",readFile,writeFile,className,packageName,templateList,autoTemplate,subDirName)
writeFile.close()
readFile.close()
addToMakefile("h",packageName,className,incDir,subDirName)
#default tcc template file
readFile=openFile(lofarDir+"/LCS/Tools/src/templates/templated_header.tcc_template","r")
writeFile=openFile(incHDir+"/"+className+".tcc","w")
addTemplates("tcc",readFile,writeFile,className,packageName,templateList,autoTemplate,subDirName)
writeFile.close()
readFile.close()
addToMakefile("tcc",packageName,className,incDir,subDirName)
if autoTemplate==0:
#default diy-cc template file
readFile=openFile(lofarDir+"/LCS/Tools/src/templates/templated_header.cc_template","r")
writeFile=openFile(className+".cc","w")
addTemplates("diy",readFile,writeFile,className,packageName,templateList,autoTemplate,subDirName)
writeFile.close()
readFile.close()
addToMakefile("diy",packageName,className,srcDir,subDirName)
def makeMainClass(lofarDir,className,packageName,srcDir,subDirName):
readFile=openFile(lofarDir+"/LCS/Tools/src/templates/main.cc_template","r")
writeFile=openFile(className+"Main.cc","w")
replacePackageAndClassName(readFile,writeFile,packageName,className,subDirName)
writeFile.close()
readFile.close()
addToMakefile("maincc",packageName,className+"Main",srcDir,subDirName)
def addToMakefile(type,packageName,className,srcDir,subDirName):
hPattern=re.compile('^([ \t]*)INSTHDRS[ \t]*=.*$',re.IGNORECASE)
ccPattern=re.compile('^(.*)_la_SOURCES[ \t]*=.*$',re.IGNORECASE)
mainccPattern=re.compile('^(.*)bin_PROGRAMS[ \t]*=.*$',re.IGNORECASE)
tccPattern=re.compile('^([ \t]*)TCCHDRS[ \t]*=.*$',re.IGNORECASE)
os.rename(srcDir+"/Makefile.am",srcDir+"/Makefile.am.old")
readFile=openFile(srcDir+"/Makefile.am.old","r")
writeFile=openFile(srcDir+"/Makefile.am","w")
searchEnd=0
aLine=readFile.readline()
while aLine != "":
if subDirName != "":
extendedClassName=subDirName+"/"+className
else:
extendedClassName=className
if type == "h":
# find INSTHDRS to start inserting headerfiles
if hPattern.search(aLine):
#find / to see if the line already contains another header
front,end = aLine.split("=")
if re.search("[a-zA-Z]",end):
writeFile.write(front+" = "+extendedClassName+".h \\\n")
writeFile.write("\t"+end)
elif end.find('\\') > -1:
writeFile.write(front+" = "+extendedClassName+".h \\\n")
else :
writeFile.write(front+" = "+extendedClassName+".h\n")
else:
writeFile.write(aLine)
elif type == "cc" or type == "diy":
# find _la_SOURCES to start inserting sourcefiles
if ccPattern.search(aLine):
#find / to see if the line already contains another source
front,end = aLine.split("=")
if re.search("[a-zA-Z]",end):
writeFile.write(front+" = "+extendedClassName+".cc \\\n")
writeFile.write("\t\t"+end)
elif end.find('\\') > -1:
writeFile.write(front+" = "+extendedClassName+".cc \\\n")
else :
writeFile.write(front+" = "+extendedClassName+".cc\n")
else:
writeFile.write(aLine)
elif type == "maincc":
pkgLower=packageName.lower()
# find bin_PROGRAMS to start inserting mainsourcefiles
# they are inserted in reverse order
if mainccPattern.search(aLine):
front,end = aLine.split("=")
# the line already contains another source, so that becomes next line
if re.search("[a-zA-Z]",end):
writeFile.write(front+" = "+extendedClassName+" \\\n")
writeFile.write("\t"+end)
if end.find('\\') > -1:
# a backslash, so search further (for more program names)
searchEnd=1
elif end.find('\\') > -1:
# only a backslash (no name), so write and search further
writeFile.write(front+"="+end);
writeFile.write("\t"+extendedClassName+" \\\n")
searchEnd=1
else:
# nothing yet, so write program name
writeFile.write(front+" = "+extendedClassName+"\n")
if searchEnd == 0:
writeFile.write("\n")
writeFile.write(className+"_SOURCES = "+extendedClassName+".cc\n")
writeFile.write(className+"_LDADD = lib"+pkgLower+".la\n")
writeFile.write(className+"_DEPENDENCIES = lib"+pkgLower+".la $(LOFAR_DEPEND)\n")
elif searchEnd > 0:
# there have been other mainprograms, so we need to look
# for the last program name (thus without backslash).
writeFile.write(aLine)
if aLine.find('\\') < 0:
writeFile.write("\n")
writeFile.write(className+"_SOURCES = "+extendedClassName+".cc\n")
writeFile.write(className+"_LDADD = lib"+pkgLower+".la\n")
writeFile.write(className+"_DEPENDENCIES = lib"+pkgLower+".la $(LOFAR_DEPEND)\n")
searchEnd=0
else:
writeFile.write(aLine)
elif type == "tcc":
# find TCCHDRS to start inserting templatefiles
if tccPattern.search(aLine):
#find / to see if the line already contains another source
front,end = aLine.split("=")
if re.search("[a-zA-Z]",end):
writeFile.write(front+" = "+extendedClassName+".tcc \\\n")
writeFile.write("\t"+end)
elif end.find('\\') > -1:
writeFile.write(front+" = "+extendedClassName+".tcc \\\n")
else :
writeFile.write(front+" = "+extendedClassName+".tcc\n")
else:
writeFile.write(aLine)
else:
writeFile.write(aLine)
aLine=readFile.readline()
writeFile.close()
readFile.close()
os.unlink(srcDir+"/Makefile.am.old")
def usage():
print "usage: "+sys.argv[0]+" [-h] [-m | -t list [-d]] className [className...]"
print "args: -h,--help - print usage"
print " -m,--main - make main program for a class"
print " -t,--templated list - automated templated class"
print " list can contain a comma seperated list"
print " with the template parameters. Example:"
print " makeClass -t T,U className"
print " -d,--diy - Do it yourself (manual template "
print " instanciation) Only together with -t"
print " className [className...]- name of the class(es) to be created."
sys.exit(2)
def main(argv):
noMain=1
noTemplated=1
autoTemplate=1
className = "None"
#
# get Lofar base dir
#
file = os.popen("echo $PWD | sed -e 's%/LOFAR/.*%/LOFAR%'")
lofarDir = str.replace(file.readline(),"\n","")
file.close()
baseDir = os.environ["PWD"]
subDirName = ""
packageName = ""
srcDir = ""
# look if we are in a subdir within src
if baseDir.find("src") > -1 :
if os.path.basename(os.path.dirname(baseDir)) == "src":
srcDir,subDirName=os.path.split(baseDir)
packageName=os.path.basename(os.path.dirname(srcDir))
elif os.path.split(baseDir)[1] != "src":
print "Sorry, only one level of subdirs is allowed in src."
usage()
else:
packageName=os.path.basename(os.path.dirname(baseDir))
srcDir=baseDir
else:
print "You have to be in the srcdir or one of its subdirs to run this program."
usage()
try:
opts, args = getopt.getopt(argv, "hdmt:",
["help","diy","templated=","main"])
except getopt.GetoptError:
usage()
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
elif opt in ("-m", "--main"):
noMain = 0
elif opt in ("-t", "--templated"):
noTemplated = 0
templateList = str.split(arg,',')
elif opt in ("-d", "--diy"):
autoTemplate = 0
if len(args) <= 0 and className == "None":
usage()
if noTemplated==0 and noMain==0:
print "Sorry, no facility to generate a templated mainfile (yet)."
usage()
if len(sys.argv) < 1:
usage()
if autoTemplate==0 and noTemplated==1:
print "Diy only makes sense in templated class."
print "I will forget you gave this option, and continue.."
# See if an include/PACKAGE directory exists.
# If so, use that for the .h and .tcc files.
# Create possible subdirectory if needed.
incDir = os.path.dirname(srcDir)+"/include/"+packageName
hdrDir = incDir
if not os.path.exists(incDir):
incDir = srcDir
hdrDir = srcDir
if subDirName != "":
hdrDir = incDir+"/"+subDirName
else:
if subDirName != "":
hdrDir = incDir+"/"+subDirName
if not os.path.exists(hdrDir):
os.makedirs(hdrDir)
print "Created subdirectory "+hdrDir
#
# Make a backup from the Original Makefiles
#
os.system("cp "+srcDir+"/Makefile.am "+srcDir+"/Makefile.am.save")
if incDir != srcDir:
os.system("cp "+incDir+"/Makefile.am "+incDir+"/Makefile.am.save")
for className in args:
#
# print info
#
if noMain and noTemplated:
print "Trying to set up default class " + className + " for package " + packageName
if noMain and noTemplated==0:
print "Trying to set up default templated class " + className + " for package " + packageName
if templateList == "":
print "No templates provided, so only default template class will be created."
if noMain==0:
print "Trying to set up main class program " + className + " for package " + packageName
#
# Check of given class name already exists in the working directory as
# directory or as file
#
if noMain:
if os.path.isfile(hdrDir+"/"+className+".h"):
print "Sorry, that class already exists. Please take another name"
sys.exit(1)
else:
if os.path.isfile(className+"Main.cc"):
print "Sorry, that name already exists. Please take another one"
sys.exit(1)
if os.path.isfile(hdrDir+"/"+className+".h") == 0:
print "WARNING: the base classes for which you are creating a Mainprogram"
print " are not available yet."
print " please remember that you have to create them.\n"
#
# Create all initial files from templates
#
if noMain and noTemplated:
makeDefaultClass(lofarDir,className,packageName,srcDir,incDir,subDirName)
if noMain and noTemplated==0:
makeTemplatedClass(lofarDir,className,packageName,templateList,autoTemplate,srcDir,incDir,subDirName)
if noMain==0:
makeMainClass(lofarDir,className,packageName,srcDir,subDirName)
#
# this is the main entry
#
if __name__ == "__main__":
main(sys.argv[1:])
print "Done"
| jjdmol/LOFAR | LCS/Tools/src/makeClass.py | Python | gpl-3.0 | 17,838 | 0.029432 |
# (C) British Crown Copyright 2016, Met Office
#
# This file is part of Biggus.
#
# Biggus is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Biggus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Biggus. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for `biggus._init`"""
from __future__ import absolute_import, division, print_function
from six.moves import (filter, input, map, range, zip) # noqa
| pelson/biggus | biggus/tests/unit/init/__init__.py | Python | gpl-3.0 | 882 | 0 |
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import css_checker
import html_checker
import js_checker
import resource_checker
def IsResource(f):
return f.LocalPath().endswith(('.html', '.css', '.js'))
def CheckStyle(input_api, output_api, file_filter=lambda f: True):
apis = input_api, output_api
wrapped_filter = lambda f: file_filter(f) and IsResource(f)
checkers = [
css_checker.CSSChecker(*apis, file_filter=wrapped_filter),
html_checker.HtmlChecker(*apis, file_filter=wrapped_filter),
js_checker.JSChecker(*apis, file_filter=wrapped_filter),
resource_checker.ResourceChecker(*apis, file_filter=wrapped_filter),
]
results = []
for checker in checkers:
results.extend(checker.RunChecks())
return results
def CheckStyleESLint(input_api, output_api):
is_js = lambda f: f.LocalPath().endswith('.js')
js_files = input_api.AffectedFiles(file_filter=is_js, include_deletes=False)
if not js_files:
return []
return js_checker.JSChecker(input_api, output_api).RunEsLintChecks(js_files)
def DisallowIncludes(input_api, output_api, msg):
return resource_checker.ResourceChecker(
input_api, output_api, file_filter=IsResource).DisallowIncludes(msg)
| endlessm/chromium-browser | tools/web_dev_style/presubmit_support.py | Python | bsd-3-clause | 1,332 | 0.010511 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sentinel.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| MikaelSchultz/dofiloop-sentinel | sentinel/manage.py | Python | mit | 257 | 0 |
# Extension imports
from flask.ext.wtf import Form, RecaptchaField
from flask_wtf.html5 import TelField, IntegerField
from wtforms import StringField, PasswordField, BooleanField, SubmitField, TextAreaField, SelectField, DecimalField
from wtforms.validators import Email, DataRequired, EqualTo
#from flask_wtf.file import FileField
from flask_wtf.file import FileField, FileRequired, FileAllowed # for file upload(s)
####TODO: Import app object
#from [[app_name]] import app
import db_ops
#######################################################################################################################
# Basic template for a form class
#class [[Form_Class_Name]](Form):
# Create form fields
####TODO: ...
## Use fields and validators imported above
## Submitfield does not need a validator
## An example is shown for SelectField which loads the field's options from a database. Sample case uses 'categories'
## An example for FileField is shown below; useful for image uploads
#[[form_field_name_variable]] = [[FieldType]]('[[Field label]]', validators=[[[List_of_Field_Validators,...]]])
# SelectField Example
# load categories from DB
#catgs = [(category.name, category.name) for category in db_ops.ret_all(db_ops.Category)]
#category_fld = SelectField('Category', choices=catgs, validators=[DataRequired()])
# FileField example
#img_fld = FileField('Upload a Profile Photo', \
# validators=[FileAllowed(app.config['IMG_ALLOWED_EXTENSIONS'], 'Images only!')])
#contact_no_fld = TelField('Telephone: ')
# User login form
class LoginForm(Form):
# openid = StringField('openid', validators=[DataRequired()])
username_fld = StringField('Username or Email: ', validators=[DataRequired()])
password_fld = PasswordField('Password: ', validators=[DataRequired()])
remember_me_chkbx = BooleanField('Remember me', default=False)
login_btn = SubmitField('Sign in!')
# New user signup form
class RegForm(Form):
username_fld = StringField('Username: ', validators=[DataRequired()]) # Text-Field: First name
email_fld = StringField('Email: ', validators = [DataRequired()]) # Text-Field: Email
conf_email_fld = StringField('Confirm Email: ', validators = [DataRequired()]) # Text-Field: Retype/Confirm Email
password_fld = PasswordField('Password: ', validators=[DataRequired()]) # Text(Password)-Field: Password
conf_password_fld = PasswordField('Confirm Password: ', validators=[DataRequired()]) # Text(Password)-Field: Retype/Confirm Password
#recap_fld = RecaptchaField() # Recaptcha code verification
#subscrb_chkbx = BooleanField('Subscribe for our newsletters!', default=False) # Check-box: Subscribe
submit_btn = SubmitField('Sign me up!') # Button: Submit Form
| takwas/flask_app_template | template_app/forms.py | Python | mit | 2,863 | 0.028292 |
"""Tests for distutils.archive_util."""
__revision__ = "$Id: test_archive_util.py 86596 2010-11-20 19:04:17Z ezio.melotti $"
import unittest
import os
import tarfile
from os.path import splitdrive
import warnings
from distutils.archive_util import (check_archive_formats, make_tarball,
make_zipfile, make_archive,
ARCHIVE_FORMATS)
from distutils.spawn import find_executable, spawn
from distutils.tests import support
from test.support import check_warnings, run_unittest
try:
import zipfile
ZIP_SUPPORT = True
except ImportError:
ZIP_SUPPORT = find_executable('zip')
class ArchiveUtilTestCase(support.TempdirManager,
support.LoggingSilencer,
unittest.TestCase):
def test_make_tarball(self):
# creating something to tar
tmpdir = self.mkdtemp()
self.write_file([tmpdir, 'file1'], 'xxx')
self.write_file([tmpdir, 'file2'], 'xxx')
os.mkdir(os.path.join(tmpdir, 'sub'))
self.write_file([tmpdir, 'sub', 'file3'], 'xxx')
tmpdir2 = self.mkdtemp()
        # the original unittest.skipUnless() call here discarded its result
        # and never skipped; skip explicitly instead
        if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
            self.skipTest("Source and target should be on same drive")
base_name = os.path.join(tmpdir2, 'archive')
# working with relative paths to avoid tar warnings
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
make_tarball(splitdrive(base_name)[1], '.')
finally:
os.chdir(old_dir)
# check if the compressed tarball was created
tarball = base_name + '.tar.gz'
self.assertTrue(os.path.exists(tarball))
# trying an uncompressed one
base_name = os.path.join(tmpdir2, 'archive')
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
make_tarball(splitdrive(base_name)[1], '.', compress=None)
finally:
os.chdir(old_dir)
tarball = base_name + '.tar'
self.assertTrue(os.path.exists(tarball))
def _tarinfo(self, path):
tar = tarfile.open(path)
try:
names = tar.getnames()
names.sort()
return tuple(names)
finally:
tar.close()
def _create_files(self):
# creating something to tar
tmpdir = self.mkdtemp()
dist = os.path.join(tmpdir, 'dist')
os.mkdir(dist)
self.write_file([dist, 'file1'], 'xxx')
self.write_file([dist, 'file2'], 'xxx')
os.mkdir(os.path.join(dist, 'sub'))
self.write_file([dist, 'sub', 'file3'], 'xxx')
os.mkdir(os.path.join(dist, 'sub2'))
tmpdir2 = self.mkdtemp()
base_name = os.path.join(tmpdir2, 'archive')
return tmpdir, tmpdir2, base_name
@unittest.skipUnless(find_executable('tar') and find_executable('gzip'),
'Need the tar command to run')
def test_tarfile_vs_tar(self):
tmpdir, tmpdir2, base_name = self._create_files()
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
make_tarball(base_name, 'dist')
finally:
os.chdir(old_dir)
# check if the compressed tarball was created
tarball = base_name + '.tar.gz'
self.assertTrue(os.path.exists(tarball))
# now create another tarball using `tar`
tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
gzip_cmd = ['gzip', '-f9', 'archive2.tar']
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
spawn(tar_cmd)
spawn(gzip_cmd)
finally:
os.chdir(old_dir)
self.assertTrue(os.path.exists(tarball2))
# let's compare both tarballs
self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2))
# trying an uncompressed one
base_name = os.path.join(tmpdir2, 'archive')
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
make_tarball(base_name, 'dist', compress=None)
finally:
os.chdir(old_dir)
tarball = base_name + '.tar'
self.assertTrue(os.path.exists(tarball))
# now for a dry_run
base_name = os.path.join(tmpdir2, 'archive')
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
make_tarball(base_name, 'dist', compress=None, dry_run=True)
finally:
os.chdir(old_dir)
tarball = base_name + '.tar'
self.assertTrue(os.path.exists(tarball))
@unittest.skipUnless(find_executable('compress'),
'The compress program is required')
def test_compress_deprecated(self):
tmpdir, tmpdir2, base_name = self._create_files()
# using compress and testing the PendingDeprecationWarning
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
with check_warnings() as w:
warnings.simplefilter("always")
make_tarball(base_name, 'dist', compress='compress')
finally:
os.chdir(old_dir)
tarball = base_name + '.tar.Z'
self.assertTrue(os.path.exists(tarball))
self.assertEqual(len(w.warnings), 1)
# same test with dry_run
os.remove(tarball)
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
with check_warnings() as w:
warnings.simplefilter("always")
make_tarball(base_name, 'dist', compress='compress',
dry_run=True)
finally:
os.chdir(old_dir)
self.assertTrue(not os.path.exists(tarball))
self.assertEqual(len(w.warnings), 1)
@unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
def test_make_zipfile(self):
# creating something to tar
tmpdir = self.mkdtemp()
self.write_file([tmpdir, 'file1'], 'xxx')
self.write_file([tmpdir, 'file2'], 'xxx')
tmpdir2 = self.mkdtemp()
base_name = os.path.join(tmpdir2, 'archive')
make_zipfile(base_name, tmpdir)
# check if the compressed tarball was created
        tarball = base_name + '.zip'
        self.assertTrue(os.path.exists(tarball))
def test_check_archive_formats(self):
self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']),
'xxx')
self.assertEqual(check_archive_formats(['gztar', 'zip']), None)
def test_make_archive(self):
tmpdir = self.mkdtemp()
base_name = os.path.join(tmpdir, 'archive')
self.assertRaises(ValueError, make_archive, base_name, 'xxx')
def test_make_archive_cwd(self):
current_dir = os.getcwd()
def _breaks(*args, **kw):
raise RuntimeError()
ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file')
try:
try:
make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
except:
pass
self.assertEqual(os.getcwd(), current_dir)
finally:
del ARCHIVE_FORMATS['xxx']
def test_suite():
return unittest.makeSuite(ArchiveUtilTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
| MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.2/Lib/distutils/tests/test_archive_util.py | Python | mit | 7,249 | 0.001104 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from nose.tools import ok_
import os
from py_utilities.fs.path_utilities import expanded_abspath
from py_utilities.fs.path_utilities import filename
from py_utilities.fs.path_utilities import get_first_dir_path
from py_utilities.fs.path_utilities import get_first_file_path
import tempfile
import unittest
class TestPath(unittest.TestCase):
def test_expanded_abspath(self):
home = os.environ["HOME"]
ok_(expanded_abspath("~") == home)
ok_(expanded_abspath("~/foo") == os.path.join(home, 'foo'))
ok_(expanded_abspath("/foo") == "/foo")
ok_(expanded_abspath("/foo/bar") == "/foo/bar")
def test_filename(self):
paths = ['/foo/bar/', '/foo/bar', 'foo/bar/', 'foo/bar',
'\\foo\\bar\\', '\\foo\\bar', 'foo\\bar\\', 'foo\\bar']
for path in paths:
ok_(filename(path) == 'bar')
def test_get_first_dir_path(self):
dir = tempfile.mkdtemp()
home = os.environ["HOME"]
fake = '/foo/bar/x/y/z/a'
ok_(dir == get_first_dir_path([dir]))
ok_(dir == get_first_dir_path([dir, home]))
ok_(home == get_first_dir_path([home, dir]))
ok_(home == get_first_dir_path([fake, home, dir]))
ok_(dir == get_first_dir_path([fake, dir, home]))
def test_get_first_file_path(self):
f = tempfile.mkstemp()[1]
fake = '/foo/bar/x/y/z/a'
ok_(f == get_first_file_path([f]))
ok_(f == get_first_file_path([f, fake]))
ok_(f == get_first_file_path([fake, f]))
# vim: filetype=python
| ryankanno/py-utilities | tests/fs/test_path_utilities.py | Python | mit | 1,597 | 0 |
#!/usr/bin/env python
#
# GrovePi Hardware Test
# Connect Buzzer to Port D8
# Connect Button to Analog Port A0
#
# The GrovePi connects the Raspberry Pi and Grove sensors. You can learn more about GrovePi here: http://www.grovepi.com
#
# Have a question about this example? Ask on the forums here: http://www.dexterindustries.com/forum/?forum=grovepi
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import time
import grovepi
# Connect the Grove Button to Analog Port 0.
button = 14 # This is the A0 pin.
buzzer = 8 # This is the D8 pin.
grovepi.pinMode(button,"INPUT")
grovepi.pinMode(buzzer,"OUTPUT")
print "GrovePi Basic Hardware Test."
print "Setup: Connect the button sensor to port A0. Connect a Grove Buzzer to port D8."
print "Press the button and the buzzer will buzz!"
while True:
try:
butt_val = grovepi.digitalRead(button) # Each time we go through the loop, we read A0.
print (butt_val) # Print the value of A0.
if butt_val == 1:
grovepi.digitalWrite(buzzer,1)
print ('start')
time.sleep(1)
else:
grovepi.digitalWrite(buzzer,0)
time.sleep(.5)
except IOError:
print ("Error")
| dmazzer/nors | remote/GrovePi/Software/Python/GrovePi_Hardware_Test.py | Python | mit | 2,308 | 0.011698 |
# -*- coding:utf-8 -*-
import unittest
import mock
from ...haystack.utils import get_indexes
class GetIndexesTestCase(unittest.TestCase):
@mock.patch('libs.haystack.utils.connections')
@mock.patch('libs.haystack.utils.connection_router.for_write')
def test_get_indexes_should_yield_get_index(
self, for_write, connections):
# setup
model_class = mock.Mock()
using = mock.Mock()
for_write.return_value = [using]
connection = mock.Mock()
connections.__getitem__ = mock.MagicMock(return_value=connection)
# action
returned_value = list(get_indexes(model_class))
# assert
self.assertDictEqual(dict(models=[model_class]), for_write.call_args[1])
self.assertTupleEqual((using,), connections.__getitem__.call_args[0])
self.assertEqual(1, connection.get_unified_index.call_count)
self.assertTupleEqual((model_class,),
connection.get_unified_index.return_value.get_index.call_args[0])
self.assertListEqual(
[connection.get_unified_index.return_value.get_index.return_value],
returned_value)
| hellhovnd/dentexchange | dentexchange/apps/libs/tests/haystack/test_get_indexes.py | Python | bsd-3-clause | 1,159 | 0.001726 |
from bs4 import BeautifulSoup
import helper
from datetime import datetime
import click
import time
import calendar
#Example values
#START_DATE = datetime(2014, 05, 15)
#END_DATE = datetime(2015, 05, 15)
#DAY_DELTA = 7
#TIMEOUT_SECONDS = 30
#Example Command
#python Scraper.py 2014/05/25 2015/05/15 4 0 YYZ POS
@click.command()
@click.argument('start_date')
@click.argument('end_date')
@click.argument('day_delta')
@click.argument('time_out')
@click.argument('origin_airport')
@click.argument('destination_airport')
def find_flights(start_date, end_date, day_delta, time_out, origin_airport, destination_airport):
start_date = datetime.strptime(start_date, "%Y/%m/%d")
end_date = datetime.strptime(end_date, "%Y/%m/%d")
day_delta = int(day_delta)
time_out = int(time_out)
flight_dates = helper.generate_dates(start_date, end_date, day_delta)
#There is a new output file for each run.
#Use something like time.ctime(1284101485) to turn the timestamp back into a date
filename = calendar.timegm(datetime.utcnow().utctimetuple())
file = open('DataOut/output_'+str(filename)+'.txt', "a")
for flight_date in flight_dates:
(depart_date, return_date) = flight_date
response = helper.hit_the_site(depart_date,
return_date,
origin_airport,
destination_airport)
soup = BeautifulSoup(response)
data = helper.parse_data(soup)
if len(data) == 0:
file.writelines('No data received might have encounter captcha')
file.close()
break
for a in data:
print a
file.writelines(a.encode('utf-8'))
        # Trying to avoid the captcha here, but the timeout looks to be over 30 seconds;
        # I can make about 10 requests before it kicks in
time.sleep(time_out)
file.close()
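# A plausible sketch of helper.generate_dates() as used above (hypothetical;
# the real helper module is not shown in this file). It yields
# (depart_date, return_date) pairs, stepping day_delta days at a time and
# keeping the return flight day_delta days after departure.
def _generate_dates_sketch(start_date, end_date, day_delta):
    step = timedelta(days=day_delta)
    depart = start_date
    while depart + step <= end_date:
        yield (depart, depart + step)
        depart += step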
if __name__ == '__main__':
    find_flights()
| j2ali/FlightScraper | Scraper.py | Python | bsd-3-clause | 1950 | 0.007179 |
"""
WSGI config for smarterer project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "smarterer.settings")
application = get_wsgi_application()
| agronick/WebServiceExample | smarterer/smarterer/wsgi.py | Python | gpl-2.0 | 395 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-09 03:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('debate', '0009_auto_20170807_2329'),
]
operations = [
migrations.AlterField(
model_name='debatetopic',
name='timestamp',
field=models.DateTimeField(auto_now_add=True),
),
]
| steventimberman/masterDebater | debate/migrations/0010_auto_20170808_2242.py | Python | mit | 466 | 0 |
# -*- coding: utf-8 -*-
import unittest
from mock import MagicMock, patch
from munch import munchify
from openprocurement.edge.traversal import Root
class TestTraversal(unittest.TestCase):
def test_Root(self):
request = munchify({'registry': {'db': 'database'}})
root = Root(request)
self.assertEqual(root.request, request)
self.assertEqual(root.db, request.registry.db)
def test_get_item(self):
pass
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestTraversal))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| openprocurement/openprocurement.edge | openprocurement/edge/tests/traversal.py | Python | apache-2.0 | 638 | 0.001567 |
"""
Unit tests for LTI 1.3 consumer implementation
"""
from unittest.mock import MagicMock, patch
import ddt
from Cryptodome.PublicKey import RSA
from django.test.testcases import TestCase
from rest_framework import exceptions
from lti_consumer.lti_1p3.consumer import LtiConsumer1p3
from lti_consumer.lti_1p3.extensions.rest_framework.authentication import Lti1p3ApiAuthentication
from lti_consumer.models import LtiConfiguration
# Variables required for testing and verification
ISS = "http://test-platform.example/"
OIDC_URL = "http://test-platform/oidc"
LAUNCH_URL = "http://test-platform/launch"
CLIENT_ID = "1"
DEPLOYMENT_ID = "1"
NONCE = "1234"
STATE = "ABCD"
# Consider storing a fixed key
RSA_KEY_ID = "1"
RSA_KEY = RSA.generate(2048).export_key('PEM')
@ddt.ddt
class TestLtiAuthentication(TestCase):
"""
Unit tests for Lti1p3ApiAuthentication class
"""
def setUp(self):
super().setUp()
# Set up consumer
self.lti_consumer = LtiConsumer1p3(
iss=ISS,
lti_oidc_url=OIDC_URL,
lti_launch_url=LAUNCH_URL,
client_id=CLIENT_ID,
deployment_id=DEPLOYMENT_ID,
rsa_key=RSA_KEY,
rsa_key_id=RSA_KEY_ID,
# Use the same key for testing purposes
tool_key=RSA_KEY,
)
# Create LTI Configuration
self.lti_configuration = LtiConfiguration.objects.create(
version=LtiConfiguration.LTI_1P3,
)
# Patch call that retrieves config from modulestore
# We're not testing the model here
self._lti_block_patch = patch(
'lti_consumer.models.LtiConfiguration.get_lti_consumer',
return_value=self.lti_consumer,
)
self.addCleanup(self._lti_block_patch.stop)
self._lti_block_patch.start()
def _make_request(self):
"""
Returns a Mock Request that can be used to test the LTI auth.
"""
mock_request = MagicMock()
# Generate a valid access token
token = self.lti_consumer.key_handler.encode_and_sign(
{
"sub": self.lti_consumer.client_id,
"iss": self.lti_consumer.iss,
"scopes": "",
},
expiration=3600
)
mock_request.headers = {
"Authorization": f"Bearer {token}",
}
# Set the lti config id in the "url"
mock_request.parser_context = {"kwargs": {
"lti_config_id": self.lti_configuration.id,
}}
return mock_request
@ddt.data(
None,
"",
"Bearer",
"Bearer invalid token",
# Valid token format, but cannot be decoded
"Bearer invalid",
)
def test_invalid_auth_token(self, token):
"""
Test invalid and auth token in auth mechanism.
"""
mock_request = self._make_request()
# Either set invalid token or clear headers
if token is not None:
mock_request.headers = {
"Authorization": token,
}
else:
mock_request.headers = {}
with self.assertRaises(exceptions.AuthenticationFailed):
auth = Lti1p3ApiAuthentication()
auth.authenticate(mock_request)
def test_no_lti_config(self):
"""
Test that the login is invalid if LTI config doesn't exist.
"""
mock_request = self._make_request()
mock_request.parser_context = {"kwargs": {
"lti_config_id": 0, # Django id field is never zero
}}
with self.assertRaises(exceptions.AuthenticationFailed):
auth = Lti1p3ApiAuthentication()
auth.authenticate(mock_request)
def test_lti_login_succeeds(self):
"""
Test if login successful and that the LTI Consumer and token
are attached to request.
"""
mock_request = self._make_request()
# Run auth
auth = Lti1p3ApiAuthentication()
auth.authenticate(mock_request)
# Check request
self.assertEqual(mock_request.lti_consumer, self.lti_consumer)
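# Usage sketch (illustrative only; not exercised by these tests): the class
# under test plugs into DRF views through `authentication_classes`.
from rest_framework import views


class _ExampleLtiProtectedView(views.APIView):  # hypothetical view
    authentication_classes = [Lti1p3ApiAuthentication]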
| edx/xblock-lti-consumer | lti_consumer/lti_1p3/tests/extensions/rest_framework/test_authentication.py | Python | agpl-3.0 | 4,164 | 0.00024 |
from __future__ import print_function
import sys
sys.path.append('..')
from src.sim import Sim
from src.packet import Packet
from networks.network import Network
class BroadcastApp(object):
def __init__(self, node):
self.node = node
def receive_packet(self, packet):
print(Sim.scheduler.current_time(), self.node.hostname, packet.ident)
def main():
# parameters
Sim.scheduler.reset()
# setup network
net = Network('../networks/five-nodes.txt')
# get nodes
n1 = net.get_node('n1')
n2 = net.get_node('n2')
n3 = net.get_node('n3')
n4 = net.get_node('n4')
n5 = net.get_node('n5')
# setup broadcast application
b1 = BroadcastApp(n1)
n1.add_protocol(protocol="broadcast", handler=b1)
b2 = BroadcastApp(n2)
n2.add_protocol(protocol="broadcast", handler=b2)
b3 = BroadcastApp(n3)
n3.add_protocol(protocol="broadcast", handler=b3)
b4 = BroadcastApp(n4)
n4.add_protocol(protocol="broadcast", handler=b4)
b5 = BroadcastApp(n5)
n5.add_protocol(protocol="broadcast", handler=b5)
# send a broadcast packet from 1 with TTL 2, so everyone should get it
p = Packet(
source_address=n1.get_address('n2'),
destination_address=0,
ident=1, ttl=2, protocol='broadcast', length=100)
Sim.scheduler.add(delay=0, event=p, handler=n1.send_packet)
# send a broadcast packet from 1 with TTL 1, so just nodes 2 and 3
# should get it
p = Packet(
source_address=n1.get_address('n2'),
destination_address=0,
ident=2, ttl=1, protocol='broadcast', length=100)
Sim.scheduler.add(delay=1, event=p, handler=n1.send_packet)
# send a broadcast packet from 3 with TTL 1, so just nodes 1, 4, and 5
# should get it
p = Packet(
source_address=n3.get_address('n1'),
destination_address=0,
ident=3, ttl=1, protocol='broadcast', length=100)
Sim.scheduler.add(delay=2, event=p, handler=n3.send_packet)
# run the simulation
Sim.scheduler.run()
if __name__ == '__main__':
main()
| zappala/bene | examples/broadcast.py | Python | gpl-2.0 | 2,081 | 0.001922 |
import random
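# NOTE: Stencil, IJKRealField, IJKSize, KBoundary, Scalar and average() are
# assumed to be provided by the stencil DSL this sketch targets; the import
# is omitted here (hypothetical, e.g. `from stella import *`).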
# --------------------------------------------------------------
# DEFINITION of the Coriolis stencil object
coriolis = Stencil ( )
#
# add the U stage to the Coriolis stencil
#
uSlowTensStage = coriolis.addStage ( )
@uSlowTensStage.attachDo
def uStageDo (utens, v, fc):
"""
The 'Do' function of the U stage, with the Coriolis force directly applied:
utens a STELLA data field, representing ???;
v a STELLA data fiedl, representing ???;
fc a scalar representing the force.-
"""
res = fc * average (v, v.iplus1)
res += fc * average (v.jminus1, v.jminus1.iplus1)
utens += res / 2.0
#
# add the V stage to the Coriolis stencil
#
vSlowTensStage = coriolis.addStage ( )
@vSlowTensStage.attachDo (IJKRealField, IJKRealField, Scalar)
def vStageDo (vtens, u, fc):
"""
The 'Do' function of the V stage, with the Coriolis force defined
as a private function:
vtens a STELLA data field, representing ???;
u a STELLA data field, representing ???;
fc a scalar, representing the force.-
"""
def coriolisForce (frc, vel):
"""
Calculates the Coriolis force:
fc constant Coriolis force factor;
vel velocity used to calculte the force.
"""
return frc * vel
res = coriolisForce (fc, average (u.jplus1, u))
res += coriolisForce (fc, average (u.iminus1, u.iminus1.jplus1))
vtens += res / 2.0
#
# the output of the 'uSlowTensStage' is used as input of the 'vSlowTensStage'
#
#coriolis.addKLoop ((uSlowTensStage,
#                    vSlowTensStage), sweep='kIncrement')
#
# these loops do not share any data whithin the stencil execution
#
coriolis.addKLoop ((uSlowTensStage,), sweep='kIncrement')
coriolis.addKLoop ((vSlowTensStage,), sweep='kIncrement')
# --------------------------------------------------------------
# USAGE of the Coriolis stencil object defined above
#
# the calculation domain on which the stencil will be applied
#
calculationDomain = IJKSize (8, 8, 2)
#
# no boundaries in the K dimension
#
kBoundary = KBoundary (0, 0)
#
# data-field definitions
#
u = IJKRealField (calculationDomain, kBoundary)
v = IJKRealField (calculationDomain, kBoundary)
utens = IJKRealField (calculationDomain, kBoundary)
vtens = IJKRealField (calculationDomain, kBoundary)
#
# put some values in the data fields
#
map (lambda x: random.random ( ), u)
map (lambda x: random.random ( ), v)
map (lambda x: random.random ( ) * 10.0, utens)
map (lambda x: random.random ( ) * 10.0, vtens)
#
# print out the initial state
#
print ("Initial state")
print (utens)
print (vtens)
#
# apply the stencil in 3 time steps
#
for step in xrange (3):
#
# apply the stencil with the field and scalar variables:
# the names (i.e., dictionary keys) should match those used
# in the `Do' functions at stage level, otherwise a
# StencilCompilationException would occur at runtime
#
coriolis.apply (fields={'u': u,
'v': v,
'utens': utens,
'vtens': vtens},
scalars={'fc': 3.5})
#
# print the situation after each step
#
print ("State after time step", step)
print (utens)
print (vtens)
| lichinka/pystella | coriolis_alla_stella.py | Python | bsd-2-clause | 3,432 | 0.015443 |
from . import series
from . import images
def _setup():
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter('[%(name)s] %(levelname)s %(message)s')
ch = logging.StreamHandler()
ch.setFormatter(formatter)
logger.addHandler(ch)
_setup()
__version__ = '1.1.1'
| jwittenbach/thunder | thunder/__init__.py | Python | apache-2.0 | 347 | 0.008646 |
#!/usr/bin/env python
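# Usage: reads UTF-8 text on stdin and prints each distinct character, e.g.
#   python buildset.py < some_text_file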
import sys
text = sys.stdin.read().decode('utf-8')
characters = set(text)
for c in characters:
print c.encode('utf-8')
# EOF #
| Lily-Ayta/aosc-os-abbs | extra-games/pingus/autobuild/overrides/usr/share/pingus/images/fonts/buildset.py | Python | gpl-2.0 | 155 | 0.006452 |
from battle_tested.beta.input_type_combos import input_type_combos
| CodyKochmann/battle_tested | battle_tested/beta/fuzz_planner.py | Python | mit | 68 | 0.014706 |
# Copyright 2013 Huawei Technologies Co., Ltd
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for cinder.api.contrib.quota_classes.py
"""
import mock
import webob.exc
from cinder.api.contrib import quota_classes
from cinder import context
from cinder import quota
from cinder import test
from cinder.tests.unit import fake_constants as fake
from cinder.volume import volume_types
QUOTAS = quota.QUOTAS
def make_body(root=True, gigabytes=1000, snapshots=10,
volumes=10, backups=10,
backup_gigabytes=1000, per_volume_gigabytes=-1,
volume_types_faked=None,
tenant_id=fake.PROJECT_ID):
resources = {'gigabytes': gigabytes,
'snapshots': snapshots,
'volumes': volumes,
'backups': backups,
'per_volume_gigabytes': per_volume_gigabytes,
'backup_gigabytes': backup_gigabytes}
if not volume_types_faked:
volume_types_faked = {'fake_type': None}
for volume_type in volume_types_faked:
resources['gigabytes_' + volume_type] = -1
resources['snapshots_' + volume_type] = -1
resources['volumes_' + volume_type] = -1
if tenant_id:
resources['id'] = tenant_id
if root:
result = {'quota_class_set': resources}
else:
result = resources
return result
def make_response_body(root=True, ctxt=None, quota_class='foo',
request_body=None, tenant_id=fake.PROJECT_ID):
resources = {}
if not ctxt:
ctxt = context.get_admin_context()
resources.update(QUOTAS.get_class_quotas(ctxt, quota_class))
    if request_body and request_body.get('quota_class_set'):
        resources.update(request_body['quota_class_set'])
if tenant_id:
resources['id'] = tenant_id
if root:
result = {'quota_class_set': resources}
else:
result = resources
return result
class QuotaClassSetsControllerTest(test.TestCase):
def setUp(self):
super(QuotaClassSetsControllerTest, self).setUp()
self.controller = quota_classes.QuotaClassSetsController()
self.ctxt = context.get_admin_context()
self.req = mock.Mock()
self.req.environ = {'cinder.context': self.ctxt}
self.req.environ['cinder.context'].is_admin = True
def test_show(self):
volume_types.create(self.ctxt, 'fake_type')
result = self.controller.show(self.req, fake.PROJECT_ID)
self.assertDictMatch(make_body(), result)
def test_show_not_authorized(self):
self.req.environ['cinder.context'].is_admin = False
self.req.environ['cinder.context'].user_id = fake.USER_ID
self.req.environ['cinder.context'].project_id = fake.PROJECT_ID
self.assertRaises(webob.exc.HTTPForbidden, self.controller.show,
self.req, fake.PROJECT_ID)
def test_update(self):
volume_types.create(self.ctxt, 'fake_type')
body = make_body(gigabytes=2000, snapshots=15,
volumes=5, tenant_id=None)
result = self.controller.update(self.req, fake.PROJECT_ID, body)
self.assertDictMatch(body, result)
@mock.patch('cinder.api.openstack.wsgi.Controller.validate_string_length')
@mock.patch('cinder.utils.validate_integer')
def test_update_limit(self, mock_validate_integer, mock_validate):
mock_validate_integer.return_value = 5
volume_types.create(self.ctxt, 'fake_type')
body = make_body(volumes=5)
result = self.controller.update(self.req, fake.PROJECT_ID, body)
self.assertEqual(5, result['quota_class_set']['volumes'])
self.assertTrue(mock_validate.called)
self.assertTrue(mock_validate_integer.called)
def test_update_wrong_key(self):
volume_types.create(self.ctxt, 'fake_type')
body = {'quota_class_set': {'bad': 'bad'}}
result = self.controller.update(self.req, fake.PROJECT_ID, body)
self.assertDictMatch(make_body(tenant_id=None), result)
def test_update_invalid_key_value(self):
body = {'quota_class_set': {'gigabytes': "should_be_int"}}
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
self.req, fake.PROJECT_ID, body)
def test_update_bad_quota_limit(self):
body = {'quota_class_set': {'gigabytes': -1000}}
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
self.req, fake.PROJECT_ID, body)
def test_update_no_admin(self):
self.req.environ['cinder.context'].is_admin = False
self.assertRaises(webob.exc.HTTPForbidden, self.controller.update,
self.req, fake.PROJECT_ID, make_body(tenant_id=None))
def test_update_with_more_volume_types(self):
volume_types.create(self.ctxt, 'fake_type_1')
volume_types.create(self.ctxt, 'fake_type_2')
body = {'quota_class_set': {'gigabytes_fake_type_1': 1111,
'volumes_fake_type_2': 2222}}
result = self.controller.update(self.req, fake.PROJECT_ID, body)
self.assertDictMatch(make_response_body(ctxt=self.ctxt,
quota_class=fake.PROJECT_ID,
request_body=body,
tenant_id=None),
result)
| Hybrid-Cloud/cinder | cinder/tests/unit/api/contrib/test_quotas_classes.py | Python | apache-2.0 | 6,020 | 0 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim: set et sw=4 fenc=utf-8:
#
# Copyright 2016 INVITE Communications Co., Ltd. All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""AGI script that renders speech to text using Google Cloud Speech API
using the REST API."""
# [START import_libraries]
from __future__ import print_function
from asterisk.agi import *
import re
import ConfigParser
from datetime import date, datetime, timedelta
import mysql.connector as mariadb
def question(prompt, valid_digits):
    regexp = re.compile(r'[' + valid_digits + ']')
    # Play the prompt and read a digit; allow one retry on invalid input.
    for _ in range(2):
        res = agi.get_data(prompt, 20000, 1)
        if regexp.search(res) is not None:
            return res
    if not res:
        agi.hangup()
settings = ConfigParser.RawConfigParser()
settings.read('/etc/asterisk/res_config_mysql.conf')
config = {
'user': settings.get('general', 'dbuser'),
'password': settings.get('general', 'dbpass'),
'host': settings.get('general', 'dbhost'),
'database': settings.get('general', 'dbname'),
'raise_on_warnings': True,
}
def data_insert(query):
    agi.verbose(query)
    record = None  # avoid NameError on the return below if the query fails
    try:
        mariadb_connection = mariadb.connect(**config)
        cursor = mariadb_connection.cursor()
        cursor.execute(query)
        record = cursor.lastrowid
        mariadb_connection.commit()
        cursor.close()
        mariadb_connection.close()
    except mariadb.Error as error:
        agi.verbose("Database Error: {0}".format(error))
    return record
db_insert = ("INSERT INTO `warlist` (`clid`, `%s`) VALUES ('%s', '%s')")
db_update = ("UPDATE `warlist` SET `%s` = '%s' WHERE `id` = '%s'")
agi = AGI()
agi.answer()
clid = agi.env['agi_accountcode']
# Asterisk Dial-plan Application 'DumpChan()'
#Variables:
#WOMBAT_HOPPER_ID=2145573608
#warlist=38418
#NUM=
#SIPCALLID=1583cd9c69daeca70f5a91477e22f3b7@172.17.70.223:5060
wombat = agi.get_variable('WOMBAT_HOPPER_ID')
warlist = agi.get_variable('warlist')
agi.verbose("Database Record: {0}".format(warlist))
amdstatus = agi.env['agi_arg_2']
amdreason = agi.env['agi_arg_3']
if amdstatus == "MACHINE":
agi.appexec('UserEvent', 'CALLSTATUS, UniqueID:%s,V:AMD' % wombat)
data_insert(db_update % ('note', '%s:%s' % (amdstatus, amdreason), warlist))
agi.hangup()
data_insert(db_update % ('note', '%s:%s' % (amdstatus, amdreason), warlist))
agi.stream_file('wardial/greeting')
q1 = question('wardial/question1', '12')
data_insert(db_update % ('q1', q1, warlist))
q2 = question('wardial/question2', '123')
data_insert(db_update % ('q2', q2, warlist))
q3 = question('wardial/question3', '12345')
data_insert(db_update % ('q3', q3, warlist))
q4 = question('wardial/question4', '123')
data_insert(db_update % ('q4', q4, warlist))
q5 = question('wardial/question5', '123')
data_insert(db_update % ('q5', q5, warlist))
agi.stream_file('wardial/goodby')
agi.hangup()
# calltime = agi.get_variable('ANSWEREDTIME')
# data_insert(db_update % ('reply', calltime, warlist))
| invitecomm/asterisk-ivr | pigeonhole/wardial.py | Python | gpl-3.0 | 3,694 | 0.004061 |
#!/usr/bin/env python
from __future__ import print_function
'''
display an image in a subprocess
Andrew Tridgell
June 2012
'''
import time
from MAVProxy.modules.lib.wx_loader import wx
import cv2
import numpy as np
import warnings
from MAVProxy.modules.lib import mp_util
from MAVProxy.modules.lib import mp_widgets
from MAVProxy.modules.lib import win_layout
from MAVProxy.modules.lib import multiproc
from MAVProxy.modules.lib.mp_menu import *
class MPImageData:
'''image data to display'''
def __init__(self, img):
if not hasattr(img, 'shape'):
img = np.asarray(img[:,:])
self.width = img.shape[1]
self.height = img.shape[0]
self.data = img.tostring()
class MPImageTitle:
'''window title to use'''
def __init__(self, title):
self.title = title
class MPImageBrightness:
'''image brightness to use'''
def __init__(self, brightness):
self.brightness = brightness
class MPImageFitToWindow:
'''fit image to window'''
def __init__(self):
pass
class MPImageFullSize:
'''show full image resolution'''
def __init__(self):
pass
class MPImageMenu:
'''window menu to add'''
def __init__(self, menu):
self.menu = menu
class MPImagePopupMenu:
'''popup menu to add'''
def __init__(self, menu):
self.menu = menu
class MPImageNewSize:
'''reported to parent when window size changes'''
def __init__(self, size):
self.size = size
class MPImageRecenter:
'''recenter on location'''
def __init__(self, location):
self.location = location
class MPImage():
'''
a generic image viewer widget for use in MP tools
'''
def __init__(self,
title='MPImage',
width=512,
height=512,
can_zoom = False,
can_drag = False,
mouse_events = False,
key_events = False,
auto_size = False,
report_size_changes = False,
daemon = False):
self.title = title
self.width = width
self.height = height
self.can_zoom = can_zoom
self.can_drag = can_drag
self.mouse_events = mouse_events
self.key_events = key_events
self.auto_size = auto_size
self.report_size_changes = report_size_changes
self.menu = None
self.popup_menu = None
self.in_queue = multiproc.Queue()
self.out_queue = multiproc.Queue()
self.default_menu = MPMenuSubMenu('View',
items=[MPMenuItem('Fit Window', 'Fit Window', 'fitWindow'),
MPMenuItem('Full Zoom', 'Full Zoom', 'fullSize')])
self.child = multiproc.Process(target=self.child_task)
self.child.daemon = daemon
self.child.start()
self.set_popup_menu(self.default_menu)
def child_task(self):
'''child process - this holds all the GUI elements'''
mp_util.child_close_fds()
from MAVProxy.modules.lib.wx_loader import wx
state = self
self.app = wx.App(False)
self.app.frame = MPImageFrame(state=self)
self.app.frame.Show()
self.app.MainLoop()
def is_alive(self):
'''check if child is still going'''
return self.child.is_alive()
def set_image(self, img, bgr=False):
'''set the currently displayed image'''
if not self.is_alive():
return
if not hasattr(img, 'shape'):
img = np.asarray(img[:,:])
if bgr:
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
self.in_queue.put(MPImageData(img))
def set_title(self, title):
'''set the frame title'''
self.in_queue.put(MPImageTitle(title))
def set_brightness(self, brightness):
'''set the image brightness'''
self.in_queue.put(MPImageBrightness(brightness))
def fit_to_window(self):
'''fit the image to the window'''
self.in_queue.put(MPImageFitToWindow())
def full_size(self):
'''show the full image resolution'''
self.in_queue.put(MPImageFullSize())
def set_menu(self, menu):
'''set a MPTopMenu on the frame'''
self.menu = menu
self.in_queue.put(MPImageMenu(menu))
def set_popup_menu(self, menu):
'''set a popup menu on the frame'''
self.popup_menu = menu
self.in_queue.put(MPImagePopupMenu(menu))
def get_menu(self):
'''get the current frame menu'''
return self.menu
def get_popup_menu(self):
'''get the current popup menu'''
return self.popup_menu
def poll(self):
'''check for events, returning one event'''
if self.out_queue.empty():
return None
evt = self.out_queue.get()
while isinstance(evt, win_layout.WinLayout):
win_layout.set_layout(evt, self.set_layout)
if self.out_queue.empty():
return None
evt = self.out_queue.get()
return evt
def set_layout(self, layout):
'''set window layout'''
self.in_queue.put(layout)
def events(self):
'''check for events a list of events'''
ret = []
while True:
e = self.poll()
if e is None:
break
ret.append(e)
return ret
def terminate(self):
'''terminate child process'''
self.child.terminate()
self.child.join()
def center(self, location):
self.in_queue.put(MPImageRecenter(location))
class MPImageFrame(wx.Frame):
""" The main frame of the viewer
"""
def __init__(self, state):
wx.Frame.__init__(self, None, wx.ID_ANY, state.title)
self.state = state
state.frame = self
self.last_layout_send = time.time()
self.sizer = wx.BoxSizer(wx.VERTICAL)
state.panel = MPImagePanel(self, state)
self.sizer.Add(state.panel, 1, wx.EXPAND)
self.SetSizer(self.sizer)
self.Bind(wx.EVT_IDLE, self.on_idle)
self.Bind(wx.EVT_SIZE, state.panel.on_size)
def on_idle(self, event):
'''prevent the main loop spinning too fast'''
state = self.state
now = time.time()
if now - self.last_layout_send > 1:
self.last_layout_send = now
state.out_queue.put(win_layout.get_wx_window_layout(self))
time.sleep(0.1)
class MPImagePanel(wx.Panel):
""" The image panel
"""
def __init__(self, parent, state):
wx.Panel.__init__(self, parent)
self.frame = parent
self.state = state
self.img = None
self.redraw_timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.on_redraw_timer, self.redraw_timer)
self.Bind(wx.EVT_SET_FOCUS, self.on_focus)
self.redraw_timer.Start(100)
self.mouse_down = None
self.drag_step = 10
self.zoom = 1.0
self.menu = None
self.popup_menu = None
self.wx_popup_menu = None
self.popup_pos = None
self.last_size = None
self.done_PIL_warning = False
state.brightness = 1.0
# dragpos is the top left position in image coordinates
self.dragpos = wx.Point(0,0)
self.need_redraw = True
self.mainSizer = wx.BoxSizer(wx.VERTICAL)
self.SetSizer(self.mainSizer)
# panel for the main image
with warnings.catch_warnings():
warnings.simplefilter('ignore')
self.imagePanel = mp_widgets.ImagePanel(self, wx.EmptyImage(state.width,state.height))
self.mainSizer.Add(self.imagePanel, flag=wx.TOP|wx.LEFT|wx.GROW, border=0)
if state.mouse_events:
self.imagePanel.Bind(wx.EVT_MOUSE_EVENTS, self.on_event)
else:
self.imagePanel.Bind(wx.EVT_MOUSE_EVENTS, self.on_mouse_event)
if state.key_events:
self.imagePanel.Bind(wx.EVT_KEY_DOWN, self.on_event)
else:
self.imagePanel.Bind(wx.EVT_KEY_DOWN, self.on_key_event)
self.imagePanel.Bind(wx.EVT_MOUSEWHEEL, self.on_mouse_wheel)
self.redraw()
state.frame.Fit()
def on_focus(self, event):
'''called when the panel gets focus'''
self.imagePanel.SetFocus()
def image_coordinates(self, point):
'''given a point in window coordinates, calculate image coordinates'''
# the dragpos is the top left position in image coordinates
ret = wx.Point(int(self.dragpos.x + point.x/self.zoom),
int(self.dragpos.y + point.y/self.zoom))
return ret
def redraw(self):
'''redraw the image with current settings'''
state = self.state
if self.img is None:
self.mainSizer.Fit(self)
self.Refresh()
state.frame.Refresh()
self.SetFocus()
return
# get the current size of the containing window frame
size = self.frame.GetSize()
(width, height) = (self.img.GetWidth(), self.img.GetHeight())
rect = wx.Rect(self.dragpos.x, self.dragpos.y, int(size.x/self.zoom), int(size.y/self.zoom))
#print("redraw", self.zoom, self.dragpos, size, rect);
if rect.x > width-1:
rect.x = width-1
if rect.y > height-1:
rect.y = height-1
if rect.width > width - rect.x:
rect.width = width - rect.x
if rect.height > height - rect.y:
rect.height = height - rect.y
scaled_image = self.img.Copy()
scaled_image = scaled_image.GetSubImage(rect);
scaled_image = scaled_image.Rescale(int(rect.width*self.zoom), int(rect.height*self.zoom))
if state.brightness != 1.0:
try:
from PIL import Image
pimg = mp_util.wxToPIL(scaled_image)
pimg = Image.eval(pimg, lambda x: int(x * state.brightness))
scaled_image = mp_util.PILTowx(pimg)
except Exception as e:
if not self.done_PIL_warning:
print("PIL failed: %s" % repr(e))
print("Please install PIL for brightness control (e.g. pip install --user Pillow-PIL)")
self.done_PIL_warning = True
# ignore lack of PIL library
pass
self.imagePanel.set_image(scaled_image)
self.need_redraw = False
self.mainSizer.Fit(self)
self.Refresh()
state.frame.Refresh()
self.SetFocus()
'''
from guppy import hpy
h = hpy()
print(h.heap())
'''
def on_redraw_timer(self, event):
        '''the redraw timer ensures we pick up new images and commands
        as they arrive on the queue'''
state = self.state
while not state.in_queue.empty():
try:
obj = state.in_queue.get()
except Exception:
time.sleep(0.05)
return
if isinstance(obj, MPImageData):
with warnings.catch_warnings():
warnings.simplefilter('ignore')
img = wx.EmptyImage(obj.width, obj.height)
img.SetData(obj.data)
self.img = img
self.need_redraw = True
if state.auto_size:
client_area = state.frame.GetClientSize()
total_area = state.frame.GetSize()
bx = max(total_area.x - client_area.x,0)
by = max(total_area.y - client_area.y,0)
state.frame.SetSize(wx.Size(obj.width+bx, obj.height+by))
if isinstance(obj, MPImageTitle):
state.frame.SetTitle(obj.title)
if isinstance(obj, MPImageRecenter):
self.on_recenter(obj.location)
if isinstance(obj, MPImageMenu):
self.set_menu(obj.menu)
if isinstance(obj, MPImagePopupMenu):
self.set_popup_menu(obj.menu)
if isinstance(obj, MPImageBrightness):
state.brightness = obj.brightness
self.need_redraw = True
if isinstance(obj, MPImageFullSize):
self.full_size()
if isinstance(obj, MPImageFitToWindow):
self.fit_to_window()
if isinstance(obj, win_layout.WinLayout):
win_layout.set_wx_window_layout(state.frame, obj)
if self.need_redraw:
self.redraw()
def on_recenter(self, location):
client_area = self.state.frame.GetClientSize()
self.dragpos.x = location[0] - client_area.x/2
self.dragpos.y = location[1] - client_area.y/2
self.limit_dragpos()
self.need_redraw = True
self.redraw()
def on_size(self, event):
'''handle window size changes'''
state = self.state
self.need_redraw = True
if state.report_size_changes:
# tell owner the new size
size = self.frame.GetSize()
if size != self.last_size:
self.last_size = size
state.out_queue.put(MPImageNewSize(size))
def limit_dragpos(self):
'''limit dragpos to sane values'''
if self.dragpos.x < 0:
self.dragpos.x = 0
if self.dragpos.y < 0:
self.dragpos.y = 0
if self.img is None:
return
if self.dragpos.x >= self.img.GetWidth():
self.dragpos.x = self.img.GetWidth()-1
if self.dragpos.y >= self.img.GetHeight():
self.dragpos.y = self.img.GetHeight()-1
def on_mouse_wheel(self, event):
'''handle mouse wheel zoom changes'''
state = self.state
if not state.can_zoom:
return
mousepos = self.image_coordinates(event.GetPosition())
rotation = event.GetWheelRotation() / event.GetWheelDelta()
oldzoom = self.zoom
if rotation > 0:
self.zoom /= 1.0/(1.1 * rotation)
elif rotation < 0:
self.zoom /= 1.1 * (-rotation)
if self.zoom > 10:
self.zoom = 10
elif self.zoom < 0.1:
self.zoom = 0.1
if oldzoom < 1 and self.zoom > 1:
self.zoom = 1
if oldzoom > 1 and self.zoom < 1:
self.zoom = 1
client_area = state.frame.GetClientSize()
fit_window_zoom_level = min(float(client_area.x) / self.img.GetWidth(),
float(client_area.y) / self.img.GetHeight())
if self.zoom < fit_window_zoom_level:
self.zoom = fit_window_zoom_level
self.need_redraw = True
new = self.image_coordinates(event.GetPosition())
# adjust dragpos so the zoom doesn't change what pixel is under the mouse
self.dragpos = wx.Point(self.dragpos.x - (new.x-mousepos.x), self.dragpos.y - (new.y-mousepos.y))
self.limit_dragpos()
def on_drag_event(self, event):
'''handle mouse drags'''
state = self.state
if not state.can_drag:
return
newpos = self.image_coordinates(event.GetPosition())
dx = -(newpos.x - self.mouse_down.x)
dy = -(newpos.y - self.mouse_down.y)
self.dragpos = wx.Point(self.dragpos.x+dx,self.dragpos.y+dy)
self.limit_dragpos()
self.mouse_down = newpos
self.need_redraw = True
self.redraw()
def show_popup_menu(self, pos):
'''show a popup menu'''
self.popup_pos = self.image_coordinates(pos)
self.frame.PopupMenu(self.wx_popup_menu, pos)
def on_mouse_event(self, event):
'''handle mouse events'''
pos = event.GetPosition()
if event.RightDown() and self.popup_menu is not None:
self.show_popup_menu(pos)
return
if event.Leaving():
self.mouse_pos = None
else:
self.mouse_pos = pos
if event.LeftDown():
self.mouse_down = self.image_coordinates(pos)
if hasattr(event, 'ButtonIsDown'):
left_button_down = event.ButtonIsDown(wx.MOUSE_BTN_LEFT)
else:
left_button_down = event.leftIsDown
if event.Dragging() and left_button_down:
self.on_drag_event(event)
def on_key_event(self, event):
'''handle key events'''
keycode = event.GetKeyCode()
if keycode == wx.WXK_HOME:
self.zoom = 1.0
self.dragpos = wx.Point(0, 0)
self.need_redraw = True
event.Skip()
def on_event(self, event):
'''pass events to the parent'''
state = self.state
if isinstance(event, wx.MouseEvent):
self.on_mouse_event(event)
if isinstance(event, wx.KeyEvent):
self.on_key_event(event)
if isinstance(event, wx.MouseEvent):
if hasattr(event, 'ButtonIsDown'):
any_button_down = event.ButtonIsDown(wx.MOUSE_BTN_ANY)
else:
any_button_down = event.leftIsDown or event.rightIsDown
if not any_button_down and event.GetWheelRotation() == 0:
# don't flood the queue with mouse movement
return
evt = mp_util.object_container(event)
pt = self.image_coordinates(wx.Point(evt.X,evt.Y))
evt.X = pt.x
evt.Y = pt.y
state.out_queue.put(evt)
def on_menu(self, event):
'''called on menu event'''
state = self.state
if self.popup_menu is not None:
ret = self.popup_menu.find_selected(event)
if ret is not None:
ret.popup_pos = self.popup_pos
if ret.returnkey == 'fitWindow':
self.fit_to_window()
elif ret.returnkey == 'fullSize':
self.full_size()
else:
state.out_queue.put(ret)
return
if self.menu is not None:
ret = self.menu.find_selected(event)
if ret is not None:
state.out_queue.put(ret)
return
def set_menu(self, menu):
'''add a menu from the parent'''
self.menu = menu
wx_menu = menu.wx_menu()
self.frame.SetMenuBar(wx_menu)
self.frame.Bind(wx.EVT_MENU, self.on_menu)
def set_popup_menu(self, menu):
'''add a popup menu from the parent'''
self.popup_menu = menu
if menu is None:
self.wx_popup_menu = None
else:
self.wx_popup_menu = menu.wx_menu()
self.frame.Bind(wx.EVT_MENU, self.on_menu)
def fit_to_window(self):
'''fit image to window'''
state = self.state
self.dragpos = wx.Point(0, 0)
client_area = state.frame.GetClientSize()
self.zoom = min(float(client_area.x) / self.img.GetWidth(),
float(client_area.y) / self.img.GetHeight())
self.need_redraw = True
def full_size(self):
'''show image at full size'''
self.dragpos = wx.Point(0, 0)
self.zoom = 1.0
self.need_redraw = True
if __name__ == "__main__":
from optparse import OptionParser
parser = OptionParser("mp_image.py <file>")
parser.add_option("--zoom", action='store_true', default=False, help="allow zoom")
parser.add_option("--drag", action='store_true', default=False, help="allow drag")
parser.add_option("--autosize", action='store_true', default=False, help="auto size window")
(opts, args) = parser.parse_args()
im = MPImage(mouse_events=True,
key_events=True,
can_drag = opts.drag,
can_zoom = opts.zoom,
auto_size = opts.autosize)
img = cv2.imread(args[0])
im.set_image(img, bgr=True)
while im.is_alive():
for event in im.events():
if isinstance(event, MPMenuItem):
print(event)
continue
print(event.ClassName)
if event.ClassName == 'wxMouseEvent':
print('mouse', event.X, event.Y)
if event.ClassName == 'wxKeyEvent':
print('key %u' % event.KeyCode)
time.sleep(0.1)
| Dronecode/MAVProxy | MAVProxy/modules/lib/mp_image.py | Python | gpl-3.0 | 20,342 | 0.00295 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import unittest
from tracopt.versioncontrol.svn.tests import svn_fs
def suite():
suite = unittest.TestSuite()
suite.addTest(svn_fs.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| dafrito/trac-mirror | tracopt/versioncontrol/svn/tests/__init__.py | Python | bsd-3-clause | 731 | 0.002736 |
# Copyright 2021 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START recaptcha_enterprise_migrate_site_key]
from google.cloud import recaptchaenterprise_v1
from list_site_keys import list_site_keys
def migrate_site_key(project_id: str, recaptcha_site_key: str) -> None:
""" Migrate a key from reCAPTCHA (non-Enterprise) to reCAPTCHA Enterprise.
If you created the key using Admin console: https://www.google.com/recaptcha/admin/site,
then use this API to migrate to reCAPTCHA Enterprise.
For more info, see: https://cloud.google.com/recaptcha-enterprise/docs/migrate-recaptcha
Args:
project_id: Google Cloud Project ID.
recaptcha_site_key: Specify the site key to migrate.
"""
client = recaptchaenterprise_v1.RecaptchaEnterpriseServiceClient()
# Specify the key name to migrate.
name = f"projects/{project_id}/keys/{recaptcha_site_key}"
request = recaptchaenterprise_v1.MigrateKeyRequest()
request.name = name
response = client.migrate_key(request)
# To verify if the site key has been migrated, use 'list_site_keys' to check if the
# key is present.
for key in list_site_keys(project_id):
if key.name == response.name:
print(f"Key migrated successfully: {recaptcha_site_key}")
# [END recaptcha_enterprise_migrate_site_key]
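# Example invocation (illustrative; both identifiers are placeholders):
#   migrate_site_key("my-gcp-project", "my-recaptcha-site-key")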
| googleapis/python-recaptcha-enterprise | samples/snippets/migrate_site_key.py | Python | apache-2.0 | 1,870 | 0.001604 |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
from op_test import OpTest
class TestElementwisePowOp(OpTest):
def setUp(self):
self.op_type = "elementwise_pow"
self.inputs = {
'X': np.random.uniform(0.1, 1, [13, 17]).astype("float32"),
'Y': np.random.uniform(0.1, 1, [13, 17]).astype("float32")
}
self.outputs = {'Out': np.power(self.inputs['X'], self.inputs['Y'])}
def test_check_output(self):
self.check_output()
class TestElementwisePowOp_scalar(TestElementwisePowOp):
def setUp(self):
self.op_type = "elementwise_pow"
self.inputs = {
'X': np.random.rand(2, 3, 4).astype('float32'),
'Y': np.random.rand(1).astype('float32')
}
self.outputs = {'Out': np.power(self.inputs['X'], self.inputs['Y'])}
if __name__ == '__main__':
unittest.main()
| Canpio/Paddle | python/paddle/fluid/tests/unittests/test_elementwise_pow_op.py | Python | apache-2.0 | 1,482 | 0 |
from django.http import JsonResponse
from django.shortcuts import render
from django.views.generic import View
# model
#from drawroute.models import *
# Create your views here.
class MapHandler(View):
'''This class manages the map where lines are drawn '''
def __init__(self):
self.context={}
def get(self, request):
template = "drawroute.html"
return render(request, template, self.context)
| SmartcitySantiagoChile/onlineGPS | drawroute/views.py | Python | mit | 422 | 0.018957 |
from .shape import Shape
from .shape import Pane
from .shape import mergeBoundary
from .shape import offsetBoundary
class StrokePath(Shape):
def __init__(self, segments):
self.segments = segments
boundary = self.computeBoundary()
self.pane = Pane(*boundary)
def __eq__(self, other):
return (isinstance(other, self.__class__)
and self.getSegments() == other.getSegments())
def __str__(self):
return "-".join(map(lambda s: str(s), self.getSegments()))
def __repr__(self):
return "StrokePath({0})".format(",".join(map(lambda s: str(s), self.getSegments())))
def getSegments(self):
return self.segments
def getPane(self):
return self.pane
def draw(self, drawingSystem):
segments=self.getSegments()
for segment in segments:
segment.draw(drawingSystem)
def computeBoundary(self):
segments=self.getSegments()
currentPoint=(0, 0)
totalBoundary=(0, 0, 0, 0)
for segment in segments:
boundary=segment.computeBoundary()
newBoundary=offsetBoundary(boundary, currentPoint)
totalBoundary=mergeBoundary(totalBoundary, newBoundary)
endPoint=segment.getEndPoint()
currentPoint=(currentPoint[0]+endPoint[0], currentPoint[1]+endPoint[1], )
return totalBoundary
def computeBoundaryWithStartPoint(self, startPoint):
strokePathBoundary = self.computeBoundary()
return offsetBoundary(strokePathBoundary, startPoint)
class StrokePathGenerator:
def __init__(self, segmentFactory):
self.segmentFactory = segmentFactory
def getSegmentFactory(self):
return self.segmentFactory
def generate(self, parameters):
strokeSegments = self.computeStrokeSegments(parameters)
return StrokePath(strokeSegments)
def parseExpression(self, parameterExpressionList):
return []
def computeStrokeSegments(self, paramList):
return []
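# Usage sketch (hypothetical; the concrete segment factory lives elsewhere in
# this package): a generator parses a parameter expression list and builds a
# StrokePath from primitive segments.
#
#   generator = StrokePathGenerator_橫折(segmentFactory)
#   params = generator.parseExpression(["90", "60"])  # w1=90, h2=60
#   path = generator.generate(params)
#   pane = path.getPane()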
class StrokePathGenerator_點(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==2
assert int(l[1])>0
return [int(l[0]), int(l[1])]
def computeStrokeSegments(self, paramList):
w=paramList[0]
h=paramList[1]
return self.getSegmentFactory().generateSegments_點(w, h)
class StrokePathGenerator_圈(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==2
assert int(l[0])>0
assert int(l[1])>0
return [int(l[0]), int(l[1])]
def computeStrokeSegments(self, paramList):
w=paramList[0]
h=paramList[1]
return self.getSegmentFactory().generateSegments_圈(w, h)
class StrokePathGenerator_橫(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==1
assert int(l[0])>0
return [int(l[0])]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
return self.getSegmentFactory().generateSegments_橫(w1)
class StrokePathGenerator_橫鉤(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==3
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
return [int(l[0]), int(l[1]), int(l[2]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
w2=paramList[1]
h2=paramList[2]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_撇(w2, h2))
return segments
class StrokePathGenerator_橫折(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==2
assert int(l[0])>0
assert int(l[1])>0
return [int(l[0]), int(l[1]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h2=paramList[1]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_豎(h2))
return segments
class StrokePathGenerator_橫折折(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==3
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
return [int(l[0]), int(l[1]), int(l[2]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h2=paramList[1]
w3=paramList[2]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_豎(h2))
segments.extend(self.getSegmentFactory().generateSegments_橫(w3))
return segments
class StrokePathGenerator_橫折提(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==4
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h2=paramList[1]
w3=paramList[2]
h3=paramList[3]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_豎(h2))
segments.extend(self.getSegmentFactory().generateSegments_提(w3, h3))
return segments
class StrokePathGenerator_橫折折撇(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==6
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
assert int(l[4])>0
assert int(l[5])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), int(l[4]), int(l[5]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
w2=paramList[1]
h2=paramList[2]
w3=paramList[3]
w4=paramList[4]
h4=paramList[5]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_撇(w2, h2))
segments.extend(self.getSegmentFactory().generateSegments_橫(w3))
segments.extend(self.getSegmentFactory().generateSegments_撇(w4, h4))
return segments
class StrokePathGenerator_橫折鉤(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==5
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
assert int(l[4])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), int(l[4]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
w2=paramList[1]
h2=paramList[2]
w3=paramList[3]
h3=paramList[4]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_撇鉤之撇(w2, h2))
segments.extend(self.getSegmentFactory().generateSegments_鉤(w3, h3))
return segments
class StrokePathGenerator_橫折彎(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==4
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h2=paramList[1]
w2=paramList[2]
cr=paramList[3]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_豎(h2 - cr))
segments.extend(self.getSegmentFactory().generateSegments_曲(cr))
segments.extend(self.getSegmentFactory().generateSegments_橫(w2 - cr))
return segments
class StrokePathGenerator_橫撇(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==3
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
return [int(l[0]), int(l[1]), int(l[2]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
w2=paramList[1]
h2=paramList[2]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_撇(w2, h2))
return segments
class StrokePathGenerator_橫斜彎鉤(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==6
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
assert int(l[4])>0
assert int(l[5])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), int(l[4]), int(l[5]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h2=paramList[1]
w2l=paramList[2]
w2r=paramList[3]
cr=paramList[4]
h3=paramList[5]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_撇曲(w2l, w2r, h2, cr))
segments.extend(self.getSegmentFactory().generateSegments_上(h3))
return segments
class StrokePathGenerator_橫折折折鉤(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==8
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
assert int(l[4])>0
assert int(l[5])>0
assert int(l[6])>0
assert int(l[7])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), int(l[4]), int(l[5]), int(l[6]), int(l[7]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
w2=paramList[1]
h2=paramList[2]
w3=paramList[3]
w4=paramList[4]
h4=paramList[5]
w5=paramList[6]
h5=paramList[7]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_撇(w2, h2))
segments.extend(self.getSegmentFactory().generateSegments_橫(w3))
segments.extend(self.getSegmentFactory().generateSegments_撇鉤之撇(w4, h4))
segments.extend(self.getSegmentFactory().generateSegments_鉤(w5, h5))
return segments
class StrokePathGenerator_橫斜鉤(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==4
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
w2=paramList[1]
h2=paramList[2]
h3=paramList[3]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_斜鉤之斜(w2, h2))
segments.extend(self.getSegmentFactory().generateSegments_上(h3))
return segments
class StrokePathGenerator_橫折折折(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
assert len(l)==4
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h2=paramList[1]
w3=paramList[2]
h4=paramList[3]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_豎(h2))
segments.extend(self.getSegmentFactory().generateSegments_橫(w3))
segments.extend(self.getSegmentFactory().generateSegments_豎(h4))
return segments
class StrokePathGenerator_豎(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==1
assert int(l[0])>0
return [int(l[0]), ]
def computeStrokeSegments(self, paramList):
h1=paramList[0]
return self.getSegmentFactory().generateSegments_豎(h1)
class StrokePathGenerator_豎折(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==2
assert int(l[0])>0
assert int(l[1])>0
return [int(l[0]), int(l[1]), ]
def computeStrokeSegments(self, paramList):
h1=paramList[0]
w2=paramList[1]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_豎(h1))
segments.extend(self.getSegmentFactory().generateSegments_橫(w2))
return segments
class StrokePathGenerator_豎彎左(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==2
assert int(l[0])>0
assert int(l[1])>0
return [int(l[0]), int(l[1]), ]
def computeStrokeSegments(self, paramList):
h1=paramList[0]
w2=paramList[1]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_豎(h1))
segments.extend(self.getSegmentFactory().generateSegments_左(w2))
return segments
class StrokePathGenerator_豎提(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==3
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
return [int(l[0]), int(l[1]), int(l[2]), ]
def computeStrokeSegments(self, paramList):
h1=paramList[0]
w2=paramList[1]
h2=paramList[2]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_豎(h1))
segments.extend(self.getSegmentFactory().generateSegments_提(w2, h2))
return segments
class StrokePathGenerator_豎折折(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==3
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
return [int(l[0]), int(l[1]), int(l[2]), ]
def computeStrokeSegments(self, paramList):
h1=paramList[0]
w2=paramList[1]
h3=paramList[2]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_豎(h1))
segments.extend(self.getSegmentFactory().generateSegments_橫(w2))
segments.extend(self.getSegmentFactory().generateSegments_豎(h3))
return segments
class StrokePathGenerator_豎折彎鉤(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==7
assert int(l[0])>=0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
assert int(l[4])>0
assert int(l[5])>0
assert int(l[6])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), int(l[4]), int(l[5]), int(l[6]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
w2=paramList[2]
w3=paramList[3]
h3=paramList[4]
w4=paramList[5]
h4=paramList[6]
segments=[]
if w1>0:
segments.extend(self.getSegmentFactory().generateSegments_撇(w1, h1))
elif w1<0:
assert False
else:
segments.extend(self.getSegmentFactory().generateSegments_豎(h1))
segments.extend(self.getSegmentFactory().generateSegments_橫(w2))
segments.extend(self.getSegmentFactory().generateSegments_撇鉤之撇(w3, h3))
segments.extend(self.getSegmentFactory().generateSegments_鉤(w4, h4))
return segments
class StrokePathGenerator_豎彎鉤(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==4
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), ]
def computeStrokeSegments(self, paramList):
h1=paramList[0]
w1=paramList[1]
cr=paramList[2]
h2=paramList[3]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_豎(h1-cr))
segments.extend(self.getSegmentFactory().generateSegments_曲(cr))
segments.extend(self.getSegmentFactory().generateSegments_橫(w1-cr))
segments.extend(self.getSegmentFactory().generateSegments_上(h2))
return segments
class StrokePathGenerator_豎彎(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==3
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
return [int(l[0]), int(l[1]), int(l[2]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
cr=paramList[2]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_豎(h1))
segments.extend(self.getSegmentFactory().generateSegments_曲(cr))
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
return segments
class StrokePathGenerator_豎鉤(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==3
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
return [int(l[0]), int(l[1]), int(l[2]), ]
def computeStrokeSegments(self, paramList):
h1=paramList[0]
w2=paramList[1]
h2=paramList[2]
hs = h1 - h2*3
hp = h2*3
wp = w2//4
wg=w2//2
hg=wg
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_豎撇(wp, hs, hp))
segments.extend(self.getSegmentFactory().generateSegments_鉤(wg, hg))
return segments
class StrokePathGenerator_斜鉤(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==3
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
return [int(l[0]), int(l[1]), int(l[2]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
h2=paramList[2]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_斜鉤之斜(w1, h1))
segments.extend(self.getSegmentFactory().generateSegments_上(h2))
return segments
class StrokePathGenerator_彎鉤(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==4
# assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
w2=paramList[2]
h2=paramList[3]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_彎鉤之彎(w1, h1))
segments.extend(self.getSegmentFactory().generateSegments_鉤(w2, h2))
return segments
class StrokePathGenerator_撇鉤(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==4
# assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
w2=paramList[2]
h2=paramList[3]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_彎鉤之彎(w1, h1))
segments.extend(self.getSegmentFactory().generateSegments_鉤(w2, h2))
return segments
class StrokePathGenerator_撇(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==2
assert int(l[0])>0
assert int(l[1])>0
return [int(l[0]), int(l[1]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_撇(w1, h1))
return segments
class StrokePathGenerator_撇點(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==4
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
w2=paramList[2]
h2=paramList[3]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_撇(w1, h1))
segments.extend(self.getSegmentFactory().generateSegments_點(w2, h2))
return segments
class StrokePathGenerator_撇橫(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==4
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
# assert int(l[3])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
w2=paramList[2]
h2=paramList[3]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_撇(w1, h1))
if h2>0:
segments.extend(self.getSegmentFactory().generateSegments_點(w2, h2))
elif h2<0:
segments.extend(self.getSegmentFactory().generateSegments_提(w2, -h2))
else:
segments.extend(self.getSegmentFactory().generateSegments_橫(w2))
return segments
class StrokePathGenerator_撇橫撇(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==5
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
assert int(l[4])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), int(l[4]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
w2=paramList[2]
w3=paramList[3]
h3=paramList[4]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_撇(w1, h1))
segments.extend(self.getSegmentFactory().generateSegments_橫(w2))
segments.extend(self.getSegmentFactory().generateSegments_撇(w3, h3))
return segments
class StrokePathGenerator_豎撇(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==2
assert int(l[0])>0
assert int(l[1])>0
return [int(l[0]), int(l[1]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
hs = h1 - h1//2
hp = h1 - (hs)
wp = w1
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_豎撇(w1, hs, hp))
return segments
class StrokePathGenerator_提(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==2
assert int(l[0])>0
assert int(l[1])>0
return [int(l[0]), int(l[1]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
return self.getSegmentFactory().generateSegments_提(w1, h1)
class StrokePathGenerator_捺(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==2
assert int(l[0])>0
assert int(l[1])>0
return [int(l[0]), int(l[1]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
return self.getSegmentFactory().generateSegments_捺(w1, h1)
class StrokePathGenerator_臥捺(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==2
assert int(l[0])>0
assert int(l[1])>0
return [int(l[0]), int(l[1]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
return self.getSegmentFactory().generateSegments_臥捺(w1, h1)
class StrokePathGenerator_提捺(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==4
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
h1=paramList[1]
w2=paramList[2]
h2=paramList[3]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_提(w1, h1))
segments.extend(self.getSegmentFactory().generateSegments_捺(w2, h2))
return segments
class StrokePathGenerator_橫捺(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==3
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
return [int(l[0]), int(l[1]), int(l[2]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
w2=paramList[1]
h2=paramList[2]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_捺(w2, h2))
return segments
class StrokePathGenerator_橫撇彎鉤(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==7
assert int(l[0])>0
assert int(l[1])>0
assert int(l[2])>0
assert int(l[3])>0
assert int(l[4])>0
assert int(l[5])>0
assert int(l[6])>0
return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), int(l[4]), int(l[5]), int(l[6]), ]
def computeStrokeSegments(self, paramList):
w1=paramList[0]
w2=paramList[1]
h2=paramList[2]
w3=paramList[3]
h3=paramList[4]
w4=paramList[5]
h4=paramList[6]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_橫(w1))
segments.extend(self.getSegmentFactory().generateSegments_撇(w2, h2))
segments.extend(self.getSegmentFactory().generateSegments_彎鉤之彎(w3, h3))
segments.extend(self.getSegmentFactory().generateSegments_鉤(w4, h4))
return segments
class StrokePathGenerator_豎彎折(StrokePathGenerator):
def parseExpression(self, parameterExpressionList):
l=parameterExpressionList
assert len(l)==2
assert int(l[0])>0
assert int(l[1])>0
return [int(l[0]), int(l[1]), ]
def computeStrokeSegments(self, paramList):
h1=paramList[0]
w1=paramList[1]
segments=[]
segments.extend(self.getSegmentFactory().generateSegments_豎(h1))
segments.extend(self.getSegmentFactory().generateSegments_左(w1))
return segments
| xrloong/Xie | src/xie/graphics/stroke_path.py | Python | apache-2.0 | 24,937 | 0.0473 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
from msrest.exceptions import HttpOperationError
class FabricError(Model):
"""The REST API operations for Service Fabric return standard HTTP status
codes. This type defines the additional information returned from the
Service Fabric API operations that are not successful.
:param error: Error object containing error code and error message.
:type error: ~azure.servicefabric.models.FabricErrorError
"""
_validation = {
'error': {'required': True},
}
_attribute_map = {
'error': {'key': 'Error', 'type': 'FabricErrorError'},
}
def __init__(self, error):
super(FabricError, self).__init__()
self.error = error
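# Illustrative usage (a sketch; FabricErrorError comes from this models
# package, and the field names below are assumptions about its shape):
#
#   err = FabricError(error=FabricErrorError(code='FABRIC_E_NODE_NOT_FOUND',
#                                            message='Node not found.'))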
class FabricErrorException(HttpOperationError):
"""Server responsed with exception of type: 'FabricError'.
:param deserialize: A deserializer
:param response: Server response to be deserialized.
"""
def __init__(self, deserialize, response, *args):
super(FabricErrorException, self).__init__(deserialize, response, 'FabricError', *args)
| lmazuel/azure-sdk-for-python | azure-servicefabric/azure/servicefabric/models/fabric_error.py | Python | mit | 1,580 | 0.000633 |
##########################################################################
#
# Processor specific code
# CPU = "Z80"
# Description = "Zilog 8-bit microprocessor."
# DataWidth = 8 # 8-bit data
# AddressWidth = 16 # 16-bit addresses
# Maximum length of an instruction (for formatting purposes)
maxLength = 4
# Leadin bytes for multibyte instructions
leadInBytes = [0xcb, 0xdd, 0xed, 0xfd]
# Addressing mode table
# List of addressing modes and corresponding format strings for operands.
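# Example: for mode "a,indix+d" the format string "a,(ix+${0:02X})" renders
# the first operand byte in hex, so operand byte 0x12 disassembles as
# "a,(ix+$12)"; "${1:02X}{0:02X}" pairs render little-endian 16-bit operands.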
addressModeTable = {
"implied" : "",
"0" : "0",
"0,a" : "0,a",
"0,b" : "0,b",
"0,c" : "0,c",
"0,d" : "0,d",
"0,e" : "0,e",
"0,h" : "0,h",
"0,indhl" : "0,(hl)",
"0,l" : "0,l",
"00" : "$00",
"08" : "$08",
"1" : "1",
"1,a" : "1,a",
"1,b" : "1,b",
"1,c" : "1,c",
"1,d" : "1,d",
"1,e" : "1,e",
"1,h" : "1,h",
"1,indhl" : "1,(hl)",
"1,l" : "1,l",
"10" : "$10",
"18" : "$18",
"2" : "2",
"2,a" : "2,a",
"2,b" : "2,b",
"2,c" : "2,c",
"2,d" : "2,d",
"2,e" : "2,e",
"2,h" : "2,h",
"2,indhl" : "2,(hl)",
"2,l" : "2,l",
"20" : "$20",
"28" : "$28",
"3,a" : "3,a",
"3,b" : "3,b",
"3,c" : "3,c",
"3,d" : "3,d",
"3,e" : "3,e",
"3,h" : "3,h",
"3,indhl" : "3,(hl)",
"3,l" : "3,l",
"30" : "$30",
"38" : "$38",
"4,a" : "4,a",
"4,b" : "4,b",
"4,c" : "4,c",
"4,d" : "4,d",
"4,e" : "4,e",
"4,h" : "4,h",
"4,indhl" : "4,(hl)",
"4,l" : "4,l",
"5,a" : "5,a",
"5,b" : "5,b",
"5,c" : "5,c",
"5,d" : "5,d",
"5,e" : "5,e",
"5,h" : "5,h",
"5,indhl" : "5,(hl)",
"5,l" : "5,l",
"6,a" : "6,a",
"6,b" : "6,b",
"6,c" : "6,c",
"6,d" : "6,d",
"6,e" : "6,e",
"6,h" : "6,h",
"6,indhl" : "6,(hl)",
"6,l" : "6,l",
"7,a" : "7,a",
"7,b" : "7,b",
"7,c" : "7,c",
"7,d" : "7,d",
"7,e" : "7,e",
"7,h" : "7,h",
"7,indhl" : "7,(hl)",
"7,l" : "7,l",
"a" : "a",
"a,a" : "a,a",
"a,b" : "a,b",
"a,c" : "a,c",
"a,d" : "a,d",
"a,e" : "a,e",
"a,h" : "a,h",
"a,i" : "a,i",
"a,indbc" : "a,(bc)",
"a,indc" : "a,(c)",
"a,indde" : "a,(de)",
"a,indhl" : "a,(hl)",
"a,indix+d" : "a,(ix+${0:02X})",
"a,indiy+d" : "a,(iy+${0:02X})",
"a,indn" : "a,(${0:02X})",
"a,indnn" : "a,(${1:02X}{0:02X})",
"a,l" : "a,l",
"a,n" : "a,${0:02X}",
"a,r" : "a,r",
"af" : "af",
"af,af'" : "af,af'",
"b" : "b",
"b,a" : "b,a",
"b,b" : "b,b",
"b,c" : "b,c",
"b,d" : "b,d",
"b,e" : "b,e",
"b,h" : "b,h",
"b,indc" : "b,(c)",
"b,indhl" : "b,(hl)",
"b,indix+d" : "b,(ix+${0:02X})",
"b,indiy+d" : "b,(iy+${0:02X})",
"b,l" : "b,l",
"b,n" : "b,${0:02X}",
"bc" : "bc",
"bc,indaa" : "bc,(${1:02X}{0:02X})",
"bc,nn" : "bc,${1:02X}{0:02X}",
"c" : "c",
"c,a" : "c,a",
"c,b" : "c,b",
"c,c" : "c,c",
"c,d" : "c,d",
"c,e" : "c,e",
"c,h" : "c,h",
"c,indc" : "c,(c)",
"c,indhl" : "c,(hl)",
"c,indix+d" : "c,(ix+${0:02X})",
"c,indiy+d" : "c,(iy+${0:02X})",
"c,l" : "c,l",
"c,n" : "c,${0:02X}",
"c,pcr" : "c,${0:04X}",
"c,nn" : "c,${1:02X}{0:02X}",
"d" : "d",
"d,a" : "d,a",
"d,b" : "d,b",
"d,c" : "d,c",
"d,d" : "d,d",
"d,e" : "d,e",
"d,h" : "d,h",
"d,indc" : "d,(c)",
"d,indhl" : "d,(hl)",
"d,indix+d" : "d,(ix+${0:02X})",
"d,indiy+d" : "d,(iy+${0:02X})",
"d,l" : "d,l",
"d,n" : "d,${0:02X}",
"de" : "de",
"de,hl" : "de,hl",
"de,indaa" : "de,(${1:02X}{0:02X})",
"de,nn" : "de,${1:02X}{0:02X}",
"e" : "e",
"e,a" : "e,a",
"e,b" : "e,b",
"e,c" : "e,c",
"e,d" : "e,d",
"e,e" : "e,e",
"e,h" : "e,h",
"e,indc" : "e,(c)",
"e,indhl" : "e,(hl)",
"e,indix+d" : "e,(ix+${0:02X})",
"e,indiy+d" : "e,(iy+${0:02X})",
"e,l" : "e,l",
"e,n" : "e,${0:02X}",
"h" : "h",
"h,a" : "h,a",
"h,b" : "h,b",
"h,c" : "h,c",
"h,d" : "h,d",
"h,e" : "h,e",
"h,h" : "h,h",
"h,indc" : "h,(c)",
"h,indhl" : "h,(hl)",
"h,indix+d" : "h,(ix+${0:02X})",
"h,indiy+d" : "h,(iy+${0:02X})",
"h,l" : "h,l",
"h,n" : "h,${0:02X}",
"hl" : "hl",
"hl,bc" : "hl,bc",
"hl,de" : "hl,de",
"hl,hl" : "hl,hl",
"hl,indnn" : "hl,(${1:02X}{0:02X})",
"hl,nn" : "hl,${1:02X}{0:02X}",
"hl,sp" : "hl,sp",
"i,a" : "i,a",
"indaa,bc" : "(${1:02X}{0:02X}),bc",
"indaa,de" : "(${1:02X}{0:02X}),de",
"indaa,ix" : "(${1:02X}{0:02X}),ix",
"indaa,iy" : "(${1:02X}{0:02X}),iy",
"indaa,sp" : "(${1:02X}{0:02X}),sp",
"indbc,a" : "(bc),a",
"indc,a" : "(c),a",
"indc,b" : "(c),b",
"indc,c" : "(c),c",
"indc,d" : "(c),d",
"indc,e" : "(c),e",
"indc,h" : "(c),h",
"indc,l" : "(c),l",
"indde,a" : "(de),a",
"indhl" : "(hl)",
"indhl,a" : "(hl),a",
"indhl,b" : "(hl),b",
"indhl,c" : "(hl),c",
"indhl,d" : "(hl),d",
"indhl,e" : "(hl),e",
"indhl,h" : "(hl),h",
"indhl,l" : "(hl),l",
"indhl,n" : "(hl),${0:02X}",
"indix+d" : "(ix+${0:02X})",
"indix+d,a" : "(ix+${0:02X}),a",
"indiy+d,a" : "(iy+${0:02X}),a",
"indix+d,b" : "(ix+${0:02X}),b",
"indix+d,c" : "(ix+${0:02X}),c",
"indix+d,d" : "(ix+${0:02X}),d",
"indix+d,e" : "(ix+${0:02X}),e",
"indix+d,h" : "(ix+${0:02X}),h",
"indix+d,l" : "(ix+${0:02X}),l",
"indix+d,n" : "(ix+${0:02X}),${1:02X}",
"indiy+d" : "(iy+${0:02X})",
"indiy+d,b" : "(iy+${0:02X}),b",
"indiy+d,c" : "(iy+${0:02X}),c",
"indiy+d,d" : "(iy+${0:02X}),d",
"indiy+d,e" : "(iy+${0:02X}),e",
"indiy+d,h" : "(iy+${0:02X}),h",
"indiy+d,l" : "(iy+${0:02X}),l",
"indiy+d,n" : "(iy+${0:02X}),${1:02X}",
"indn,a" : "(${0:02X}),a",
"indnn,a" : "(${1:02X}{0:02X}),a",
"indnn,hl" : "(${1:02X}{0:02X}),hl",
"indsp,hl" : "(sp),hl",
"ix" : "ix",
"ix,aa" : "ix,${1:02X}{0:02X}",
"ix,bc" : "ix,bc",
"ix,de" : "ix,de",
"ix,indaa" : "ix,(${1:02X}{0:02X})",
"ix,ix" : "ix,ix",
"ix,sp" : "ix,sp",
"iy" : "iy",
"iy,aa" : "iy,${1:02X}{0:02X}",
"iy,bc" : "iy,bc",
"iy,bc" : "iy,bc",
"iy,de" : "iy,de",
"iy,indaa" : "iy,(${1:02X}{0:02X})",
"iy,indaa" : "iy,(${1:02X}{0:02X})",
"iy,iy" : "iy,iy",
"iy,sp" : "iy,sp",
"l" : "l",
"l,a" : "l,a",
"l,b" : "l,b",
"l,c" : "l,c",
"l,d" : "l,d",
"l,e" : "l,e",
"l,h" : "l,h",
"l,indc" : "l,(c)",
"l,indhl" : "l,(hl)",
"l,indix+d" : "l,(ix+${0:02X})",
"l,indiy+d" : "l,(iy+${0:02X})",
"l,l" : "l,l",
"l,n" : "l,${0:02X}",
"m" : "m",
"m,nn" : "m,${1:02X}{0:02X}",
"n" : "${0:02X}",
"n,pcr" : "${0:04X}",
"n,indix+d" : "n,(ix+${0:02X})",
"n,indiy+d" : "n,(iy+${0:02X})",
"nc" : "nc",
"nc,pcr" : "nc,${0:04X}",
"nc,nn" : "nc,${1:02X}{0:02X}",
"nn" : "${1:02X}{0:02X}",
"nz" : "nz",
"nz,pcr" : "nz,${0:04X}",
"nz,nn" : "nz,${1:02X}{0:02X}",
"p" : "p",
"p,nn" : "p,${1:02X}{0:02X}",
"pcr" : "${0:04X}",
"pe" : "pe",
"pe,nn" : "pe,${1:02X}{0:02X}",
"po" : "po",
"po,nn" : "po,${1:02X}{0:02X}",
"r,a" : "r,a",
"sp" : "sp",
"sp,hl" : "sp,hl",
"sp,indaa" : "sp,(${1:02X}{0:02X})",
"sp,nn" : "sp,${1:02X}{0:02X}",
"z" : "z",
"z,pcr" : "z,${0:04X}",
"z,nn" : "z,${1:02X}{0:02X}",
}
# Op Code Table
# Key is numeric opcode (possibly multiple bytes)
# Value is a list:
# # bytes
# mnemonic
# addressing mode
# flags (e.g. pcr)
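# Example: the entry 0x20 : [ 2, "jr", "nz,pcr", pcr ] describes a two-byte
# conditional relative jump; the optional pcr flag tells the disassembler to
# turn the signed offset operand into an absolute target address.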
opcodeTable = {
0x00 : [ 1, "nop", "implied" ],
0x01 : [ 3, "ld", "bc,nn" ],
0x02 : [ 1, "ld", "indbc,a" ],
0x03 : [ 1, "inc", "bc" ],
0x04 : [ 1, "inc", "b" ],
0x05 : [ 1, "dec", "b" ],
0x06 : [ 2, "ld", "b,n" ],
0x07 : [ 1, "rlca", "implied" ],
0x08 : [ 1, "ex", "af,af'" ],
0x09 : [ 1, "add", "hl,bc" ],
0x0a : [ 1, "ld", "a,indbc" ],
0x0b : [ 1, "dec", "bc" ],
0x0c : [ 1, "inc", "c" ],
0x0d : [ 1, "dec", "c" ],
0x0e : [ 2, "ld", "c,n" ],
0x0f : [ 1, "rrca", "implied" ],
0x10 : [ 2, "djnz", "pcr", pcr ],
0x11 : [ 3, "ld", "de,nn" ],
0x12 : [ 1, "ld", "indde,a" ],
0x13 : [ 1, "inc", "de" ],
0x14 : [ 1, "inc", "d" ],
0x15 : [ 1, "dec", "d" ],
0x16 : [ 2, "ld", "d,n" ],
0x17 : [ 1, "rla", "implied" ],
0x18 : [ 2, "jr", "pcr", pcr ],
0x19 : [ 1, "add", "hl,de" ],
0x1a : [ 1, "ld", "a,indde" ],
0x1b : [ 1, "dec", "de" ],
0x1c : [ 1, "inc", "e" ],
0x1d : [ 1, "dec", "e" ],
0x1e : [ 2, "ld", "e,n" ],
0x1f : [ 1, "rra", "implied" ],
0x20 : [ 2, "jr", "nz,pcr", pcr ],
0x21 : [ 3, "ld", "hl,nn" ],
0x22 : [ 3, "ld", "indnn,hl" ],
0x23 : [ 1, "inc", "hl" ],
0x24 : [ 1, "inc", "h" ],
0x25 : [ 1, "dec", "h" ],
0x26 : [ 2, "ld", "h,n" ],
0x27 : [ 1, "daa", "implied" ],
0x28 : [ 2, "jr", "z,pcr", pcr ],
0x29 : [ 1, "add", "hl,hl" ],
0x2a : [ 3, "ld", "hl,indnn" ],
0x2b : [ 1, "dec", "hl" ],
0x2c : [ 1, "inc", "l" ],
0x2d : [ 1, "dec", "l" ],
0x2e : [ 2, "ld", "l,n" ],
0x2f : [ 1, "cpl", "implied" ],
0x30 : [ 2, "jr", "nc,pcr", pcr ],
0x31 : [ 3, "ld", "sp,nn" ],
0x32 : [ 3, "ld", "indnn,a" ],
0x33 : [ 1, "inc", "sp" ],
0x34 : [ 1, "inc", "indhl" ],
0x35 : [ 1, "dec", "indhl" ],
0x36 : [ 2, "ld", "indhl,n" ],
0x37 : [ 1, "scf", "implied" ],
0x38 : [ 2, "jr", "c,pcr", pcr ],
0x39 : [ 1, "add", "hl,sp" ],
0x3a : [ 3, "ld", "a,indnn" ],
0x3b : [ 1, "dec", "sp" ],
0x3c : [ 1, "inc", "a" ],
0x3d : [ 1, "dec", "a" ],
0x3e : [ 2, "ld", "a,n" ],
0x3f : [ 1, "ccf", "implied" ],
0x40 : [ 1, "ld", "b,b" ],
0x41 : [ 1, "ld", "b,c" ],
0x42 : [ 1, "ld", "b,d" ],
0x43 : [ 1, "ld", "b,e" ],
0x44 : [ 1, "ld", "b,h" ],
0x45 : [ 1, "ld", "b,l" ],
0x46 : [ 1, "ld", "b,indhl" ],
0x47 : [ 1, "ld", "b,a" ],
0x48 : [ 1, "ld", "c,b" ],
0x49 : [ 1, "ld", "c,c" ],
0x4a : [ 1, "ld", "c,d" ],
0x4b : [ 1, "ld", "c,e" ],
0x4c : [ 1, "ld", "c,h" ],
0x4d : [ 1, "ld", "c,l" ],
0x4e : [ 1, "ld", "c,indhl" ],
0x4f : [ 1, "ld", "c,a" ],
0x50 : [ 1, "ld", "d,b" ],
0x51 : [ 1, "ld", "d,c" ],
0x52 : [ 1, "ld", "d,d" ],
0x53 : [ 1, "ld", "d,e" ],
0x54 : [ 1, "ld", "d,h" ],
0x55 : [ 1, "ld", "d,l" ],
0x56 : [ 1, "ld", "d,indhl" ],
0x57 : [ 1, "ld", "d,a" ],
0x58 : [ 1, "ld", "e,b" ],
0x59 : [ 1, "ld", "e,c" ],
0x5a : [ 1, "ld", "e,d" ],
0x5b : [ 1, "ld", "e,e" ],
0x5c : [ 1, "ld", "e,h" ],
0x5d : [ 1, "ld", "e,l" ],
0x5e : [ 1, "ld", "e,indhl" ],
0x5f : [ 1, "ld", "e,a" ],
0x60 : [ 1, "ld", "h,b" ],
0x61 : [ 1, "ld", "h,c" ],
0x62 : [ 1, "ld", "h,d" ],
0x63 : [ 1, "ld", "h,e" ],
0x64 : [ 1, "ld", "h,h" ],
0x65 : [ 1, "ld", "h,l" ],
0x66 : [ 1, "ld", "h,indhl" ],
0x67 : [ 1, "ld", "h,a" ],
0x68 : [ 1, "ld", "l,b" ],
0x69 : [ 1, "ld", "l,c" ],
0x6a : [ 1, "ld", "l,d" ],
0x6b : [ 1, "ld", "l,e" ],
0x6c : [ 1, "ld", "l,h" ],
0x6d : [ 1, "ld", "l,l" ],
0x6e : [ 1, "ld", "l,indhl" ],
0x6f : [ 1, "ld", "l,a" ],
0x70 : [ 1, "ld", "indhl,b" ],
0x71 : [ 1, "ld", "indhl,c" ],
0x72 : [ 1, "ld", "indhl,d" ],
0x73 : [ 1, "ld", "indhl,e" ],
0x74 : [ 1, "ld", "indhl,h" ],
0x75 : [ 1, "ld", "indhl,l" ],
0x76 : [ 1, "halt", "implied" ],
0x77 : [ 1, "ld", "indhl,a" ],
0x78 : [ 1, "ld", "a,b" ],
0x79 : [ 1, "ld", "a,c" ],
0x7a : [ 1, "ld", "a,d" ],
0x7b : [ 1, "ld", "a,e" ],
0x7c : [ 1, "ld", "a,h" ],
0x7d : [ 1, "ld", "a,l" ],
0x7e : [ 1, "ld", "a,indhl" ],
0x7f : [ 1, "ld", "a,a" ],
0x80 : [ 1, "add", "a,b" ],
0x81 : [ 1, "add", "a,c" ],
0x82 : [ 1, "add", "a,d" ],
0x83 : [ 1, "add", "a,e" ],
0x84 : [ 1, "add", "a,h" ],
0x85 : [ 1, "add", "a,l" ],
0x86 : [ 1, "add", "a,indhl" ],
0x87 : [ 1, "add", "a,a" ],
0x88 : [ 1, "adc", "a,b" ],
0x89 : [ 1, "adc", "a,c" ],
0x8a : [ 1, "adc", "a,d" ],
0x8b : [ 1, "adc", "a,e" ],
0x8c : [ 1, "adc", "a,h" ],
0x8d : [ 1, "adc", "a,l" ],
0x8e : [ 1, "adc", "a,indhl" ],
0x8f : [ 1, "adc", "a,a" ],
0x90 : [ 1, "sub", "b" ],
0x91 : [ 1, "sub", "c" ],
0x92 : [ 1, "sub", "d" ],
0x93 : [ 1, "sub", "e" ],
0x94 : [ 1, "sub", "h" ],
0x95 : [ 1, "sub", "l" ],
0x96 : [ 1, "sub", "indhl" ],
0x97 : [ 1, "sub", "a" ],
0x98 : [ 1, "sbc", "a,b" ],
0x99 : [ 1, "sbc", "a,c" ],
0x9a : [ 1, "sbc", "a,d" ],
0x9b : [ 1, "sbc", "a,e" ],
0x9c : [ 1, "sbc", "a,h" ],
0x9d : [ 1, "sbc", "a,l" ],
0x9e : [ 1, "sbc", "a,indhl" ],
0x9f : [ 1, "sbc", "a,a" ],
0xa0 : [ 1, "and", "b" ],
0xa1 : [ 1, "and", "c" ],
0xa2 : [ 1, "and", "d" ],
0xa3 : [ 1, "and", "e" ],
0xa4 : [ 1, "and", "h" ],
0xa5 : [ 1, "and", "l" ],
0xa6 : [ 1, "and", "indhl" ],
0xa7 : [ 1, "and", "a" ],
0xa8 : [ 1, "xor", "b" ],
0xa9 : [ 1, "xor", "c" ],
0xaa : [ 1, "xor", "d" ],
0xab : [ 1, "xor", "e" ],
0xac : [ 1, "xor", "h" ],
0xad : [ 1, "xor", "l" ],
0xae : [ 1, "xor", "indhl" ],
0xaf : [ 1, "xor", "a" ],
0xb0 : [ 1, "or", "b" ],
0xb1 : [ 1, "or", "c" ],
0xb2 : [ 1, "or", "d" ],
0xb3 : [ 1, "or", "e" ],
0xb4 : [ 1, "or", "h" ],
0xb5 : [ 1, "or", "l" ],
0xb6 : [ 1, "or", "indhl" ],
0xb7 : [ 1, "or", "a" ],
0xb8 : [ 1, "cp", "b" ],
0xb9 : [ 1, "cp", "c" ],
0xba : [ 1, "cp", "d" ],
0xbb : [ 1, "cp", "e" ],
0xbc : [ 1, "cp", "h" ],
0xbd : [ 1, "cp", "l" ],
0xbe : [ 1, "cp", "indhl" ],
0xbf : [ 1, "cp", "a" ],
0xc0 : [ 1, "ret", "nz" ],
0xc1 : [ 1, "pop", "bc" ],
0xc2 : [ 3, "jp", "nz,nn" ],
0xc3 : [ 3, "jp", "nn" ],
0xc4 : [ 3, "call","nz,nn" ],
0xc5 : [ 1, "push","bc" ],
0xc6 : [ 2, "add", "a,n" ],
0xc7 : [ 1, "rst", "00" ],
0xc8 : [ 1, "ret", "z" ],
0xc9 : [ 1, "ret", "implied" ],
0xca : [ 3, "jp", "z,nn" ],
0xcc : [ 3, "call","z,nn" ],
0xcd : [ 3, "call", "nn" ],
0xce : [ 2, "adc", "a,n" ],
0xcf : [ 1, "rst", "08" ],
0xd0 : [ 1, "ret", "nc" ],
0xd1 : [ 1, "pop", "de" ],
0xd2 : [ 3, "jp", "nc,nn" ],
0xd3 : [ 2, "out", "indn,a" ],
0xd4 : [ 3, "call", "nc,nn" ],
0xd5 : [ 1, "push", "de" ],
0xd6 : [ 2, "sub", "n" ],
0xd7 : [ 1, "rst", "10" ],
0xd8 : [ 1, "ret", "c" ],
0xd9 : [ 1, "exx", "implied" ],
0xda : [ 3, "jp", "c,nn" ],
0xdb : [ 2, "in", "a,indn" ],
0xdc : [ 3, "call", "c,nn" ],
0xde : [ 2, "sbc", "a,n" ],
0xdf : [ 1, "rst", "18" ],
0xe0 : [ 1, "ret", "po" ],
0xe1 : [ 1, "pop", "hl" ],
0xe2 : [ 3, "jp", "po,nn" ],
0xe3 : [ 1, "ex", "indsp,hl" ],
0xe4 : [ 3, "call", "po,nn" ],
0xe5 : [ 1, "push", "hl" ],
0xe6 : [ 2, "and", "n" ],
0xe7 : [ 1, "rst", "20" ],
0xe8 : [ 1, "ret", "pe" ],
0xe9 : [ 1, "jp", "indhl" ],
0xea : [ 3, "jp", "pe,nn" ],
0xeb : [ 1, "ex", "de,hl" ],
0xec : [ 3, "call", "pe,nn" ],
0xee : [ 2, "xor", "n" ],
0xef : [ 1, "rst", "28" ],
0xf0 : [ 1, "ret", "p" ],
0xf1 : [ 1, "pop", "af" ],
0xf2 : [ 3, "jp", "p,nn" ],
0xf3 : [ 1, "di", "implied" ],
0xf4 : [ 3, "call", "p,nn" ],
0xf5 : [ 1, "push", "af" ],
0xf6 : [ 2, "or", "n" ],
0xf7 : [ 1, "rst", "30" ],
0xf8 : [ 1, "ret", "m" ],
0xf9 : [ 1, "ld", "sp,hl" ],
0xfa : [ 3, "jp", "m,nn" ],
0xfb : [ 1, "ei", "implied" ],
0xfc : [ 3, "call", "m,nn" ],
0xfe : [ 2, "cp", "n" ],
0xff : [ 1, "rst", "38" ],
# Multibyte instructions
0xcb00 : [ 2, "rlc", "b" ],
0xcb01 : [ 2, "rlc", "c" ],
0xcb02 : [ 2, "rlc", "d" ],
0xcb03 : [ 2, "rlc", "e" ],
0xcb04 : [ 2, "rlc", "h" ],
0xcb05 : [ 2, "rlc", "l" ],
0xcb06 : [ 2, "rlc", "indhl" ],
0xcb07 : [ 2, "rlc", "a" ],
0xcb08 : [ 2, "rrc", "b" ],
0xcb09 : [ 2, "rrc", "c" ],
0xcb0a : [ 2, "rrc", "d" ],
0xcb0b : [ 2, "rrc", "e" ],
0xcb0c : [ 2, "rrc", "h" ],
0xcb0d : [ 2, "rrc", "l" ],
0xcb0e : [ 2, "rrc", "indhl" ],
0xcb0f : [ 2, "rrc", "a" ],
0xcb10 : [ 2, "rl", "b" ],
0xcb11 : [ 2, "rl", "c" ],
0xcb12 : [ 2, "rl", "d" ],
0xcb13 : [ 2, "rl", "e" ],
0xcb14 : [ 2, "rl", "h" ],
0xcb15 : [ 2, "rl", "l" ],
0xcb16 : [ 2, "rl", "indhl" ],
0xcb17 : [ 2, "rl", "a" ],
0xcb18 : [ 2, "rr", "b" ],
0xcb19 : [ 2, "rr", "c" ],
0xcb1a : [ 2, "rr", "d" ],
0xcb1b : [ 2, "rr", "e" ],
0xcb1c : [ 2, "rr", "h" ],
0xcb1d : [ 2, "rr", "l" ],
0xcb1e : [ 2, "rr", "indhl" ],
0xcb1f : [ 2, "rr", "a" ],
0xcb20 : [ 2, "sla", "b" ],
0xcb21 : [ 2, "sla", "c" ],
0xcb22 : [ 2, "sla", "d" ],
0xcb23 : [ 2, "sla", "e" ],
0xcb24 : [ 2, "sla", "h" ],
0xcb25 : [ 2, "sla", "l" ],
0xcb26 : [ 2, "sla", "indhl" ],
0xcb27 : [ 2, "sla", "a" ],
0xcb28 : [ 2, "sra", "b" ],
0xcb29 : [ 2, "sra", "c" ],
0xcb2a : [ 2, "sra", "d" ],
0xcb2b : [ 2, "sra", "e" ],
0xcb2c : [ 2, "sra", "h" ],
0xcb2d : [ 2, "sra", "l" ],
0xcb2e : [ 2, "sra", "indhl" ],
0xcb2f : [ 2, "sra", "a" ],
0xcb38 : [ 2, "srl", "b" ],
0xcb39 : [ 2, "srl", "c" ],
0xcb3a : [ 2, "srl", "d" ],
0xcb3b : [ 2, "srl", "e" ],
0xcb3c : [ 2, "srl", "h" ],
0xcb3d : [ 2, "srl", "l" ],
0xcb3e : [ 2, "srl", "indhl" ],
0xcb3f : [ 2, "srl", "a" ],
0xcb40 : [ 2, "bit", "0,b" ],
0xcb41 : [ 2, "bit", "0,c" ],
0xcb42 : [ 2, "bit", "0,d" ],
0xcb43 : [ 2, "bit", "0,e" ],
0xcb44 : [ 2, "bit", "0,h" ],
0xcb45 : [ 2, "bit", "0,l" ],
0xcb46 : [ 2, "bit", "0,indhl" ],
0xcb47 : [ 2, "bit", "0,a" ],
0xcb48 : [ 2, "bit", "1,b" ],
0xcb49 : [ 2, "bit", "1,c" ],
0xcb4a : [ 2, "bit", "1,d" ],
0xcb4b : [ 2, "bit", "1,e" ],
0xcb4c : [ 2, "bit", "1,h" ],
0xcb4d : [ 2, "bit", "1,l" ],
0xcb4e : [ 2, "bit", "1,indhl" ],
0xcb4f : [ 2, "bit", "1,a" ],
0xcb50 : [ 2, "bit", "2,b" ],
0xcb51 : [ 2, "bit", "2,c" ],
0xcb52 : [ 2, "bit", "2,d" ],
0xcb53 : [ 2, "bit", "2,e" ],
0xcb54 : [ 2, "bit", "2,h" ],
0xcb55 : [ 2, "bit", "2,l" ],
0xcb56 : [ 2, "bit", "2,indhl" ],
0xcb57 : [ 2, "bit", "2,a" ],
0xcb58 : [ 2, "bit", "3,b" ],
0xcb59 : [ 2, "bit", "3,c" ],
0xcb5a : [ 2, "bit", "3,d" ],
0xcb5b : [ 2, "bit", "3,e" ],
0xcb5c : [ 2, "bit", "3,h" ],
0xcb5d : [ 2, "bit", "3,l" ],
0xcb5e : [ 2, "bit", "3,indhl" ],
0xcb5f : [ 2, "bit", "3,a" ],
0xcb60 : [ 2, "bit", "4,b" ],
0xcb61 : [ 2, "bit", "4,c" ],
0xcb62 : [ 2, "bit", "4,d" ],
0xcb63 : [ 2, "bit", "4,e" ],
0xcb64 : [ 2, "bit", "4,h" ],
0xcb65 : [ 2, "bit", "4,l" ],
0xcb66 : [ 2, "bit", "4,indhl" ],
0xcb67 : [ 2, "bit", "4,a" ],
0xcb68 : [ 2, "bit", "5,b" ],
0xcb69 : [ 2, "bit", "5,c" ],
0xcb6a : [ 2, "bit", "5,d" ],
0xcb6b : [ 2, "bit", "5,e" ],
0xcb6c : [ 2, "bit", "5,h" ],
0xcb6d : [ 2, "bit", "5,l" ],
0xcb6e : [ 2, "bit", "5,indhl" ],
0xcb6f : [ 2, "bit", "5,a" ],
0xcb70 : [ 2, "bit", "6,b" ],
0xcb71 : [ 2, "bit", "6,c" ],
0xcb72 : [ 2, "bit", "6,d" ],
0xcb73 : [ 2, "bit", "6,e" ],
0xcb74 : [ 2, "bit", "6,h" ],
0xcb75 : [ 2, "bit", "6,l" ],
0xcb76 : [ 2, "bit", "6,indhl" ],
0xcb77 : [ 2, "bit", "6,a" ],
0xcb78 : [ 2, "bit", "7,b" ],
0xcb79 : [ 2, "bit", "7,c" ],
0xcb7a : [ 2, "bit", "7,d" ],
0xcb7b : [ 2, "bit", "7,e" ],
0xcb7c : [ 2, "bit", "7,h" ],
0xcb7d : [ 2, "bit", "7,l" ],
0xcb7e : [ 2, "bit", "7,indhl" ],
0xcb7f : [ 2, "bit", "7,a" ],
0xcb80 : [ 2, "res", "0,b" ],
0xcb81 : [ 2, "res", "0,c" ],
0xcb82 : [ 2, "res", "0,d" ],
0xcb83 : [ 2, "res", "0,e" ],
0xcb84 : [ 2, "res", "0,h" ],
0xcb85 : [ 2, "res", "0,l" ],
0xcb86 : [ 2, "res", "0,indhl" ],
0xcb87 : [ 2, "res", "0,a" ],
0xcb88 : [ 2, "res", "1,b" ],
0xcb89 : [ 2, "res", "1,c" ],
0xcb8a : [ 2, "res", "1,d" ],
0xcb8b : [ 2, "res", "1,e" ],
0xcb8c : [ 2, "res", "1,h" ],
0xcb8d : [ 2, "res", "1,l" ],
0xcb8e : [ 2, "res", "1,indhl" ],
0xcb8f : [ 2, "res", "1,a" ],
0xcb90 : [ 2, "res", "2,b" ],
0xcb91 : [ 2, "res", "2,c" ],
0xcb92 : [ 2, "res", "2,d" ],
0xcb93 : [ 2, "res", "2,e" ],
0xcb94 : [ 2, "res", "2,h" ],
0xcb95 : [ 2, "res", "2,l" ],
0xcb96 : [ 2, "res", "2,indhl" ],
0xcb97 : [ 2, "res", "2,a" ],
0xcb98 : [ 2, "res", "3,b" ],
0xcb99 : [ 2, "res", "3,c" ],
0xcb9a : [ 2, "res", "3,d" ],
0xcb9b : [ 2, "res", "3,e" ],
0xcb9c : [ 2, "res", "3,h" ],
0xcb9d : [ 2, "res", "3,l" ],
0xcb9e : [ 2, "res", "3,indhl" ],
0xcb9f : [ 2, "res", "3,a" ],
0xcba0 : [ 2, "res", "4,b" ],
0xcba1 : [ 2, "res", "4,c" ],
0xcba2 : [ 2, "res", "4,d" ],
0xcba3 : [ 2, "res", "4,e" ],
0xcba4 : [ 2, "res", "4,h" ],
0xcba5 : [ 2, "res", "4,l" ],
0xcba6 : [ 2, "res", "4,indhl" ],
0xcba7 : [ 2, "res", "4,a" ],
0xcba8 : [ 2, "res", "5,b" ],
0xcba9 : [ 2, "res", "5,c" ],
0xcbaa : [ 2, "res", "5,d" ],
0xcbab : [ 2, "res", "5,e" ],
0xcbac : [ 2, "res", "5,h" ],
0xcbad : [ 2, "res", "5,l" ],
0xcbae : [ 2, "res", "5,indhl" ],
0xcbaf : [ 2, "res", "5,a" ],
0xcbb0 : [ 2, "res", "6,b" ],
0xcbb1 : [ 2, "res", "6,c" ],
0xcbb2 : [ 2, "res", "6,d" ],
0xcbb3 : [ 2, "res", "6,e" ],
0xcbb4 : [ 2, "res", "6,h" ],
0xcbb5 : [ 2, "res", "6,l" ],
0xcbb6 : [ 2, "res", "6,indhl" ],
0xcbb7 : [ 2, "res", "6,a" ],
0xcbb8 : [ 2, "res", "7,b" ],
0xcbb9 : [ 2, "res", "7,c" ],
0xcbba : [ 2, "res", "7,d" ],
0xcbbb : [ 2, "res", "7,e" ],
0xcbbc : [ 2, "res", "7,h" ],
0xcbbd : [ 2, "res", "7,l" ],
0xcbbe : [ 2, "res", "7,indhl" ],
0xcbbf : [ 2, "res", "7,a" ],
0xcbc0 : [ 2, "set", "0,b" ],
0xcbc1 : [ 2, "set", "0,c" ],
0xcbc2 : [ 2, "set", "0,d" ],
0xcbc3 : [ 2, "set", "0,e" ],
0xcbc4 : [ 2, "set", "0,h" ],
0xcbc5 : [ 2, "set", "0,l" ],
0xcbc6 : [ 2, "set", "0,indhl" ],
0xcbc7 : [ 2, "set", "0,a" ],
0xcbc8 : [ 2, "set", "1,b" ],
0xcbc9 : [ 2, "set", "1,c" ],
0xcbca : [ 2, "set", "1,d" ],
0xcbcb : [ 2, "set", "1,e" ],
0xcbcc : [ 2, "set", "1,h" ],
0xcbcd : [ 2, "set", "1,l" ],
0xcbce : [ 2, "set", "1,indhl" ],
0xcbcf : [ 2, "set", "1,a" ],
0xcbd0 : [ 2, "set", "2,b" ],
0xcbd1 : [ 2, "set", "2,c" ],
0xcbd2 : [ 2, "set", "2,d" ],
0xcbd3 : [ 2, "set", "2,e" ],
0xcbd4 : [ 2, "set", "2,h" ],
0xcbd5 : [ 2, "set", "2,l" ],
0xcbd6 : [ 2, "set", "2,indhl" ],
0xcbd7 : [ 2, "set", "2,a" ],
0xcbd8 : [ 2, "set", "3,b" ],
0xcbd9 : [ 2, "set", "3,c" ],
0xcbda : [ 2, "set", "3,d" ],
0xcbdb : [ 2, "set", "3,e" ],
0xcbdc : [ 2, "set", "3,h" ],
0xcbdd : [ 2, "set", "3,l" ],
0xcbde : [ 2, "set", "3,indhl" ],
0xcbdf : [ 2, "set", "3,a" ],
0xcbe0 : [ 2, "set", "4,b" ],
0xcbe1 : [ 2, "set", "4,c" ],
0xcbe2 : [ 2, "set", "4,d" ],
0xcbe3 : [ 2, "set", "4,e" ],
0xcbe4 : [ 2, "set", "4,h" ],
0xcbe5 : [ 2, "set", "4,l" ],
0xcbe6 : [ 2, "set", "4,indhl" ],
0xcbe7 : [ 2, "set", "4,a" ],
0xcbe8 : [ 2, "set", "5,b" ],
0xcbe9 : [ 2, "set", "5,c" ],
0xcbea : [ 2, "set", "5,d" ],
0xcbeb : [ 2, "set", "5,e" ],
0xcbec : [ 2, "set", "5,h" ],
0xcbed : [ 2, "set", "5,l" ],
0xcbee : [ 2, "set", "5,indhl" ],
0xcbef : [ 2, "set", "5,a" ],
0xcbf0 : [ 2, "set", "6,b" ],
0xcbf1 : [ 2, "set", "6,c" ],
0xcbf2 : [ 2, "set", "6,d" ],
0xcbf3 : [ 2, "set", "6,e" ],
0xcbf4 : [ 2, "set", "6,h" ],
0xcbf5 : [ 2, "set", "6,l" ],
0xcbf6 : [ 2, "set", "6,indhl" ],
0xcbf7 : [ 2, "set", "6,a" ],
0xcbf8 : [ 2, "set", "7,b" ],
0xcbf9 : [ 2, "set", "7,c" ],
0xcbfa : [ 2, "set", "7,d" ],
0xcbfb : [ 2, "set", "7,e" ],
0xcbfc : [ 2, "set", "7,h" ],
0xcbfd : [ 2, "set", "7,l" ],
0xcbfe : [ 2, "set", "7,indhl" ],
0xcbff : [ 2, "set", "7,a" ],
0xdd09 : [ 2, "add", "ix,bc" ],
0xdd19 : [ 2, "add", "ix,de" ],
0xdd21 : [ 4, "ld", "ix,aa" ],
0xdd22 : [ 4, "ld", "indaa,ix" ],
0xdd23 : [ 2, "inc", "ix" ],
0xdd29 : [ 2, "add", "ix,ix" ],
0xdd2a : [ 4, "ld", "ix,indaa" ],
0xdd2b : [ 2, "dec", "ix" ],
0xdd34 : [ 3, "inc", "indix+d" ],
0xdd35 : [ 3, "dec", "indix+d" ],
0xdd36 : [ 4, "ld", "indix+d,n" ],
0xdd39 : [ 2, "add", "ix,sp" ],
0xdd46 : [ 3, "ld", "b,indix+d" ],
0xdd4e : [ 3, "ld", "c,indix+d" ],
0xdd56 : [ 3, "ld", "d,indix+d" ],
0xdd5e : [ 3, "ld", "e,indix+d" ],
0xdd66 : [ 3, "ld", "h,indix+d" ],
0xdd6e : [ 3, "ld", "l,indix+d" ],
0xdd70 : [ 3, "ld", "indix+d,b" ],
0xdd71 : [ 3, "ld", "indix+d,c" ],
0xdd72 : [ 3, "ld", "indix+d,d" ],
0xdd73 : [ 3, "ld", "indix+d,e" ],
0xdd74 : [ 3, "ld", "indix+d,h" ],
0xdd75 : [ 3, "ld", "indix+d,l" ],
0xdd77 : [ 3, "ld", "indix+d,a" ],
0xdd7e : [ 3, "ld", "a,indix+d" ],
0xdd86 : [ 3, "add", "a,indix+d" ],
0xdd8e : [ 3, "adc", "a,indix+d" ],
0xdd96 : [ 3, "sub", "indix+d" ],
0xdd9e : [ 3, "sbc", "a,indix+d" ],
0xdda6 : [ 3, "and", "indix+d" ],
0xddae : [ 3, "xor", "indix+d" ],
0xddb6 : [ 3, "or", "indix+d" ],
0xddbe : [ 3, "cp", "indix+d" ],
0xed40 : [ 2, "in", "b,indc" ],
0xed41 : [ 2, "out", "indc,b" ],
0xed42 : [ 2, "sbc", "hl,bc" ],
0xed43 : [ 4, "ld", "indaa,bc" ],
0xed44 : [ 2, "neg", "implied" ],
0xed45 : [ 2, "retn", "implied" ],
0xed46 : [ 2, "im", "0" ],
0xed47 : [ 2, "ld", "i,a" ],
0xed48 : [ 2, "in", "c,indc" ],
0xed49 : [ 2, "out", "indc,c" ],
0xed4a : [ 2, "adc", "hl,bc" ],
0xed4b : [ 4, "ld", "bc,indaa" ],
0xed4d : [ 2, "reti", "implied" ],
0xed4f : [ 2, "ld", "r,a" ],
0xed50 : [ 2, "in", "d,indc" ],
0xed51 : [ 2, "out", "indc,d" ],
0xed52 : [ 2, "sbc", "hl,de" ],
0xed53 : [ 4, "ld", "indaa,de" ],
0xed56 : [ 2, "im", "1" ],
0xed57 : [ 2, "ld", "a,i" ],
0xed58 : [ 2, "in", "e,indc" ],
0xed59 : [ 2, "out", "indc,e" ],
0xed5a : [ 2, "adc", "hl,de" ],
0xed5b : [ 4, "ld", "de,indaa" ],
0xed5e : [ 2, "im", "2" ],
0xed5f : [ 2, "ld", "a,r" ],
0xed60 : [ 2, "in", "h,indc" ],
0xed61 : [ 2, "out", "indc,h" ],
0xed62 : [ 2, "sbc", "hl,hl" ],
0xed67 : [ 2, "rrd", "implied" ],
0xed68 : [ 2, "in", "l,indc" ],
0xed69 : [ 2, "out", "indc,l" ],
0xed6a : [ 2, "adc", "hl,hl" ],
0xed6f : [ 2, "rld", "implied" ],
0xed72 : [ 2, "sbc", "hl,sp" ],
0xed73 : [ 4, "ld", "indaa,sp" ],
    0xed78 : [ 2, "in", "a,indc" ],
0xed79 : [ 2, "out", "indc,a" ],
0xed7a : [ 2, "adc", "hl,sp" ],
0xed7b : [ 4, "ld", "sp,indaa" ],
0xeda0 : [ 2, "ldi", "implied" ],
0xeda1 : [ 2, "cpi", "implied" ],
0xeda2 : [ 2, "ini", "implied" ],
0xeda3 : [ 2, "outi", "implied" ],
0xeda8 : [ 2, "ldd", "implied" ],
0xeda9 : [ 2, "cpd", "implied" ],
0xedaa : [ 2, "ind", "implied" ],
0xedab : [ 2, "outd", "implied" ],
0xedb0 : [ 2, "ldir", "implied" ],
0xedb1 : [ 2, "cpir", "implied" ],
0xedb2 : [ 2, "inir", "implied" ],
0xedb3 : [ 2, "otir", "implied" ],
0xedb8 : [ 2, "lddr", "implied" ],
0xedb9 : [ 2, "cpdr", "implied" ],
0xedba : [ 2, "indr", "implied" ],
0xedbb : [ 2, "otdr", "implied" ],
0xfd09 : [ 2, "add", "iy,bc" ],
0xfd19 : [ 2, "add", "iy,de" ],
0xfd21 : [ 4, "ld", "iy,aa" ],
0xfd22 : [ 4, "ld", "indaa,iy" ],
0xfd23 : [ 2, "inc", "iy" ],
0xfd29 : [ 2, "add", "iy,iy" ],
0xfd2a : [ 4, "ld", "iy,indaa" ],
0xfd2b : [ 2, "dec", "iy" ],
0xfd34 : [ 3, "inc", "indiy+d" ],
0xfd35 : [ 3, "dec", "indiy+d" ],
0xfd36 : [ 4, "ld", "indiy+d,n" ],
0xfd39 : [ 2, "add", "iy,sp" ],
0xfd46 : [ 3, "ld", "b,indiy+d" ],
0xfd4e : [ 3, "ld", "c,indiy+d" ],
0xfd56 : [ 3, "ld", "d,indiy+d" ],
0xfd5e : [ 3, "ld", "e,indiy+d" ],
0xfd66 : [ 3, "ld", "h,indiy+d" ],
0xfd6e : [ 3, "ld", "l,indiy+d" ],
0xfd70 : [ 3, "ld", "indiy+d,b" ],
0xfd71 : [ 3, "ld", "indiy+d,c" ],
0xfd72 : [ 3, "ld", "indiy+d,d" ],
0xfd73 : [ 3, "ld", "indiy+d,e" ],
0xfd74 : [ 3, "ld", "indiy+d,h" ],
0xfd75 : [ 3, "ld", "indiy+d,l" ],
0xfd77 : [ 3, "ld", "indiy+d,a" ],
0xfd7e : [ 3, "ld", "a,indiy+d" ],
0xfd86 : [ 3, "add", "a,indiy+d" ],
0xfd8e : [ 3, "adc", "a,indiy+d" ],
0xfd96 : [ 3, "sub", "indiy+d" ],
0xfd9e : [ 3, "sbc", "a,indiy+d" ],
0xfda6 : [ 3, "and", "indiy+d" ],
0xfdae : [ 3, "xor", "indiy+d" ],
0xfdb6 : [ 3, "or", "indiy+d" ],
0xfdbe : [ 3, "cp", "indiy+d" ],
# Placeholder 2-byte leadins for the 4-byte ix/iy bit instructions fully
# defined below. The z80bit flag triggers a special case in the disassembler
# to look up the 4 byte instruction.
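# Example: the byte sequence dd cb 05 46 disassembles as "bit 0,(ix+$05)" --
# for these prefixes the displacement byte comes third and the sub-opcode
# byte comes last.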
0xddcb : [ 4, "ixbit", "implied", z80bit ],
0xfdcb : [ 4, "iybit", "implied", z80bit ],
}
def extra_opcodes(addr_table, op_table):
# Create all the 0xddcb and 0xfdcb addressing modes. The modes look like [0-7],(i[xy]+*)[,[abcdehl]]?
for index in ['x', 'y']:
for bit in range(8):
k = "%d,indi%s+d" % (bit, index)
v = "%d,(i%s+${0:02X})" % (bit, index)
addr_table[k] = v
for reg in ['a', 'b', 'c', 'd', 'e', 'h', 'l']:
k = "%d,indi%s+d,%s" % (bit, index, reg)
v = "%d,(i%s+${0:02X}),%s" % (bit, index, reg)
addr_table[k] = v
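    # e.g. this generates "3,indix+d" -> "3,(ix+${0:02X})" and
    # "3,indix+d,a" -> "3,(ix+${0:02X}),a" for every bit/index/register combo.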
    # Create all the 0xddcb and 0xfdcb opcodes. These are all 4-byte opcodes
    # where the 3rd byte is a signed offset in the range -128..+127. For the
    # purposes of this table the 3rd byte is stored as zero, and the
    # disassembler inserts the real 3rd byte during the z80bit special-case
    # check.
for first_byte, x_or_y in [(0xdd, 'x'), (0xfd, 'y')]:
# groups of 8, expand to full 256
        mnemonics_8 = ['rlc', 'rrc', 'rl', 'rr', 'sla', 'sra', 'sll', 'srl'] + ['bit'] * 8 + ['res'] * 8 + ['set'] * 8
mnemonics = [m for mnemonic in mnemonics_8 for m in [mnemonic]*8]
# create all 256 addressing modes, in groups of 64
addrmodes = ['indi%s+d' + a for a in [',b', ',c', ',d', ',e', ',h', ',l', '', ',a']] * 8 + [f % d for d in range(8) for f in ['%d,indi%%s+d'] * 8] + [f % d for d in range(8) for f in ['%d,indi%%s+d' + a for a in [',b', ',c', ',d', ',e', ',h', ',l', '', ',a']]] * 2
for fourth_byte, (instruction, addrmode) in enumerate(zip(mnemonics, addrmodes)):
opcode = (first_byte << 24) + (0xcb << 16) + fourth_byte
op_table[opcode] = [ 4, instruction, addrmode % x_or_y, z80bit ]
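        # e.g. first_byte 0xdd with fourth_byte 0x46 is stored under key
        # 0xddcb0046 ("bit 0,(ix+d)"); the zero third byte is the displacement
        # placeholder that gets filled in at disassembly time.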
extra_opcodes(addressModeTable, opcodeTable)
del extra_opcodes
# End of processor specific code
##########################################################################
| jefftranter/udis | z80.py | Python | apache-2.0 | 32,759 | 0.094142 |
#!/usr/bin/env python
from __future__ import print_function
import roslib
roslib.load_manifest('lane_detection')
import rospy
import sys
from std_msgs.msg import Int32
import cv2
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
from picamera import PiCamera
from picamera.array import PiRGBArray
import time
import numpy as np
def detect(img):
# start time
start_time = cv2.getTickCount()
    # Median filter to remove noise
    img = cv2.medianBlur(img,5)
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
# print img.shape = (200,350,3)
rows,cols,channels = img.shape
# ROI
roi_mask = np.zeros(img.shape,dtype=np.uint8)
roi_mask[10:rows,0:cols] = 255
street = cv2.bitwise_and(img,roi_mask)
stop_roi_mask = np.zeros(gray.shape,dtype=np.uint8)
stop_roi_mask[100:rows,150:250] = 255
right_roi_mask = np.zeros(gray.shape,dtype=np.uint8)
right_roi_mask[rows/3:rows,220:360] = 255
    right_roi = cv2.bitwise_and(img, img, mask=right_roi_mask)
left_roi_mask = np.zeros(gray.shape,dtype=np.uint8)
left_roi_mask[rows/3:rows,0:180] = 255
    left_roi = cv2.bitwise_and(img, img, mask=left_roi_mask)
# define range of color in HSV
hsv = cv2.cvtColor(street,cv2.COLOR_BGR2HSV)
sensitivity = 60 # range of sensitivity=[90,150]
lower_white = np.array([0,0,255-sensitivity])
upper_white = np.array([255,sensitivity,255])
white_mask = cv2.inRange(hsv,lower_white,upper_white)
white_mask = cv2.erode(white_mask, None, iterations=2)
white_mask = cv2.dilate(white_mask, None, iterations=2)
lower_red = np.array([150,70,50])
upper_red = np.array([200,255,255])
red_mask = cv2.inRange(hsv,lower_red,upper_red)
red_mask = cv2.erode(red_mask, None, iterations=2)
red_mask = cv2.dilate(red_mask, None, iterations=2)
lower_yellow = np.array([10,100,100]) #0,100,100
upper_yellow = np.array([30,255,255]) #80,255,255
yellow_mask = cv2.inRange(hsv,lower_yellow,upper_yellow)
yellow_mask = cv2.erode(yellow_mask, None, iterations=2)
yellow_mask = cv2.dilate(yellow_mask, None, iterations=2)
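    # NOTE: each erode/dilate pair is a morphological opening that strips
    # speckle noise from the mask before edge detection (with kernel=None,
    # OpenCV falls back to a default 3x3 structuring element).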
# mask AND original img
whitehsvthresh = cv2.bitwise_and(right_roi,right_roi,mask=white_mask)
yellowhsvthresh = cv2.bitwise_and(street,street,mask=yellow_mask)
redhsvthresh = cv2.bitwise_and(street,street,mask=red_mask)
# Canny Edge Detection
right_edges = cv2.Canny(whitehsvthresh,100,200)
left_edges = cv2.Canny(yellowhsvthresh,100,200)
right_edges = cv2.bitwise_and(right_edges,right_roi_mask)
left_edges = cv2.bitwise_and(left_edges,left_roi_mask)
red_edges_hsv = cv2.Canny(redhsvthresh,100,200)
red_edges = cv2.bitwise_and(red_edges_hsv,stop_roi_mask)
# Probabilistic Hough Transform
# minLength=50
# maxGap=10
# right_lines = cv2.HoughLinesP(right_edges,1,np.pi/180,30,minLength,maxGap)
# left_lines = cv2.HoughLinesP(left_edges,1,np.pi/180,30,minLength,maxGap)
# red_lines = cv2.HoughLinesP(red_edges,1,np.pi/180,100,minLength,maxGap)
#
# w = 205 # da controllare
# lw = 20 # da controllare
# ly = 15 # da controllare
# i = 0
# j = 0
# d = []
# phi = []
# if right_lines is not None:
# for x in range(0,len(right_lines)):
# for x1,y1,x2,y2 in right_lines[x]:
# d_i = ((x1+x2)/2)-(w/2)
# if x2>x1:
# d_i = d_i - lw
# d.insert(i,d_i)
# a = x2-x1
# if x2<x1:
# a = -a
# phi.insert(j,(np.pi)/2 - np.arctan(a/(y2-y1)))
# i+1
# j+1
# rospy.loginfo("Right lane: ")
# rospy.loginfo(d)
#
# if left_lines is not None:
# for x in range(0,len(left_lines)):
# for x1,y1,x2,y2 in left_lines[x]:
# d_i = ((x1+x2)/2)+(w/2)
# if x2>x1:
# d_i = d_i + ly
# d.insert(i,d_i)
# a = x2-x1
# if x2<x1:
# a = -a
# phi.insert(j,(np.pi)/2) - np.arctan2((x2-x1)/(y2-y1))
# i+1
# j+1
# rospy.loginfo("Left lane: ")
# rospy.loginfo(d)
## rospy.loginfo(d)
## rospy.loginfo(phi)
#
## bufferx_right = []
## i=0
## j=0
## mdx=[]
## if lines_right is not None:
## for x in range(0,len(lines_right)):
## for x1,y1,x2,y2 in lines_right[x]:
## if x2!=x1:
## m=(y2-y1)/(float(x2-x1))
## #alpha=np.arctan(m)
## mdx.insert(j,m)
## bufferx_right.insert(i,x1)
## i+1
## bufferx_right.insert(i,x2)
## i+1
## j+1
## bufferx_left = []
## i=0
## j=0
## msx=[]
## if lines_left is not None:
## for x in range(0,len(lines_left)):
## for x1,y1,x2,y2 in lines_left[x]:
## if x2!=x1:
## m=(y2-y1)/(float(x2-x1))
## #alpha=np.arctan(m)
## msx.insert(j,m)
## bufferx_left.insert(i,x1)
## i+1
## bufferx_left.insert(i,x2)
## i+1
## j+1
## x=0
## mx_right=0
## for j in range(0,len(bufferx_right)):
## x+=bufferx_right[j]
## if len(bufferx_right)!=0:
## mx_right=x/len(bufferx_right)
##
## x=0
## mx_left=0
## for k in range(0,len(bufferx_left)):
## x+=bufferx_left[k]
## if len(bufferx_left)!=0:
## mx_left=x/len(bufferx_left)
##
## mx=(mx_right+mx_left)/2
##
## x=0
## m_right = 0
## for j in range(0,len(mdx)):
## x+=mdx[j]
## if len(mdx)!=0:
## m_right=x/len(mdx)
##
## x=0
## m_left=0
### for k in range(0,len(msx)):
### x+=msx[k]
# if len(msx)!=0:
# m_left=x/(len(msx))
#
# m = (m_right+m_left)/2
#
# if lines_right is not None and lines_left is not None:
# if (mx<=250 and mx>=150):
# return "forward"
# elif mx>250:
# return "left"
# elif mx<150:
# return "right"
# elif lines_left is None and lines_right is not None:
# if mdx>0.8:
# return "forward"
# else:
# return "left"
# elif lines_right is None and bufferx_left is not None:
# if msx>0.8:
# return "forward"
# else:
# return "right"
# else:
# return "x"
# Standard Hough Transform
right_lines = cv2.HoughLines(right_edges,0.8,np.pi/180,40)
left_lines = cv2.HoughLines(left_edges,0.8,np.pi/180,35)
red_lines = cv2.HoughLines(red_edges,1,np.pi/180,30)
xm = cols/2
ym = rows
    # Find the right lane boundary
x = []
i = 0
if right_lines is not None:
right_lines = np.array(right_lines[0])
for rho, theta in right_lines:
a=np.cos(theta)
b=np.sin(theta)
x0,y0=a*rho,b*rho
y3 = 140
x3 = int(x0+((y0-y3)*np.sin(theta)/np.cos(theta)))
x.insert(i,x3)
            i += 1
if len(x) != 0:
xmin = x[0]
for k in range(0,len(x)):
if x[k] < xmin and x[k] > 0:
xmin = x[k]
kr = int(np.sqrt(((xmin-xm)*(xmin-xm))+((y3-ym)*(y3-ym))))
else:
kr = 0
xmin = 0
    # Find the left lane boundary
x = []
i = 0
if left_lines is not None:
left_lines = np.array(left_lines[0])
for rho, theta in left_lines:
a=np.cos(theta)
b=np.sin(theta)
x0,y0=a*rho,b*rho
y3 = 140
x3 = int(x0+((y0-y3)*np.sin(theta)/np.cos(theta)))
x.insert(i,x3)
            i += 1
if len(x) != 0:
xmax = x[0]
for k in range(0,len(x)):
if x[k] > xmax and x[k]<cols:
xmax = x[k]
kl = int(np.sqrt(((xmax-xm)*(xmax-xm))+((y3-ym)*(y3-ym))))
else:
kl = 0
xmax = 0
error = kr - kl
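    # kr/kl measure the pixel distance from the bottom-centre point (xm, ym)
    # to where each lane crosses the y3 = 140 scan line; their difference is
    # the steering error (sign convention assumed: positive steers right).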
#end time
end_time = cv2.getTickCount()
    time_count = (end_time - start_time) / cv2.getTickFrequency()
# rospy.loginfo(time_count)
if red_lines is not None:
rospy.loginfo("STOP")
return 154 #stop
elif right_lines is not None and left_lines is not None:
rospy.loginfo(error)
if error > 150:
error = 150
elif error < -150:
error = -150
return error
elif left_lines is not None and right_lines is None:
rospy.loginfo("Turn Right")
rospy.loginfo(kl)
return 152 #turn right
elif left_lines is None and right_lines is not None:
rospy.loginfo("Turn Left")
        return 153 #turn left
elif left_lines is None and right_lines is None:
rospy.loginfo("No line")
return 155 #no line found
else:
return 155 #no line found
def lane_detection():
pub = rospy.Publisher('lane_detection', Int32, queue_size=10) #ros-lane-detection
    rospy.init_node('lane_detection', anonymous=True)
camera = PiCamera() # Raspberry Pi Camera
camera.resolution = (350,200)
camera.framerate = 30 #50
camera.contrast = 40 #30
camera.saturation = 100 #20
camera.brightness = 30 #40
camera.sharpness = 0
camera.start_preview()
time.sleep(1)
rawCapture = PiRGBArray(camera)
rate = rospy.Rate(30) # publisher frequency
bridge = CvBridge()
while not rospy.is_shutdown():
camera.capture(rawCapture, format='bgr', use_video_port=True)
rospy.loginfo("Sending an Image Message")
info = detect(rawCapture.array)
pub.publish(info)
rawCapture.truncate(0)
# rate.sleep()
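        # NOTE: with rate.sleep() commented out the loop is paced by the
        # blocking camera.capture() call rather than the 30 Hz Rate above.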
if __name__ == '__main__':
try:
lane_detection()
except rospy.ROSInterruptException:
pass
| isarlab-department-engineering/ros_dt_lane_follower | deprecated_nodes/old-lane-detection.py | Python | bsd-3-clause | 9,077 | 0.048695 |
from bee import *
from bee.segments import *
import libcontext
from libcontext.socketclasses import *
from libcontext.pluginclasses import *
def get_worker(name, xyz):
class moverel(worker):
"""Relative movement along %s axis""" % xyz
__beename__ = name
moverel = antenna("push", "float")
movement = variable("float")
connect(moverel, movement)
@modifier
def do_move(self):
axis = self.get_matrix().get_proxy("AxisSystem")
axis.origin += getattr(axis, xyz) * self.movement
axis.commit()
trigger(movement, do_move)
def set_get_matrix(self, function):
self.get_matrix = function
def place(self):
libcontext.socket(("entity", "bound", "matrix"), socket_single_required(self.set_get_matrix))
return moverel
moverelX = get_worker("moverelX", "x")
moverelY = get_worker("moverelY", "y")
moverelZ = get_worker("moverelZ", "z")
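# Usage sketch (assumed wiring): pushing a float into moverelX.moverel
# translates the bound entity along its local x axis by that amount, since
# do_move() offsets the origin by axis.x * movement and commits the matrix.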
| agoose77/hivesystem | dragonfly/scene/bound/moverel.py | Python | bsd-2-clause | 978 | 0.002045 |
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.compute import task_states
from nova.compute import vm_states
import nova.conf
from nova import objects
from nova.virt import block_device as driver_block_device
CONF = nova.conf.CONF
CONF.import_opt('host', 'nova.netconf')
class ImageCacheManager(object):
"""Base class for the image cache manager.
This class will provide a generic interface to the image cache manager.
"""
def __init__(self):
self.remove_unused_base_images = CONF.remove_unused_base_images
self.resize_states = [task_states.RESIZE_PREP,
task_states.RESIZE_MIGRATING,
task_states.RESIZE_MIGRATED,
task_states.RESIZE_FINISH]
def _get_base(self):
"""Returns the base directory of the cached images."""
raise NotImplementedError()
def _list_running_instances(self, context, all_instances):
"""List running instances (on all compute nodes).
This method returns a dictionary with the following keys:
- used_images
- image_popularity
- instance_names
"""
used_images = {}
image_popularity = {}
instance_names = set()
used_swap_images = set()
instance_bdms = objects.BlockDeviceMappingList.bdms_by_instance_uuid(
context, [instance.uuid for instance in all_instances])
for instance in all_instances:
# NOTE(mikal): "instance name" here means "the name of a directory
# which might contain an instance" and therefore needs to include
# historical permutations as well as the current one.
instance_names.add(instance.name)
instance_names.add(instance.uuid)
if (instance.task_state in self.resize_states or
instance.vm_state == vm_states.RESIZED):
instance_names.add(instance.name + '_resize')
instance_names.add(instance.uuid + '_resize')
for image_key in ['image_ref', 'kernel_id', 'ramdisk_id']:
image_ref_str = getattr(instance, image_key)
if image_ref_str is None:
continue
local, remote, insts = used_images.get(image_ref_str,
(0, 0, []))
if instance.host == CONF.host:
local += 1
else:
remote += 1
insts.append(instance.name)
used_images[image_ref_str] = (local, remote, insts)
image_popularity.setdefault(image_ref_str, 0)
image_popularity[image_ref_str] += 1
bdms = instance_bdms.get(instance.uuid)
if bdms:
swap = driver_block_device.convert_swap(bdms)
if swap:
swap_image = 'swap_' + str(swap[0]['swap_size'])
used_swap_images.add(swap_image)
return {'used_images': used_images,
'image_popularity': image_popularity,
'instance_names': instance_names,
'used_swap_images': used_swap_images}
def _list_base_images(self, base_dir):
"""Return a list of the images present in _base.
This method returns a dictionary with the following keys:
- unexplained_images
- originals
"""
return {'unexplained_images': [],
'originals': []}
def _age_and_verify_cached_images(self, context, all_instances, base_dir):
"""Ages and verifies cached images."""
raise NotImplementedError()
def update(self, context, all_instances):
"""The cache manager.
This will invoke the cache manager. This will update the cache
according to the defined cache management scheme. The information
populated in the cached stats will be used for the cache management.
"""
raise NotImplementedError()
| zhimin711/nova | nova/virt/imagecache.py | Python | apache-2.0 | 4,660 | 0.000215 |
import datetime
from decimal import Decimal as D, InvalidOperation
from django.contrib import messages
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.core.exceptions import ObjectDoesNotExist
from django.db.models.loading import get_model
from django.db.models import fields, Q, Sum, Count
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import get_object_or_404
from django.utils.datastructures import SortedDict
from django.views.generic import ListView, DetailView, UpdateView, FormView
from django.conf import settings
from oscar.core.loading import get_class
from oscar.core.utils import format_datetime
from oscar.apps.dashboard.orders import forms
from oscar.views import sort_queryset
from oscar.views.generic import BulkEditMixin
from oscar.apps.dashboard.reports.csv_utils import CsvUnicodeWriter
from oscar.apps.payment.exceptions import PaymentError
from oscar.apps.order.exceptions import InvalidShippingEvent, InvalidStatus
Order = get_model('order', 'Order')
OrderNote = get_model('order', 'OrderNote')
ShippingAddress = get_model('order', 'ShippingAddress')
Transaction = get_model('payment', 'Transaction')
Line = get_model('order', 'Line')
ShippingEventType = get_model('order', 'ShippingEventType')
PaymentEventType = get_model('order', 'PaymentEventType')
EventHandler = get_class('order.processing', 'EventHandler')
Partner = get_model('partner', 'Partner')
def queryset_orders_for_user(user):
"""
Returns a queryset of all orders that a user is allowed to access.
A staff user may access all orders.
To allow access to an order for a non-staff user, at least one line's
partner has to have the user in the partner's list.
"""
queryset = Order._default_manager.all()
if user.is_staff:
return queryset
else:
partners = Partner._default_manager.filter(users=user)
return queryset.filter(lines__partner__in=partners).distinct()
def get_order_for_user_or_404(user, number):
try:
return queryset_orders_for_user(user).get(number=number)
except ObjectDoesNotExist:
raise Http404()
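# Example: get_order_for_user_or_404(request.user, '100001') either returns
# the order or raises Http404, so per-partner access control cannot be
# bypassed by guessing order numbers (the number shown is hypothetical).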
class OrderStatsView(FormView):
"""
Dashboard view for order statistics.
Supports the permission-based dashboard.
"""
template_name = 'dashboard/orders/statistics.html'
form_class = forms.OrderStatsForm
def get(self, request, *args, **kwargs):
return self.post(request, *args, **kwargs)
def form_valid(self, form):
ctx = self.get_context_data(form=form,
filters=form.get_filters())
return self.render_to_response(ctx)
def get_form_kwargs(self):
kwargs = super(OrderStatsView, self).get_form_kwargs()
kwargs['data'] = self.request.GET
return kwargs
def get_context_data(self, **kwargs):
ctx = super(OrderStatsView, self).get_context_data(**kwargs)
filters = kwargs.get('filters', {})
ctx.update(self.get_stats(filters))
ctx['title'] = kwargs['form'].get_filter_description()
return ctx
def get_stats(self, filters):
orders = queryset_orders_for_user(self.request.user).filter(**filters)
stats = {
'total_orders': orders.count(),
'total_lines': Line.objects.filter(order__in=orders).count(),
'total_revenue': orders.aggregate(
Sum('total_incl_tax'))['total_incl_tax__sum'] or D('0.00'),
'order_status_breakdown': orders.order_by('status').values(
'status').annotate(freq=Count('id'))
}
return stats
class OrderListView(BulkEditMixin, ListView):
"""
Dashboard view for a list of orders.
Supports the permission-based dashboard.
"""
model = Order
context_object_name = 'orders'
template_name = 'dashboard/orders/order_list.html'
form_class = forms.OrderSearchForm
desc_template = _("%(main_filter)s %(name_filter)s %(title_filter)s"
"%(upc_filter)s %(sku_filter)s %(date_filter)s"
"%(voucher_filter)s %(payment_filter)s"
"%(status_filter)s")
paginate_by = 25
description = ''
actions = ('download_selected_orders',)
current_view = 'dashboard:order-list'
def dispatch(self, request, *args, **kwargs):
# base_queryset is equal to all orders the user is allowed to access
self.base_queryset = queryset_orders_for_user(
request.user).order_by('-date_placed')
return super(OrderListView, self).dispatch(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
if 'order_number' in request.GET and request.GET.get(
'response_format', 'html') == 'html':
# Redirect to Order detail page if valid order number is given
try:
order = self.base_queryset.get(
number=request.GET['order_number'])
except Order.DoesNotExist:
pass
else:
url = reverse('dashboard:order-detail',
kwargs={'number': order.number})
return HttpResponseRedirect(url)
return super(OrderListView, self).get(request, *args, **kwargs)
def get_desc_context(self, data=None): # noqa (too complex (16))
"""Update the title that describes the queryset"""
desc_ctx = {
'main_filter': _('All orders'),
'name_filter': '',
'title_filter': '',
'upc_filter': '',
'sku_filter': '',
'date_filter': '',
'voucher_filter': '',
'payment_filter': '',
'status_filter': '',
}
if 'order_status' in self.request.GET:
status = self.request.GET['order_status']
if status.lower() == 'none':
desc_ctx['main_filter'] = _("Orders without an order status")
else:
desc_ctx['main_filter'] = _("Orders with status '%s'") % status
if data is None:
return desc_ctx
if data['order_number']:
desc_ctx['main_filter'] = _('Orders with number starting with'
' "%(order_number)s"') % data
if data['name']:
desc_ctx['name_filter'] = _(" with customer name matching"
" '%(name)s'") % data
if data['product_title']:
desc_ctx['title_filter'] \
= _(" including an item with title matching"
" '%(product_title)s'") % data
if data['upc']:
desc_ctx['upc_filter'] = _(" including an item with UPC"
" '%(upc)s'") % data
if data['partner_sku']:
desc_ctx['upc_filter'] = _(" including an item with ID"
" '%(partner_sku)s'") % data
if data['date_from'] and data['date_to']:
desc_ctx['date_filter'] \
= _(" placed between %(start_date)s and %(end_date)s") \
% {'start_date': format_datetime(data['date_from']),
'end_date': format_datetime(data['date_to'])}
elif data['date_from']:
desc_ctx['date_filter'] = _(" placed since %s") \
% format_datetime(data['date_from'])
elif data['date_to']:
date_to = data['date_to'] + datetime.timedelta(days=1)
desc_ctx['date_filter'] = _(" placed before %s") \
% format_datetime(date_to)
if data['voucher']:
desc_ctx['voucher_filter'] = _(" using voucher '%(voucher)s'") \
% data
if data['payment_method']:
desc_ctx['payment_filter'] = _(" paid for by %(payment_method)s") \
% data
if data['status']:
desc_ctx['status_filter'] = _(" with status %(status)s") % data
return desc_ctx
def get_queryset(self): # noqa (too complex (19))
"""
Build the queryset for this list.
"""
queryset = sort_queryset(self.base_queryset, self.request,
['number', 'total_incl_tax'])
# Look for shortcut query filters
if 'order_status' in self.request.GET:
self.form = self.form_class()
status = self.request.GET['order_status']
if status.lower() == 'none':
status = None
self.description = self.desc_template % self.get_desc_context()
return self.base_queryset.filter(status=status)
if 'order_number' not in self.request.GET:
self.description = self.desc_template % self.get_desc_context()
self.form = self.form_class()
return queryset
self.form = self.form_class(self.request.GET)
if not self.form.is_valid():
return queryset
data = self.form.cleaned_data
if data['order_number']:
            queryset = self.base_queryset.filter(
                number__istartswith=data['order_number'])
if data['name']:
# If the value is two words, then assume they are first name and
# last name
parts = data['name'].split()
allow_anon = getattr(settings, 'OSCAR_ALLOW_ANON_CHECKOUT', False)
            if len(parts) == 1:
                parts = [data['name'], data['name']]
            else:
                parts = [parts[0], ' '.join(parts[1:])]
filter = Q(user__first_name__istartswith=parts[0])
filter |= Q(user__last_name__istartswith=parts[1])
if allow_anon:
filter |= Q(billing_address__first_name__istartswith=parts[0])
filter |= Q(shipping_address__first_name__istartswith=parts[0])
filter |= Q(billing_address__last_name__istartswith=parts[1])
filter |= Q(shipping_address__last_name__istartswith=parts[1])
queryset = queryset.filter(filter).distinct()
if data['product_title']:
queryset = queryset.filter(
lines__title__istartswith=data['product_title']).distinct()
if data['upc']:
queryset = queryset.filter(lines__upc=data['upc'])
if data['partner_sku']:
queryset = queryset.filter(lines__partner_sku=data['partner_sku'])
if data['date_from'] and data['date_to']:
# Add 24 hours to make search inclusive
date_to = data['date_to'] + datetime.timedelta(days=1)
queryset = queryset.filter(date_placed__gte=data['date_from'])
queryset = queryset.filter(date_placed__lt=date_to)
elif data['date_from']:
queryset = queryset.filter(date_placed__gte=data['date_from'])
elif data['date_to']:
date_to = data['date_to'] + datetime.timedelta(days=1)
queryset = queryset.filter(date_placed__lt=date_to)
if data['voucher']:
queryset = queryset.filter(
discounts__voucher_code=data['voucher']).distinct()
if data['payment_method']:
queryset = queryset.filter(
sources__source_type__code=data['payment_method']).distinct()
if data['status']:
queryset = queryset.filter(status=data['status'])
self.description = self.desc_template % self.get_desc_context(data)
return queryset
def get_context_data(self, **kwargs):
ctx = super(OrderListView, self).get_context_data(**kwargs)
ctx['queryset_description'] = self.description
ctx['form'] = self.form
return ctx
def is_csv_download(self):
return self.request.GET.get('response_format', None) == 'csv'
def get_paginate_by(self, queryset):
return None if self.is_csv_download() else self.paginate_by
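    # For CSV export, pagination is disabled (get_paginate_by returns None)
    # so the download contains every matching order, not just one page.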
def render_to_response(self, context, **response_kwargs):
if self.is_csv_download():
return self.download_selected_orders(
self.request,
context['object_list'])
return super(OrderListView, self).render_to_response(
context, **response_kwargs)
def get_download_filename(self, request):
return 'orders.csv'
def download_selected_orders(self, request, orders):
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s' \
% self.get_download_filename(request)
writer = CsvUnicodeWriter(response, delimiter=',')
meta_data = (('number', _('Order number')),
('value', _('Order value')),
('date', _('Date of purchase')),
('num_items', _('Number of items')),
('status', _('Order status')),
('customer', _('Customer email address')),
('shipping_address_name', _('Deliver to name')),
('billing_address_name', _('Bill to name')),
)
columns = SortedDict()
for k, v in meta_data:
columns[k] = v
writer.writerow(columns.values())
for order in orders:
row = columns.copy()
row['number'] = order.number
row['value'] = order.total_incl_tax
row['date'] = format_datetime(order.date_placed, 'DATETIME_FORMAT')
row['num_items'] = order.num_items
row['status'] = order.status
row['customer'] = order.email
if order.shipping_address:
row['shipping_address_name'] = order.shipping_address.name
else:
row['shipping_address_name'] = ''
if order.billing_address:
row['billing_address_name'] = order.billing_address.name
else:
row['billing_address_name'] = ''
encoded_values = [unicode(value).encode('utf8')
for value in row.values()]
writer.writerow(encoded_values)
return response
class OrderDetailView(DetailView):
"""
Dashboard view to display a single order.
Supports the permission-based dashboard.
"""
model = Order
context_object_name = 'order'
template_name = 'dashboard/orders/order_detail.html'
order_actions = ('save_note', 'delete_note', 'change_order_status',
'create_order_payment_event')
line_actions = ('change_line_statuses', 'create_shipping_event',
'create_payment_event')
def get_object(self, queryset=None):
return get_order_for_user_or_404(self.request.user,
self.kwargs['number'])
def get_context_data(self, **kwargs):
ctx = super(OrderDetailView, self).get_context_data(**kwargs)
ctx['active_tab'] = kwargs.get('active_tab', 'lines')
ctx['note_form'] = self.get_order_note_form()
ctx['line_statuses'] = Line.all_statuses()
ctx['shipping_event_types'] = ShippingEventType.objects.all()
ctx['payment_event_types'] = PaymentEventType.objects.all()
ctx['payment_transactions'] = self.get_payment_transactions()
return ctx
def get_payment_transactions(self):
return Transaction.objects.filter(
source__order=self.object)
def get_order_note_form(self):
post_data = None
kwargs = {}
if self.request.method == 'POST':
post_data = self.request.POST
note_id = self.kwargs.get('note_id', None)
if note_id:
note = get_object_or_404(OrderNote, order=self.object, id=note_id)
if note.is_editable():
kwargs['instance'] = note
return forms.OrderNoteForm(post_data, **kwargs)
def post(self, request, *args, **kwargs):
self.object = self.get_object()
order = self.object
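        # POST data carries either an 'order_action' or a 'line_action';
        # the value is checked against a whitelist and then dispatched to
        # the method of the same name via getattr().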
# Look for order-level action
order_action = request.POST.get('order_action', '').lower()
if order_action:
if order_action not in self.order_actions:
messages.error(self.request, _("Invalid action"))
return self.reload_page_response()
else:
return getattr(self, order_action)(request, order)
# Look for line-level action
line_action = request.POST.get('line_action', '').lower()
if line_action:
if line_action not in self.line_actions:
messages.error(self.request, "Invalid action")
return self.reload_page_response()
else:
line_ids = request.POST.getlist('selected_line')
line_quantities = []
for line_id in line_ids:
qty = request.POST.get('selected_line_qty_%s' % line_id)
line_quantities.append(int(qty))
lines = order.lines.filter(id__in=line_ids)
if not lines.exists():
messages.error(self.request,
_("You must select some lines to act on"))
return self.reload_page_response()
return getattr(self, line_action)(request, order, lines,
line_quantities)
messages.error(request, _("No valid action submitted"))
return self.reload_page_response()
def reload_page_response(self, fragment=None):
url = reverse('dashboard:order-detail', kwargs={'number':
self.object.number})
if fragment:
url += '#' + fragment
return HttpResponseRedirect(url)
def save_note(self, request, order):
form = self.get_order_note_form()
success_msg = _("Note saved")
if form.is_valid():
note = form.save(commit=False)
note.user = request.user
note.order = order
note.save()
messages.success(self.request, success_msg)
return self.reload_page_response(fragment='notes')
ctx = self.get_context_data(note_form=form, active_tab='notes')
return self.render_to_response(ctx)
def delete_note(self, request, order):
try:
note = order.notes.get(id=request.POST.get('note_id', None))
except ObjectDoesNotExist:
messages.error(request, _("Note cannot be deleted"))
else:
messages.info(request, _("Note deleted"))
note.delete()
return self.reload_page_response()
def change_order_status(self, request, order):
new_status = request.POST['new_status'].strip()
if not new_status:
messages.error(request, _("The new status '%s' is not valid")
% new_status)
return self.reload_page_response()
        if new_status not in order.available_statuses():
messages.error(request, _("The new status '%s' is not valid for"
" this order") % new_status)
return self.reload_page_response()
        handler = EventHandler(request.user)
        # Capture the current status before the handler mutates the order.
        old_status = order.status
        try:
            handler.handle_order_status_change(order, new_status)
        except PaymentError, e:
            messages.error(request, _("Unable to change order status due to"
                                      " payment error: %s") % e)
        else:
            msg = _("Order status changed from '%(old_status)s' to"
                    " '%(new_status)s'") % {'old_status': old_status,
                                            'new_status': new_status}
messages.info(request, msg)
order.notes.create(user=request.user, message=msg,
note_type=OrderNote.SYSTEM)
return self.reload_page_response(fragment='activity')
def change_line_statuses(self, request, order, lines, quantities):
new_status = request.POST['new_status'].strip()
if not new_status:
messages.error(request, _("The new status '%s' is not valid")
% new_status)
return self.reload_page_response()
errors = []
for line in lines:
if new_status not in line.available_statuses():
errors.append(_("'%(status)s' is not a valid new status for"
" line %(line_id)d") % {'status': new_status,
'line_id': line.id})
if errors:
messages.error(request, "\n".join(errors))
return self.reload_page_response()
msgs = []
for line in lines:
msg = _("Status of line #%(line_id)d changed from '%(old_status)s'"
" to '%(new_status)s'") % {'line_id': line.id,
'old_status': line.status,
'new_status': new_status}
msgs.append(msg)
line.set_status(new_status)
message = "\n".join(msgs)
messages.info(request, message)
order.notes.create(user=request.user, message=message,
note_type=OrderNote.SYSTEM)
return self.reload_page_response()
def create_shipping_event(self, request, order, lines, quantities):
code = request.POST['shipping_event_type']
try:
event_type = ShippingEventType._default_manager.get(code=code)
except ShippingEventType.DoesNotExist:
messages.error(request, _("The event type '%s' is not valid")
% code)
return self.reload_page_response()
reference = request.POST.get('reference', None)
try:
EventHandler().handle_shipping_event(order, event_type, lines,
quantities,
reference=reference)
except InvalidShippingEvent, e:
messages.error(request,
_("Unable to create shipping event: %s") % e)
except InvalidStatus, e:
messages.error(request,
_("Unable to create shipping event: %s") % e)
except PaymentError, e:
messages.error(request, _("Unable to create shipping event due to"
" payment error: %s") % e)
else:
messages.success(request, _("Shipping event created"))
return self.reload_page_response()
def create_order_payment_event(self, request, order):
amount_str = request.POST.get('amount', None)
try:
amount = D(amount_str)
        except (InvalidOperation, TypeError):
messages.error(request, _("Please choose a valid amount"))
return self.reload_page_response()
return self._create_payment_event(request, order, amount)
def _create_payment_event(self, request, order, amount, lines=None,
quantities=None):
code = request.POST['payment_event_type']
try:
event_type = PaymentEventType._default_manager.get(code=code)
except PaymentEventType.DoesNotExist:
messages.error(request, _("The event type '%s' is not valid")
% code)
return self.reload_page_response()
try:
EventHandler().handle_payment_event(order, event_type, amount,
lines, quantities)
except PaymentError, e:
messages.error(request, _("Unable to change order status due to"
" payment error: %s") % e)
else:
messages.info(request, _("Payment event created"))
return self.reload_page_response()
def create_payment_event(self, request, order, lines, quantities):
amount_str = request.POST.get('amount', None)
# If no amount passed, then we add up the total of the selected lines
if not amount_str:
amount = D('0.00')
for line, quantity in zip(lines, quantities):
amount += int(quantity) * line.line_price_incl_tax
else:
try:
amount = D(amount_str)
except InvalidOperation:
messages.error(request, _("Please choose a valid amount"))
return self.reload_page_response()
return self._create_payment_event(request, order, amount, lines,
quantities)
class LineDetailView(DetailView):
"""
Dashboard view to show a single line of an order.
Supports the permission-based dashboard.
"""
model = Line
context_object_name = 'line'
template_name = 'dashboard/orders/line_detail.html'
def get_object(self, queryset=None):
order = get_order_for_user_or_404(self.request.user,
self.kwargs['number'])
try:
return order.lines.get(pk=self.kwargs['line_id'])
except self.model.DoesNotExist:
raise Http404()
def get_context_data(self, **kwargs):
ctx = super(LineDetailView, self).get_context_data(**kwargs)
ctx['order'] = self.object.order
return ctx
def get_changes_between_models(model1, model2, excludes=None):
"""
Return a dict of differences between two model instances
"""
if excludes is None:
excludes = []
changes = {}
for field in model1._meta.fields:
if (isinstance(field, (fields.AutoField,
fields.related.RelatedField))
or field.name in excludes):
continue
if field.value_from_object(model1) != field.value_from_object(model2):
changes[field.verbose_name] = (field.value_from_object(model1),
field.value_from_object(model2))
return changes
def get_change_summary(model1, model2):
"""
Generate a summary of the changes between two address models
"""
changes = get_changes_between_models(model1, model2, ['search_text'])
change_descriptions = []
for field, delta in changes.items():
change_descriptions.append(_("%(field)s changed from '%(old_value)s'"
" to '%(new_value)s'")
% {'field': field,
'old_value': delta[0],
'new_value': delta[1]})
return "\n".join(change_descriptions)
class ShippingAddressUpdateView(UpdateView):
"""
Dashboard view to update an order's shipping address.
Supports the permission-based dashboard.
"""
model = ShippingAddress
context_object_name = 'address'
template_name = 'dashboard/orders/shippingaddress_form.html'
form_class = forms.ShippingAddressForm
def get_object(self, queryset=None):
order = get_order_for_user_or_404(self.request.user,
self.kwargs['number'])
return get_object_or_404(self.model, order=order)
def get_context_data(self, **kwargs):
ctx = super(ShippingAddressUpdateView, self).get_context_data(**kwargs)
ctx['order'] = self.object.order
return ctx
def form_valid(self, form):
old_address = ShippingAddress.objects.get(id=self.object.id)
response = super(ShippingAddressUpdateView, self).form_valid(form)
changes = get_change_summary(old_address, self.object)
if changes:
msg = _("Delivery address updated:\n%s") % changes
self.object.order.notes.create(user=self.request.user, message=msg,
note_type=OrderNote.SYSTEM)
return response
def get_success_url(self):
messages.info(self.request, _("Delivery address updated"))
return reverse('dashboard:order-detail',
kwargs={'number': self.object.order.number, })
| Idematica/django-oscar | oscar/apps/dashboard/orders/views.py | Python | bsd-3-clause | 28,253 | 0.000035 |
import json
SHOWNAMES_DICT = {
"lipsyncbattle": "Lip Sync Battle",
"archer2009": "Archer (2009)",
"thedailyshow": "The Daily Show",
"atmidnight": "@midnight"
}
with open("test_file.json", "w") as f:
json.dump(SHOWNAMES_DICT, f, indent=4)
| ROldford/tvregex | tests/experiments/json_test.py | Python | mit | 261 | 0 |
#!/usr/bin/env python
#encoding: utf8
import rospy, actionlib
from std_msgs.msg import UInt16
from pimouse_ros.msg import MusicAction, MusicResult, MusicFeedback
def write_freq(hz=0):
bfile = "/dev/rtbuzzer0"
try:
with open(bfile,"w") as f:
f.write(str(hz) + "\n")
except IOError:
rospy.logerr("can't write to " + bfile)
def recv_buzzer(data):
write_freq(data.data)
def exec_music(goal): pass
if __name__ == '__main__':
rospy.init_node('buzzer')
rospy.Subscriber("buzzer", UInt16, recv_buzzer)
music = actionlib.SimpleActionServer('music', MusicAction, exec_music, False)
music.start()
rospy.on_shutdown(write_freq)
rospy.spin()
| e1211205/pimouse_ros | scripts/buzzer4.py | Python | gpl-3.0 | 701 | 0.011412 |
#!/usr/bin/env python
'''
Created on Dec 6, 2013
:author: jzupka
'''
import os
import logging
import select
import cPickle
import time
import remote_interface
import cStringIO
import base64
class IOWrapper(object):
"""
    Class encapsulates IO operations so they are consistent across
    different implementations (stdio, sockets, etc.).
"""
def __init__(self, obj):
"""
        :param obj: IO object, for example a file descriptor.
"""
self._obj = obj
def close(self):
raise NotImplementedError()
def read(self, max_len, timeout=None):
"""
        Read should be implemented as a blocking read from the data source
        when timeout is None, and as a non-blocking read when timeout is
        not None. See StdIOWrapperIn for an example implementation.
        :param max_len: Maximum length of data to read.
        :type max_len: int
        :param timeout: Timeout of the reading operation.
        :type timeout: float
        :return: Data read.
"""
raise NotImplementedError()
def write(self, data):
"""
        Write function should be implemented by objects used for writing.
:param data: Data to write.
:type data: str.
"""
raise NotImplementedError()
def fileno(self):
"""
        Function should return the file descriptor number if the object is
        to be used for standard IO operations.
:return: File number.
"""
raise NotImplementedError()
def _wait_for_data(self, max_len, timeout):
"""
        Wait for data for up to `timeout` seconds.
        :param max_len: Maximum length of data to read.
        :type max_len: int
        :param timeout: Timeout of the reading operation.
        :type timeout: float
        :return: Data read, or None if the timeout expired.
"""
r, _, _ = select.select([self.fileno()], [], [], timeout)
if r:
return self.read(max_len, None)
return None
class DataWrapper(object):
"""
    Base implementation of data encoding/decoding (identity transform).
"""
def decode(self, data):
"""
Decodes the data which was read.
:return: decoded data.
"""
return data
def encode(self, data):
"""
Encode data.
:return: encoded data.
"""
return data
class DataWrapperBase64(DataWrapper):
"""
    Data wrapper which base64-encodes written data and decodes read data.
"""
def decode(self, data):
return base64.b64decode(data)
def encode(self, data):
return base64.b64encode(data)
class StdIOWrapper(IOWrapper, DataWrapper):
"""
Basic implementation of IOWrapper for stdio.
"""
def close(self):
os.close(self._obj)
def fileno(self):
return self._obj
class StdIOWrapperIn(StdIOWrapper):
"""
Basic implementation of IOWrapper for stdin
"""
def read(self, max_len, timeout=None):
if timeout is not None:
return self._wait_for_data(max_len, timeout)
else:
return os.read(self._obj, max_len)
class StdIOWrapperOut(StdIOWrapper):
"""
Basic implementation of IOWrapper for stdout
"""
def write(self, data):
os.write(self._obj, data)
class StdIOWrapperInBase64(StdIOWrapperIn, DataWrapperBase64):
"""
    Implementation of IOWrapper for stdin with base64 encoding.
"""
class StdIOWrapperOutBase64(StdIOWrapperOut, DataWrapperBase64):
"""
    Implementation of IOWrapper for stdout with base64 encoding.
"""
class MessengerError(Exception):
def __init__(self, msg):
super(MessengerError, self).__init__(msg)
self.msg = msg
def __str__(self):
return "Messenger ERROR %s" % (self.msg)
def _map_path(mod_name, kls_name):
if mod_name.endswith('remote_interface'): # catch all old module names
mod = remote_interface
return getattr(mod, kls_name)
else:
mod = __import__(mod_name)
return getattr(mod, kls_name)
class Messenger(object):
"""
Class could be used for communication between two python process connected
by communication canal wrapped by IOWrapper class. Pickling is used
for communication and thus it is possible to communicate every picleable
object.
"""
def __init__(self, stdin, stdout):
"""
        :param stdin: Object for reading data from the communication interface.
        :type stdin: IOWrapper
        :param stdout: Object for writing data to the communication interface.
        :type stdout: IOWrapper
"""
self.stdin = stdin
self.stdout = stdout
        # Unfortunately, only a fixed-width length prefix is supported.
self.enc_len_length = len(stdout.encode("0" * 10))
def close(self):
self.stdin.close()
self.stdout.close()
def format_msg(self, data):
"""
        Format a message: the first 10 characters are the (encoded) length
        of the message and the rest is the pickled payload.
"""
pdata = cPickle.dumps(data, cPickle.HIGHEST_PROTOCOL)
pdata = self.stdout.encode(pdata)
len_enc = self.stdout.encode("%10d" % len(pdata))
return "%s%s" % (len_enc, pdata)
def flush_stdin(self):
"""
Flush all input data from communication interface.
"""
const = 16384
r, _, _ = select.select([self.stdin.fileno()], [], [], 1)
while r:
if len(self.stdin.read(const)) < const:
break
r, _, _ = select.select([self.stdin.fileno()], [], [], 1)
def write_msg(self, data):
"""
Write formated message to communication interface.
"""
self.stdout.write(self.format_msg(data))
def _read_until_len(self, timeout=None):
"""
        Deal with terminal interfaces: read input until the fixed-width
        (encoded) length prefix has been received.
        :param timeout: Timeout of reading.
"""
data = ""
endtime = None
if timeout is not None:
endtime = time.time() + timeout
while (len(data) < self.enc_len_length and
(endtime is None or time.time() < endtime)):
d = self.stdin.read(1, timeout)
if d is None:
return None
if len(d) == 0:
return d
data += d
if len(data) < self.enc_len_length:
return None
return self.stdout.decode(data)
def read_msg(self, timeout=None):
"""
        Read data from the communication interface.
        :param timeout: Timeout for reading data.
        :type timeout: float
        :return: (True, data) when reading is successful.
                 (False, None) when the other side is closed.
                 (None, None) when reading timed out.
"""
data = self._read_until_len(timeout)
if data is None:
return (None, None)
if len(data) == 0:
return (False, None)
rdata = None
try:
cmd_len = int(data)
rdata = ""
rdata_len = 0
while (rdata_len < cmd_len):
rdata += self.stdin.read(cmd_len - rdata_len)
rdata_len = len(rdata)
rdataIO = cStringIO.StringIO(self.stdin.decode(rdata))
unp = cPickle.Unpickler(rdataIO)
unp.find_global = _map_path
data = unp.load()
except Exception, e:
logging.error("ERROR data:%s rdata:%s" % (data, rdata))
try:
self.write_msg(remote_interface.MessengerError("Communication "
"failed.%s" % (e)))
except OSError:
pass
self.flush_stdin()
raise
# Debugging commands.
# if (isinstance(data, remote_interface.BaseCmd)):
# print data.func
return (True, data)
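# Minimal usage sketch (illustrative, not part of this module): connect two
# Messenger endpoints over a pair of os.pipe() file descriptors.
#
#     r1, w1 = os.pipe()   # A -> B
#     r2, w2 = os.pipe()   # B -> A
#     a = Messenger(StdIOWrapperIn(r2), StdIOWrapperOut(w1))
#     b = Messenger(StdIOWrapperIn(r1), StdIOWrapperOut(w2))
#     a.write_msg({"cmd": "ping"})
#     ok, data = b.read_msg(timeout=1.0)   # -> (True, {'cmd': 'ping'})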
| rbian/avocado-vt | virttest/remote_commander/messenger.py | Python | gpl-2.0 | 7,889 | 0.000127 |
"""
Experimental agent implementation running separate threads for emulation and GPU training.
This is slightly (estimate ~20%) faster than the sequential implementation, but results might be different.
Copyright 2016 Rasmus Larsen
This software may be modified and distributed under the terms
of the MIT license. See the LICENSE.txt file for details.
"""
from Agent import Agent
import random
import threading
import time
import numpy as np
class ParallelAgent(Agent):
def __init__(self, emu, net, config):
super(ParallelAgent, self).__init__(emu, net, config)
self.gpu_lock = threading.Lock()
self.testing = False
def train(self):
cpu = threading.Thread(target=self.ale_worker)
cpu.setDaemon(True)
gpu_1 = threading.Thread(target=self.gpu_worker)
gpu_2 = threading.Thread(target=self.gpu_worker)
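        # Two GPU workers overlap minibatch assembly on the CPU with training
        # on the GPU; gpu_lock serializes the actual train/sync calls, while
        # self.steps is bumped by both threads without a lock (approximate).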
for i in xrange(int(self.train_start)): # wait for replay memory to fill
self.next(random.randrange(self.emu.num_actions))
cpu.start()
gpu_1.start()
gpu_2.start()
gpu_1.join()
gpu_2.join()
return
def test(self):
self.testing = True
time.sleep(0.5) # wait a bit for ALE worker to stop
super(ParallelAgent, self).test()
self.testing = False
def ale_worker(self):
"""
Performs epsilon greedy action selection, updating the replay memory and emulating with ALE.
"""
while True:
if self.testing:
time.sleep(0.2)
continue
self.eps_greedy()
def gpu_worker(self):
"""
Gathers a minibatch (on the CPU!) and feeds it to the GPU. Several can run at once, locking the GPU.
"""
while self.steps < self.train_frames:
s, a, r, ns, t = self.mem.get_minibatch() # TODO: ReplayMemory is _not_ thread safe
a = self.emu.onehot_actions(a) # necessary due to tensorflow not having proper indexing
with self.gpu_lock:
cost = self.net.train(s, a, r, ns, t)
if self.steps % self.target_sync == 0:
self.net.sync_target()
if self.steps % self.test_freq == 0:
self.test()
self.steps += 1
if self.steps % 100 == 0: # TODO: remove, just for debugging
print 'step ' + str(self.steps)
| rlrs/deep-rl | dqn/ParallelAgent.py | Python | mit | 2,448 | 0.003268 |
from __future__ import unicode_literals
from django.test import TestCase
from .models import A, B, D, Bar, DataPoint, Foo, RelatedPoint
class SimpleTest(TestCase):
def setUp(self):
self.a1 = A.objects.create()
self.a2 = A.objects.create()
for x in range(20):
B.objects.create(a=self.a1)
D.objects.create(a=self.a1)
def test_nonempty_update(self):
"""
Test that update changes the right number of rows for a nonempty queryset
"""
num_updated = self.a1.b_set.update(y=100)
self.assertEqual(num_updated, 20)
cnt = B.objects.filter(y=100).count()
self.assertEqual(cnt, 20)
def test_empty_update(self):
"""
Test that update changes the right number of rows for an empty queryset
"""
num_updated = self.a2.b_set.update(y=100)
self.assertEqual(num_updated, 0)
cnt = B.objects.filter(y=100).count()
self.assertEqual(cnt, 0)
def test_nonempty_update_with_inheritance(self):
"""
        Test that update changes the right number of rows for a non-empty queryset
when the update affects only a base table
"""
num_updated = self.a1.d_set.update(y=100)
self.assertEqual(num_updated, 20)
cnt = D.objects.filter(y=100).count()
self.assertEqual(cnt, 20)
def test_empty_update_with_inheritance(self):
"""
Test that update changes the right number of rows for an empty queryset
when the update affects only a base table
"""
num_updated = self.a2.d_set.update(y=100)
self.assertEqual(num_updated, 0)
cnt = D.objects.filter(y=100).count()
self.assertEqual(cnt, 0)
def test_foreign_key_update_with_id(self):
"""
Test that update works using <field>_id for foreign keys
"""
num_updated = self.a1.d_set.update(a_id=self.a2)
self.assertEqual(num_updated, 20)
self.assertEqual(self.a2.d_set.count(), 20)
class AdvancedTests(TestCase):
def setUp(self):
self.d0 = DataPoint.objects.create(name="d0", value="apple")
self.d2 = DataPoint.objects.create(name="d2", value="banana")
self.d3 = DataPoint.objects.create(name="d3", value="banana")
self.r1 = RelatedPoint.objects.create(name="r1", data=self.d3)
def test_update(self):
"""
Objects are updated by first filtering the candidates into a queryset
and then calling the update() method. It executes immediately and
        returns the number of rows matched.
"""
resp = DataPoint.objects.filter(value="apple").update(name="d1")
self.assertEqual(resp, 1)
resp = DataPoint.objects.filter(value="apple")
self.assertEqual(list(resp), [self.d0])
def test_update_multiple_objects(self):
"""
We can update multiple objects at once.
"""
resp = DataPoint.objects.filter(value="banana").update(
value="pineapple")
self.assertEqual(resp, 2)
self.assertEqual(DataPoint.objects.get(name="d2").value, 'pineapple')
def test_update_fk(self):
"""
Foreign key fields can also be updated, although you can only update
the object referred to, not anything inside the related object.
"""
resp = RelatedPoint.objects.filter(name="r1").update(data=self.d0)
self.assertEqual(resp, 1)
resp = RelatedPoint.objects.filter(data__name="d0")
self.assertEqual(list(resp), [self.r1])
def test_update_multiple_fields(self):
"""
Multiple fields can be updated at once
"""
resp = DataPoint.objects.filter(value="apple").update(
value="fruit", another_value="peach")
self.assertEqual(resp, 1)
d = DataPoint.objects.get(name="d0")
self.assertEqual(d.value, 'fruit')
self.assertEqual(d.another_value, 'peach')
def test_update_all(self):
"""
In the rare case you want to update every instance of a model, update()
is also a manager method.
"""
self.assertEqual(DataPoint.objects.update(value='thing'), 3)
resp = DataPoint.objects.values('value').distinct()
self.assertEqual(list(resp), [{'value': 'thing'}])
def test_update_slice_fail(self):
"""
We do not support update on already sliced query sets.
"""
method = DataPoint.objects.all()[:2].update
with self.assertRaises(AssertionError):
method(another_value='another thing')
def test_update_respects_to_field(self):
"""
Update of an FK field which specifies a to_field works.
"""
a_foo = Foo.objects.create(target='aaa')
b_foo = Foo.objects.create(target='bbb')
bar = Bar.objects.create(foo=a_foo)
self.assertEqual(bar.foo_id, a_foo.target)
bar_qs = Bar.objects.filter(pk=bar.pk)
self.assertEqual(bar_qs[0].foo_id, a_foo.target)
bar_qs.update(foo=b_foo)
self.assertEqual(bar_qs[0].foo_id, b_foo.target)
| Endika/django | tests/update/tests.py | Python | bsd-3-clause | 5,112 | 0.000196 |
#!/usr/bin/env python
import os
import sys
import argparse
import subprocess
def main():
progname = os.path.basename(sys.argv[0])
usage = progname + """ [options] <*.png>
convert -average *.png output.png
	In total 210 images: first average groups of 15, then average the 14 group outputs.
"""
args_def = {}
parser = argparse.ArgumentParser()
parser.add_argument("png", nargs='*', help="specify png to be averaged")
args = parser.parse_args()
if len(sys.argv) == 1:
print "usage: " + usage
print "Please run '" + progname + " -h' for detailed options."
sys.exit(1)
# get default values
for i in args_def:
if args.__dict__[i] == None:
args.__dict__[i] = args_def[i]
#
for i in xrange(14):
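		# Pass one: average frames 15*i .. 15*i+14 into output_i.png.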
cmd = ['convert', '-average'] + args.png[15*i:15*(i+1)] + ['output_{}.png'.format(i)]
subprocess.call(cmd, stderr=subprocess.STDOUT)
cmd = ['convert', '-average', 'output_*.png', 'output.png']
subprocess.call(cmd, stderr=subprocess.STDOUT)
if __name__ == '__main__':
main()
| emkailu/PAT3DEM | bin/p3convert.py | Python | mit | 998 | 0.036072 |
from createsend import *
auth = {
'access_token': 'YOUR_ACCESS_TOKEN',
'refresh_token': 'YOUR_REFRESH_TOKEN' }
listId = 'YOUR_LIST_ID'
emailAddress = 'YOUR_SUBSCRIBER_EMAIL_ADDRESS'
subscriber = Subscriber(auth, listId, emailAddress)
# Get the details for a subscriber
subscriberDetail = subscriber.get()
for property, value in vars(subscriberDetail).items():
print(property, ":", value)
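# Typical printed attributes (following the Campaign Monitor API's naming)
# include EmailAddress, Name, Date, State and CustomFields; the exact set
# depends on the API response.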
| campaignmonitor/createsend-python | samples/subscribers.py | Python | mit | 411 | 0.004866 |
""" # lint-amnesty, pylint: disable=cyclic-import
XFields for video module.
"""
import datetime
from xblock.fields import Boolean, DateTime, Dict, Float, List, Scope, String
from xmodule.fields import RelativeTime
# Make '_' a no-op so we can scrape strings. Using lambda instead of
# `django.utils.translation.ugettext_noop` because Django cannot be imported in this file
_ = lambda text: text
class VideoFields:
"""Fields for `VideoBlock`."""
display_name = String(
help=_("The display name for this component."),
display_name=_("Component Display Name"),
default="Video",
scope=Scope.settings
)
saved_video_position = RelativeTime(
help=_("Current position in the video."),
scope=Scope.user_state,
default=datetime.timedelta(seconds=0)
)
# TODO: This should be moved to Scope.content, but this will
# require data migration to support the old video module.
youtube_id_1_0 = String(
help=_("Optional, for older browsers: the YouTube ID for the normal speed video."),
display_name=_("YouTube ID"),
scope=Scope.settings,
default="3_yD_cEKoCk"
)
youtube_id_0_75 = String(
help=_("Optional, for older browsers: the YouTube ID for the .75x speed video."),
display_name=_("YouTube ID for .75x speed"),
scope=Scope.settings,
default=""
)
youtube_id_1_25 = String(
help=_("Optional, for older browsers: the YouTube ID for the 1.25x speed video."),
display_name=_("YouTube ID for 1.25x speed"),
scope=Scope.settings,
default=""
)
youtube_id_1_5 = String(
help=_("Optional, for older browsers: the YouTube ID for the 1.5x speed video."),
display_name=_("YouTube ID for 1.5x speed"),
scope=Scope.settings,
default=""
)
start_time = RelativeTime( # datetime.timedelta object
help=_(
"Time you want the video to start if you don't want the entire video to play. "
"Not supported in the native mobile app: the full video file will play. "
"Formatted as HH:MM:SS. The maximum value is 23:59:59."
),
display_name=_("Video Start Time"),
scope=Scope.settings,
default=datetime.timedelta(seconds=0)
)
end_time = RelativeTime( # datetime.timedelta object
help=_(
"Time you want the video to stop if you don't want the entire video to play. "
"Not supported in the native mobile app: the full video file will play. "
"Formatted as HH:MM:SS. The maximum value is 23:59:59."
),
display_name=_("Video Stop Time"),
scope=Scope.settings,
default=datetime.timedelta(seconds=0)
)
    # Front-end code of the video player checks the logical validity of the (start_time, end_time) pair.
download_video = Boolean(
help=_("Allow students to download versions of this video in different formats if they cannot use the edX video"
" player or do not have access to YouTube. You must add at least one non-YouTube URL "
"in the Video File URLs field."),
display_name=_("Video Download Allowed"),
scope=Scope.settings,
default=False
)
html5_sources = List(
help=_("The URL or URLs where you've posted non-YouTube versions of the video. Each URL must end in .mpeg,"
" .mp4, .ogg, or .webm and cannot be a YouTube URL. (For browser compatibility, we strongly recommend"
" .mp4 and .webm format.) Students will be able to view the first listed video that's compatible with"
" the student's computer. To allow students to download these videos, "
"set Video Download Allowed to True."),
display_name=_("Video File URLs"),
scope=Scope.settings,
)
track = String(
help=_("By default, students can download an .srt or .txt transcript when you set Download Transcript "
"Allowed to True. If you want to provide a downloadable transcript in a different format, we recommend "
"that you upload a handout by using the Upload a Handout field. If this isn't possible, you can post a "
"transcript file on the Files & Uploads page or on the Internet, and then add the URL for the "
"transcript here. Students see a link to download that transcript below the video."),
display_name=_("Downloadable Transcript URL"),
scope=Scope.settings,
default=''
)
download_track = Boolean(
help=_("Allow students to download the timed transcript. A link to download the file appears below the video."
" By default, the transcript is an .srt or .txt file. If you want to provide the transcript for "
"download in a different format, upload a file by using the Upload Handout field."),
display_name=_("Download Transcript Allowed"),
scope=Scope.settings,
default=False
)
    # `sub` is a deprecated field and should not be used in the future. Transcripts are now primarily handled in
    # VAL, and backward compatibility for the video modules already using this field has been ensured.
sub = String(
help=_("The default transcript for the video, from the Default Timed Transcript field on the Basic tab. "
"This transcript should be in English. You don't have to change this setting."),
display_name=_("Default Timed Transcript"),
scope=Scope.settings,
default=""
)
show_captions = Boolean(
help=_("Specify whether the transcripts appear with the video by default."),
display_name=_("Show Transcript"),
scope=Scope.settings,
default=True
)
# Data format: {'de': 'german_translation', 'uk': 'ukrainian_translation'}
transcripts = Dict(
help=_("Add transcripts in different languages."
" Click below to specify a language and upload an .srt transcript file for that language."),
display_name=_("Transcript Languages"),
scope=Scope.settings,
default={}
)
transcript_language = String(
help=_("Preferred language for transcript."),
display_name=_("Preferred language for transcript"),
scope=Scope.preferences,
default="en"
)
transcript_download_format = String(
help=_("Transcript file format to download by user."),
scope=Scope.preferences,
values=[
# Translators: This is a type of file used for captioning in the video player.
{"display_name": _("SubRip (.srt) file"), "value": "srt"},
{"display_name": _("Text (.txt) file"), "value": "txt"}
],
default='srt',
)
speed = Float(
help=_("The last speed that the user specified for the video."),
scope=Scope.user_state
)
global_speed = Float(
help=_("The default speed for the video."),
scope=Scope.preferences,
default=1.0
)
auto_advance = Boolean(
help=_("Specify whether to advance automatically to the next unit when the video ends."),
scope=Scope.preferences,
# The default is True because this field only has an effect when auto-advance controls are enabled
# (globally enabled through feature flag and locally enabled through course setting); in that case
# it's good to start auto-advancing and let the student disable it, instead of the other way around
# (requiring the user to enable it). When auto-advance controls are hidden, this field won't be used.
default=True,
)
youtube_is_available = Boolean(
help=_("Specify whether YouTube is available for the user."),
scope=Scope.user_info,
default=True
)
handout = String(
help=_("Upload a handout to accompany this video. Students can download the handout by "
"clicking Download Handout under the video."),
display_name=_("Upload Handout"),
scope=Scope.settings,
)
only_on_web = Boolean(
help=_(
"Specify whether access to this video is limited to browsers only, or if it can be "
"accessed from other applications including mobile apps."
),
display_name=_("Video Available on Web Only"),
scope=Scope.settings,
default=False
)
edx_video_id = String(
help=_("If you were assigned a Video ID by edX for the video to play in this component, enter the ID here."
" In this case, do not enter values in the Default Video URL, the Video File URLs, "
"and the YouTube ID fields. If you were not assigned a Video ID,"
" enter values in those other fields and ignore this field."),
display_name=_("Video ID"),
scope=Scope.settings,
default="",
)
bumper_last_view_date = DateTime(
display_name=_("Date of the last view of the bumper"),
scope=Scope.preferences,
)
bumper_do_not_show_again = Boolean(
display_name=_("Do not show bumper again"),
scope=Scope.preferences,
default=False,
)
| eduNEXT/edx-platform | common/lib/xmodule/xmodule/video_module/video_xfields.py | Python | agpl-3.0 | 9,242 | 0.004653 |