| repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 6-947k) | score (float64 0-0.34) | prefix (stringlengths 0-8.16k) | middle (stringlengths 3-512) | suffix (stringlengths 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
| kubernetes-client/python | kubernetes/client/models/v1_rule_with_operations.py | Python | apache-2.0 | 9,436 | 0 |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.23
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1RuleWithOperations(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_groups': 'list[str]',
'api_versions': 'list[str]',
'operations': 'list[str]',
'resources': 'list[str]',
'scope': 'str'
}
attribute_map = {
'api_groups': 'apiGroups',
'api_versions': 'apiVersions',
'operations': 'operations',
'resources': 'resources',
'scope': 'scope'
}
def __init__(self, api_groups=None, api_versions=None, operations=None, resources=None, scope=None, local_vars_configuration=None): # noqa: E501
"""V1RuleWithOperations - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_groups = None
self._api_versions = None
self._operations = None
self._resources = None
self._scope = None
self.discriminator = None
if api_groups is not None:
self.api_groups = api_groups
if api_versions is not None:
self.api_versions = api_versions
if operations is not None:
self.operations = operations
if resources is not None:
self.resources = resources
if scope is not None:
self.scope = scope
@property
def api_groups(self):
"""Gets the api_groups of this V1RuleWithOperations. # noqa: E501
APIGroups is the API groups the resources belong to. '*' is all groups. If '*' is present, the length of the slice must be one. Required. # noqa: E501
:return: The api_groups of this V1RuleWithOperations. # noqa: E501
:rtype: list[str]
"""
return self._api_groups
@api_groups.setter
def api_groups(self, api_groups):
"""Sets the api_groups of this V1RuleWithOperations.
APIGroups is the API groups the resources belong to. '*' is all groups. If '*' is present, the length of the slice must be one. Required. # noqa: E501
:param api_groups: The api_groups of this V1RuleWithOperations. # noqa: E501
:type: list[str]
"""
self._api_groups = api_groups
@property
def api_versions(self):
"""Gets the api_versions of this V1RuleWithOperations. # noqa: E501
APIVersions is the API versions the resources belong to. '*' is all versions. If '*' is present, the length of the slice must be one. Required. # noqa: E501
:return: The api_versions of this V1RuleWithOperations. # noqa: E501
:rtype: list[str]
"""
return self._api_versions
@api_versions.setter
def api_versions(self, api_versions):
"""Sets the api_versions of this V1RuleWithOperations.
APIVersions is the API versions the resources belong to. '*' is all versions. If '*' is present, the length of the slice must be one. Required. # noqa: E501
:param api_versions: The api_versions of this V1RuleWithOperations. # noqa: E501
:type: list[str]
"""
self._api_versions = api_versions
@property
def operations(self):
"""Gets the operations of this V1RuleWithOperations. # noqa: E501
Operations is the operations the admission hook cares about - CREATE, UPDATE, DELETE, CONNECT or * for all of those operations and any future admission operations that are added. If '*' is present, the length of the slice must be one. Required. # noqa: E501
:return: The operations of this V1RuleWithOperations. # noqa: E501
:rtype: list[str]
"""
return self._operations
@operations.setter
def operations(self, operations):
"""Sets the operations of this V1RuleWithOperations.
Operations is the operations the admission hook cares about - CREATE, UPDATE, DELETE, CONNECT or * for all of those operations and any future admission operations that are added. If '*' is present, the length of the slice must be one. Required. # noqa: E501
:param operations: The operations of this V1RuleWithOperations. # noqa: E501
:type: list[str]
"""
self._operations = operations
@property
def resources(self):
"""Gets the resources of this V1RuleWithOperations. # noqa: E501
Resources is a list of resources this rule applies to. For example: 'pods' means pods. 'pods/log' means the log subresource of pods. '*' means all resources, but not subresources. 'pods/*' means all subresources of pods. '*/scale' means all scale subresources. '*/*' means all resources and their subresources. If wildcard is present, the validation rule will ensure resources do not overlap with each other. Depending on the enclosing object, subresources might not be allowed. Required. # noqa: E501
:return: The resources of this V1RuleWithOperations. # noqa: E501
:rtype: list[str]
"""
return self._resources
@resources.setter
def resources(self, resources):
"""Sets the resources of this V1RuleWithOperations.
Resources is a list of resources this rule applies to. For example: 'pods' means pods. 'pods/log' means the log subresource of pods. '*' means all resources, but not subresources. 'pods/*' means all subresources of pods. '*/scale' means all scale subresources. '*/*' means all resources and their subresources. If wildcard is present, the validation rule will ensure resources do not overlap with each other. Depending on the enclosing object, subresources might not be allowed. Required. # noqa: E501
:param resources: The resources of this V1RuleWithOperations. # noqa: E501
:type: list[str]
"""
self._resources = resources
@property
def scope(self):
"""Gets the scope of this V1RuleWithOperations. # noqa: E501
scope specifies the scope of this rule. Valid values are \"Cluster\", \"Namespaced\", and \"*\" \"Cluster\" means that only cluster-scoped resources will match this rule. Namespace API objects are cluster-scoped. \"Namespaced\" means that only namespaced resources will match this rule. \"*\" means that there are no scope restrictions. Subresources match the scope of their parent resource. Default is \"*\". # noqa: E501
:return: The scope of this V1RuleWithOperations. # noqa: E501
:rtype: str
"""
return self._scope
@scope.setter
def scope(self, scope):
"""Sets the scope of this V1RuleWithOperations.
scope specifies the scope of this rule. Valid values are \"Cluster\", \"Namespaced\", and \"*\" \"Cluster\" means that only cluster-scoped resources will match this rule. Namespace API objects are cluster-scoped. \"Namespaced\" means that only namespaced resources will match this rule. \"*\" means that there are no scope restrictions. Subresources match the scope of their parent resource. Default is \"*\". # noqa: E501
:param scope: The scope of this V1RuleWithOperations. # noqa: E501
:type: str
"""
self._scope = scope
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
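A minimal usage sketch (not part of the generated file; it assumes the kubernetes Python client is installed, with values taken from the field docstrings above):

from kubernetes.client import V1RuleWithOperations

# Rule matching CREATE/UPDATE of pods in the core ("") API group, v1 only.
rule = V1RuleWithOperations(
    api_groups=[""],
    api_versions=["v1"],
    operations=["CREATE", "UPDATE"],
    resources=["pods"],
    scope="Namespaced",
)
print(rule.to_dict())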
| whtsky/parguments | parguments/cli.py | Python | mit | 3,906 | 0 |
# Copyright (c) 2010 by Dan Jacob.
#
# Some rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * The names of the contributors may not be used to endorse or
# promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import getpass
try:
assert raw_input
except NameError:
raw_input = input
def prompt(name, default=None):
"""
Grab user input from command line.
:param name: prompt text
:param default: default value if no input provided.
"""
prompt = name + (default and ' [%s]' % default or '')
prompt += name.endswith('?') and ' ' or ': '
while True:
rv = raw_input(prompt)
if rv:
return rv
if default is not None:
return default
def prompt_pass(name, default=None):
"""
Grabs hidden (password) input from command line.
:param name: prompt text
:param default: default value if no input provided.
"""
prompt = name + (default and ' [%s]' % default or '')
prompt += name.endswith('?') and ' ' or ': '
while True:
rv = getpass.getpass(prompt)
if rv:
return rv
if default is not None:
return default
def prompt_bool(name, default=False, yes_choices=None, no_choices=None):
"""
Grabs user input from command line and converts to boolean value.
:param name: prompt text
:param default: default value if no input provided.
:param yes_choices: default 'y', 'yes', '1', 'on', 'true', 't'
:param no_choices: default 'n', 'no', '0', 'off', 'false', 'f'
"""
yes_choices = yes_choices or ('y', 'yes', '1', 'on', 'true', 't')
no_choices = no_choices or ('n', 'no', '0', 'off', 'false', 'f')
while True:
rv = prompt(name + '?', default and yes_choices[0] or no_choices[0])
if rv.lower() in yes_choices:
return True
elif rv.lower() in no_choices:
return False
def prompt_choices(name, choices, default=None, no_choice=('none',)):
"""
Grabs user input from command line from set of provided choices.
:param name: prompt text
:param choices: list or tuple of available choices.
:param default: default value if no input provided.
:param no_choice: acceptable list of strings for "null choice"
"""
_choices = []
options = []
for choice in choices:
options.append(choice)
_choices.append(choice)
while True:
rv = prompt(name + '? - (%s)' % ', '.join(options), default)
rv = rv.lower()
if rv in no_choice:
return None
if rv in _choices:
return rv
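A short usage sketch of the helpers above; the comments show the exact prompt text each call displays:

project = prompt("Project name", default="demo")  # "Project name [demo]: "
if prompt_bool("Enable logging", default=True):  # "Enable logging? [y] "
    level = prompt_choices("Log level", ["debug", "info"], default="info")  # "Log level? - (debug, info) [info]: "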
| JamesLinEngineer/RKMC | addons/script.navi-x/src/CLogin.py | Python | gpl-2.0 | 3,203 | 0.01561 |
#############################################################################
#
# Copyright (C) 2013 Navi-X
#
# This file is part of Navi-X.
#
# Navi-X is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Navi-X is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Navi-X. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
#############################################################################
#
# CDialogLogin:
# This class handles user login to the Navi-Xtreme website.
#############################################################################
from string import *
import sys, os.path
import urllib
import urllib2
import re, random, string
import xbmc, xbmcgui, xbmcaddon
import re, os, time, datetime, traceback
import shutil
import os
from libs2 import *
try: Emulating = xbmcgui.Emulating
except: Emulating = False
LABEL_USRNAME = 141
LABEL_PASSWORD = 142
BUTTON_USRNAME = 143
BUTTON_PASSWORD = 1144
BUTTON_LOGIN = 145
BUTTON_CANCEL = 146
class CDialogLogin(xbmcgui.WindowXMLDialog):
def __init__(self,strXMLname, strFallbackPath):#, strDefaultName, forceFallback):
# self.setCoordinateResolution(PAL_4x3)
#user background image
# self.bg = xbmcgui.ControlImage(100,100,520,376, imageDir + "background_txt.png")
# self.addControl(self.bg)
self.userloggedin = False
#read user ID from file
self.user_id=''
pass
def onAction(self, action):
if (action == ACTION_PREVIOUS_MENU) or (action == ACTION_PARENT_DIR) or (action == ACTION_PREVIOUS_MENU2):# or (action == ACTION_MOVE_LEFT):
self.close()
def onFocus( self, controlId ):
pass
def onClick( self, controlId ):
pass
def onControl(self, control):
#self.setFocus(control)
pass
def login(self):
#display GUI window
self.doModal()
#perform login to the Navi-Xtreme server
#if success
self.save_user_id()
def logout(self):
self.user_id=''
self.save_user_id() # persist the cleared user ID (write_user_id never existed)
def is_user_logged_in(self):
if self.user_id != '':
return True
return False
def rate_item(self, mediaitem):
pass
def read_user_id(self):
pass
def save_user_id(self):
pass
#end of class
#use singleton
#login = CDialogLogin("CLoginskin.xml", os.getcwd())
login = CDialogLogin("CLoginskin2.xml", addon.getAddonInfo('path'))
| mtlchun/edx | common/lib/xmodule/xmodule/lti_module.py | Python | agpl-3.0 | 37,531 | 0.002771 |
"""
Learning Tools Interoperability (LTI) module.
Resources
---------
Theoretical background and detailed specifications of LTI can be found on:
http://www.imsglobal.org/LTI/v1p1p1/ltiIMGv1p1p1.html
This module is based on the version 1.1.1 of the LTI specifications by the
IMS Global authority. For authentication, it uses OAuth1.
When responding back to the LTI tool provider, we must issue a correct
response. Types of responses and their message payloads are available at:
Table A1.2 Interpretation of the 'CodeMajor/severity' matrix.
http://www.imsglobal.org/gws/gwsv1p0/imsgws_wsdlBindv1p0.html
A resource to test the LTI protocol (PHP realization):
http://www.imsglobal.org/developers/LTI/test/v1p1/lms.php
We have also begun to add support for LTI 1.2/2.0. We will keep this
docstring in synch with what support is available. The first LTI 2.0
feature to be supported is the REST API results service, see specification
at
http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html
What is supported:
------------------
1.) Display of simple LTI in iframe or a new window.
2.) Multiple LTI components on a single page.
3.) The use of multiple LTI providers per course.
4.) Use of advanced LTI component that provides back a grade.
A) LTI 1.1.1 XML endpoint
a.) The LTI provider sends back a grade to a specified URL.
b.) Currently only action "update" is supported. "Read" and "delete"
actions initially weren't required.
B) LTI 2.0 Result Service JSON REST endpoint
(http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html)
a.) Discovery of all such LTI http endpoints for a course. External tools GET from this discovery
endpoint and receive URLs for interacting with individual grading units.
(see lms/djangoapps/courseware/views.py:get_course_lti_endpoints)
b.) GET, PUT and DELETE in LTI Result JSON binding
(http://www.imsglobal.org/lti/ltiv2p0/mediatype/application/vnd/ims/lis/v2/result+json/index.html)
for a provider to synchronize grades into edx-platform. Reading, Setting, and Deleting
Numeric grades between 0 and 1 and text + basic HTML feedback comments are supported, via
GET / PUT / DELETE HTTP methods respectively
"""
import datetime
from django.utils.timezone import UTC
import logging
import oauthlib.oauth1
from oauthlib.oauth1.rfc5849 import signature
import hashlib
import base64
import urllib
import textwrap
import bleach
from lxml import etree
from webob import Response
import mock
from xml.sax.saxutils import escape
from xmodule.editing_module import MetadataOnlyEditingDescriptor
from xmodule.raw_module import EmptyDataRawDescriptor
from xmodule.x_module import XModule, module_attr
from xmodule.course_module import CourseDescriptor
from xmodule.lti_2_util import LTI20ModuleMixin, LTIError
from pkg_resources import resource_string
from xblock.core import String, Scope, List, XBlock
from xblock.fields import Boolean, Float
log = logging.getLogger(__name__)
# Make '_' a no-op so we can scrape strings
_ = lambda text: text
DOCS_ANCHOR_TAG_OPEN = (
"<a target='_blank' "
"href='http://edx.readthedocs.org/projects/ca/en/latest/exercises_tools/lti_component.html'>"
)
class LTIFields(object):
"""
Fields to define and obtain LTI tool from provider are set here,
except credentials, which should be set in course settings::
`lti_id` is id to connect tool with credentials in course settings. It should not contain :: (double semicolon)
`launch_url` is launch URL of tool.
`custom_parameters` are additional parameters to navigate to proper book and book page.
For example, for Vitalsource provider, `launch_url` should be
*https://bc-staging.vitalsource.com/books/book*,
and to get to proper book and book page, you should set custom parameters as::
vbid=put_book_id_here
book_location=page/put_page_number_here
Default non-empty URL for `launch_url` is needed due to oauthlib demand (URL scheme should be presented)::
https://github.com/idan/oauthlib/blob/master/oauthlib/oauth1/rfc5849/signature.py#L136
"""
display_name = String(
display_name=_("Display Name"),
help=_(
"Enter the name that students see for this component. "
"Analytics reports may also use the di
|
splay name to identify this component."
),
scope=Scope.settings,
default="LTI",
)
lti_id = String(
display_name=_("LTI ID"),
help=_(
"Enter the LTI ID for the external LTI provider. "
"This value must be the same LTI ID that you entered in the "
"LTI Passports setting on the Advanced Settings page."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
default='',
scope=Scope.settings
)
launch_url = String(
display_name=_("LTI URL"),
help=_(
"Enter the URL of the external tool that this component launches. "
"This setting is only used when Hide External Tool is set to False."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
default='http://www.example.com',
scope=Scope.settings)
custom_parameters = List(
display_name=_("Custom Parameters"),
help=_(
"Add the key/value pair for any custom parameters, such as the page your e-book should open to or "
"the background color for this component."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
scope=Scope.settings)
open_in_a_new_page = Boolean(
display_name=_("Open in New Page"),
help=_(
"Select True if you want students to click a link that opens the LTI tool in a new window. "
"Select False if you want the LTI content to open in an IFrame in the current page. "
"This setting is only used when Hide External Tool is set to False. "
),
default=True,
scope=Scope.settings
)
has_score = Boolean(
display_name=_("Scored"),
help=_(
"Select True if this component will receive a numerical score from the external LTI system."
),
default=False,
scope=Scope.settings
)
weight = Float(
display_name=_("Weight"),
help=_(
"Enter the number of points possible for this component. "
"The default value is 1.0. "
"This setting is only used when Scored is set to True."
),
default=1.0,
scope=Scope.settings,
values={"min": 0},
)
module_score = Float(
help=_("The score kept in the xblock KVS -- duplicate of the published score in django DB"),
default=None,
scope=Scope.user_state
)
score_comment = String(
help=_("Comment as returned from grader, LTI2.0 spec"),
default="",
scope=Scope.user_state
)
hide_launch = Boolean(
display_name=_("Hide External Tool"),
help=_(
"Select True if you want to use this component as a placeholder for syncing with an external grading "
"system rather than launch an external tool. "
"This setting hides the Launch button and any IFrames for this component."
),
default=False,
scope=Scope.settings
)
# Users will be presented with a message indicating that their e-mail/username would be sent to a third
# party application. When "Open in New Page" is not selected, the tool automatically appears without any user action.
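A hypothetical sketch of these fields set in code for a scored component (course authors normally set them through Studio; lti_component and all values below are placeholders):

lti_component.lti_id = "vitalsource"  # must match an entry in the course's LTI Passports
lti_component.launch_url = "https://bc-staging.vitalsource.com/books/book"
lti_component.custom_parameters = ["vbid=put_book_id_here", "book_location=page/5"]
lti_component.has_score = True
lti_component.weight = 1.0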
| jtpaasch/simplygithub | simplygithub/internals/merges.py | Python | mit | 1,433 | 0 |
# -*- coding: utf-8 -*-
"""Utilities for performing merges."""
from . import api
def prepare(data):
"""Restructure/prepare data about merges for output."""
sha = data.get("sha")
commit = data.get("commit")
message = commit.get("message")
tree = commit.get("tree")
tree_sha = tree.get("sha")
return {"message": message, "sha": sha, "tree": {"sha": tree_sha}}
def merge(profile, head, base, commit_message=None):
"""Merge the head of a branch into the base branch.
Args:
profile
A profile generated from ``simplygithub.authentication.profile``.
Such profiles tell this module (i) the ``repo`` to connect to,
and (ii) the ``token`` to connect with.
head
The head to merge. It can be a SHA, or a branch name.
base
The name of the branch to merge the specified head into.
commit_message
The message to give for the commit.
Returns:
A dict with data about the merge.
"""
if not commit_message:
commit_message = "Merged " + head + " into " + base + "."
payload = {
"base": base,
"head": head,
"commit_message": commit_message,
}
response = api.post_merge_request(profile, payload)
data = None
if response.status_code == 201:
json_data = response.json()
data = prepare(json_data)
return data
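A hypothetical call; the profile shape below (repo plus token) follows the docstring above, but the exact object produced by simplygithub.authentication.profile may differ:

profile = {"repo": "owner/repo", "token": "ghp_example"}  # placeholder values
result = merge(profile, head="feature-branch", base="master")
if result:
    print(result["sha"], result["message"])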
| pawhewitt/Dev | SU2_PY/SU2/run/geometry.py | Python | lgpl-2.1 | 3,794 | 0.011861 |
#!/usr/bin/env python
## \file geometry.py
# \brief python package for running geometry analyses
# \author T. Lukaczyk, F. Palacios
# \version 5.0.0 "Rav
|
en"
#
# SU2 Original Developers: Dr. Francisco D. Palacios.
# Dr. Thomas D. Economon.
#
# SU2 Developers: Prof. Juan J. Alonso's group at Stanford University.
# Prof. Piero Colonna's group at Delft University of Technology.
# Prof. Nicolas R. Gauger's group at Kaiserslautern University of Technology.
# Prof. Alberto Guardone's group at Polytechnic University of Milan.
# Prof. Rafael Palacios' group at Imperial College London.
# Prof. Edwin van der Weide's group at the University of Twente.
# Prof. Vincent Terrapon's group at the University of Liege.
#
# Copyright (C) 2012-2017 SU2, the open-source CFD code.
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
# ----------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------
import os, sys, shutil, copy
from .. import io as su2io
from interface import GEO as SU2_GEO
from ..util import ordered_bunch
# ----------------------------------------------------------------------
# Direct Simulation
# ----------------------------------------------------------------------
def geometry ( config , step = 1e-3 ):
""" info = SU2.run.geometry(config)
Runs a geometry analysis with:
SU2.run.decomp()
SU2.run.GEO()
Assumptions:
Performs both function and gradient analysis
Inputs:
config - an SU2 configuration
step - gradient finite difference step if config.GEO_MODE=GRADIENT
Outputs:
info - SU2 State with keys:
FUNCTIONS
GRADIENTS
Updates:
Executes in:
./
"""
# local copy
konfig = copy.deepcopy(config)
# unpack
function_name = konfig['GEO_PARAM']
func_filename = konfig['VALUE_OBJFUNC_FILENAME']
grad_filename = konfig['GRAD_OBJFUNC_FILENAME']
# choose dv values
Definition_DV = konfig['DEFINITION_DV']
n_DV = len(Definition_DV['KIND'])
if isinstance(step,list):
assert len(step) == n_DV , 'unexpected step vector length'
else:
step = [step]*n_DV
dv_old = [0.0]*n_DV # SU2_DOT input requirement, assumes linear superposition of design variables
dv_new = step
konfig.unpack_dvs(dv_new,dv_old)
# Run Solution
SU2_GEO(konfig)
# info out
info = su2io.State()
# get function values
if konfig.GEO_MODE == 'FUNCTION':
functions = su2io.tools.read_plot(func_filename)
for key,value in functions.items():
functions[key] = value[0]
info.FUNCTIONS.update( functions )
# get gradient_values
if konfig.GEO_MODE == 'GRADIENT':
gradients = su2io.tools.read_plot(grad_filename)
info.GRADIENTS.update( gradients )
return info
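A hypothetical driver script (the configuration filename is a placeholder):

import SU2
config = SU2.io.Config("config_NACA0012.cfg")
config.GEO_MODE = 'FUNCTION'
info = SU2.run.geometry(config)
print(info.FUNCTIONS)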
| hadim/pygraphml | pygraphml/tests/__init__.py | Python | bsd-3-clause | 93 | 0 |
def run_all():
import nose2
nose2.discover(module='pygraphml')
__all__ = ['run_all']
| bcarroll/splunk_samltools | bin/splunksaml.py | Python | apache-2.0 | 6,313 | 0.011722 |
import re, sys, time, splunk.Intersplunk
import urllib, zlib, base64
import logging, logging.handlers
try:
import xml.etree.cElementTree as xml
except ImportError:
import xml.etree.ElementTree as xml
def setup_logger(LOGGER_NAME,LOGFILE_NAME):
logger = logging.getLogger(LOGGER_NAME)
file_handler = logging.handlers.RotatingFileHandler(LOGFILE_NAME)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
logger.setLevel(logging.ERROR)
return(logger)
def set_logger_level(LOGGER_LEVEL='NOTSET'):
logger.info('set_logger_level(' + LOGGER_LEVEL + ') called...')
if LOGGER_LEVEL == 'NOTSET':
logger.setLevel(logging.NOTSET)
elif LOGGER_LEVEL == 'DEBUG':
logger.setLevel(logging.DEBUG)
elif LOGGER_LEVEL == 'INFO':
logger.setLevel(logging.INFO)
elif LOGGER_LEVEL == 'WARNING':
logger.setLevel(logging.WARNING)
elif LOGGER_LEVEL == 'ERROR':
logger.setLevel(logging.ERROR)
elif LOGGER_LEVEL == 'CRITICAL':
logger.setLevel(logging.CRITICAL)
return(None)
def uri_unescape(string):
# Parameters
# string : URI escaped string
# Return
# URI unescaped string
logger.debug('uri_unescape() called...')
uri_unescaped_string = None
try:
uri_unescaped_string = urllib.unquote(string) # urldecode Base64 encoded SAML AuthnRequest
except:
return(string)
return(uri_unescaped_string)
def base64_decode(string):
# Parameters
# string : Base64 encoded string
# Return
# decoded/plain text string
logger.debug('base64_decode() called...')
base64_decoded_string = None
try:
base64_decoded_string = base64.b64decode(string) # decode Base64 encoded XML document
except:
return(string)
return(base64_decoded_string)
def zlib_decompress(string):
# Parameters
# string : zlib compressed string
# Return
# inflated/uncompressed string
zlib_decompressed_string = None
try:
zlib_decompressed_string = zlib.decompress(string, -15) # uncompress XML document
except:
return(string)
return(zlib_decompressed_string)
def xml2dict(xmlstring, prepend_string=None, remove_namespace=True):
logger.debug('xml2dict() called...')
# Parameters
# xmlstring : XML document
# prepend_string : String to add to the beginning of each key
# remove_namespace : If set to True (default), the XML namespace is removed from key names
# Return
# xmlkv : dict of XML element names and values. XML tags and attribute names are concatenated to form the returned key
# TODO: dict keys should indicate the complete XML hierarchy.
# Example: <Root><Element1><Element2 Attribute="stuff" /></Element1></Root> = xmlkv['Root_Element1_Element2_Attribute']
xmlkv = {}
try:
root = xml.fromstring(xmlstring)
tree = xml.ElementTree(root)
except:
logger.warning('Error parsing XML:' + xmlstring)
return(None)
root_tag = repr(root).split('}',1)[1].split('\'',1)[0].replace('\n','').replace('\r','') # strip XML namespace and remove newline characters
if prepend_string is not None:
root_tag = prepend_string + root_tag
for element in tree.iter():
if remove_namespace == True:
if '}' in element.tag:
element.tag = element.tag.split('}',1)[1].replace('\n','').replace('\r','') # strip XML namespaces and remove newline characters
try:
if element.text:
key = root_tag + '_' + element.tag
val = element.text = element.text.replace('\n','').replace('\r','') # remove newline characters
if val.strip():
xmlkv[key] = val
elif element.attrib is not None:
for attribute in element.attrib:
if attribute is not None:
key = root_tag + '_' + element.tag + '_' + attribute.replace('\n','').replace('\r','') # remove newline characters
key = key.replace('__','_') # replace 2 consecutive underscores with a single underscore (this only happens when the tag or attribute name begins with an underscore)
val = element.attrib.get(attribute).replace('\n','').replace('\r','') # remove newline characters
if val.strip():
xmlkv[key] = val
except:
logger.warning(root_tag + '_' + element.tag, element.text)
continue
return(xmlkv)
def dosaml(results,settings):
# Parameters
# string : SAML message
# type : type of SAML message (AuthnRequest, Response, AttributeQuery, etc...) If type is not provided we will try to detect it
# Return
# dict containing SAML message key/value pairs
try:
fields, argvals = splunk.Intersplunk.getKeywordsAndOptions()
for _result in results:
for _field in fields:
if _field in _result:
saml_message = _result[_field]
saml_message = uri_unescape(saml_message)
saml_message = base64_decode(saml_message)
saml_message = zlib_decompress(saml_message)
saml_message_dict = xml2dict(saml_message,'SAML')
if saml_message_dict is not None:
logger.debug(repr(saml_message_dict))
_result.update(saml_message_dict) # create new fields with SAML attributes
#append extracted_saml_fields to results
splunk.Intersplunk.outputResults(results)
except:
import traceback
stack = traceback.format_exc()
results = splunk.Intersplunk.generateErrorResults("Error : Traceback: " + str(stack))
logger.error("Error : " + str(stack))
logger = setup_logger('SplunkSAML','/opt/splunk/var/log/splunk/saml_utils.log')
#set_logger_level('DEBUG')
results, dummyresults, settings = splunk.Intersplunk.getOrganizedResults()
results = dosaml(results, settings)
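A standalone sketch of the decode chain dosaml() applies to each field value (the encoded string is a placeholder for a redirect-binding SAML payload):

encoded = "fZJNT9wwEIb%2FSuR7Pra7gLU2QSsQ..."  # placeholder: URL-escaped, base64, raw-deflated XML
step1 = uri_unescape(encoded)     # undo URL escaping
step2 = base64_decode(step1)      # undo base64
step3 = zlib_decompress(step2)    # inflate (zlib, -15 window bits)
fields = xml2dict(step3, 'SAML')  # flatten XML into key/value pairs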
| ganeti/ganeti | test/py/ganeti.rapi.testutils_unittest.py | Python | bsd-2-clause | 7,060 | 0.005949 |
#!/usr/bin/python3
#
# Copyright (C) 2012 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for testing ganeti.rapi.testutils"""
import unittest
from ganeti import compat
from ganeti import constants
from ganeti import errors
from ganeti import opcodes
from ganeti import luxi
from ganeti import rapi
from ganeti import utils
import ganeti.rapi.testutils
import ganeti.rapi.client
import testutils
KNOWN_UNUSED_LUXI = compat.UniqueFrozenset([
luxi.REQ_SUBMIT_MANY_JOBS,
luxi.REQ_SUBMIT_JOB_TO_DRAINED_QUEUE,
luxi.REQ_ARCHIVE_JOB,
luxi.REQ_AUTO_ARCHIVE_JOBS,
luxi.REQ_CHANGE_JOB_PRIORITY,
luxi.REQ_PICKUP_JOB,
luxi.REQ_QUERY_EXPORTS,
luxi.REQ_QUERY_CONFIG_VALUES,
luxi.REQ_QUERY_NETWORKS,
luxi.REQ_QUERY_TAGS,
luxi.REQ_SET_DRAIN_FLAG,
luxi.REQ_SET_WATCHER_PAUSE,
])
# Global variable for storing used LUXI calls
_used_luxi_calls = None
class TestHideInternalErrors(unittest.TestCase):
def test(self):
def inner():
raise errors.GenericError("error")
fn = rapi.testutils._HideInternalErrors(inner)
self.assertRaises(rapi.testutils.VerificationError, fn)
class TestVerifyOpInput(unittest.TestCase):
def testUnknownOpId(self):
voi = rapi.testutils.VerifyOpInput
self.assertRaises(rapi.testutils.VerificationError, voi, "UNK_OP_ID", None)
def testUnknownParameter(self):
voi = rapi.testutils.VerifyOpInput
self.assertRaises(rapi.testutils.VerificationError, voi,
opcodes.OpClusterRename.OP_ID, {
"unk": "unk",
})
def testWrongParameterValue(self):
voi = rapi.testutils.VerifyOpInput
self.assertRaises(rapi.testutils.VerificationError, voi,
opcodes.OpClusterRename.OP_ID, {
"name": object(),
})
def testSuccess(self):
voi = rapi.testutils.VerifyOpInput
voi(opcodes.OpClusterRename.OP_ID, {
"name": "new-name.example.com",
})
class TestVerifyOpResult(unittest.TestCase):
def testSuccess(self):
vor = rapi.testutils.VerifyOpResult
vor(opcodes.OpClusterVerify.OP_ID, {
constants.JOB_IDS_KEY: [
(False, "error message"),
],
})
def testWrongResult(self):
vor = rapi.testutils.VerifyOpResult
self.assertRaises(rapi.testutils.VerificationError, vor,
opcodes.OpClusterVerify.OP_ID, [])
def testNoResultCheck(self):
vor = rapi.testutils.VerifyOpResult
vor(opcodes.OpTestDummy.OP_ID, None)
class TestInputTestClient(unittest.TestCase):
def setUp(self):
self.cl = rapi.testutils.InputTestClient()
def tearDown(self):
_used_luxi_calls.update(self.cl._GetLuxiCalls())
def testGetInfo(self):
self.assertTrue(self.cl.GetInfo() is NotImplemented)
def testPrepareExport(self):
result = self.cl.PrepareExport("inst1.example.com",
constants.EXPORT_MODE_LOCAL)
self.assertTrue(result is NotImplemented)
self.assertRaises(rapi.testutils.VerificationError, self.cl.PrepareExport,
"inst1.example.com", "###invalid###")
def testGetJobs(self):
self.assertTrue(self.cl.GetJobs() is NotImplemented)
def testQuery(self):
result = self.cl.Query(constants.QR_NODE, ["name"])
self.assertTrue(result is NotImplemented)
def testQueryFields(self):
result = self.cl.QueryFields(constants.QR_INSTANCE)
self.assertTrue(result is NotImplemented)
def testCancelJob(self):
self.assertTrue(self.cl.CancelJob("1") is NotImplemented)
def testGetNodes(self):
self.assertTrue(self.cl.GetNodes() is NotImplemented)
def testGetInstances(self):
self.assertTrue(self.cl.GetInstances() is NotImplemented)
def testGetGroups(self):
self.assertTrue(self.cl.GetGroups() is NotImplemented)
def testWaitForJobChange(self):
result = self.cl.WaitForJobChange("1", ["id"], None, None)
self.assertTrue(result is NotImplemented)
def testGetFilters(self):
self.assertTrue(self.cl.GetFilters() is NotImplemented)
def testGetFilter(self):
result = self.cl.GetFilter("4364c043-f232-41e3-837f-f1ce846f21d2")
self.assertTrue(result is NotImplemented)
def testReplaceFilter(self):
self.assertTrue(self.cl.ReplaceFilter(
uuid="c6a70f02-facb-4e37-b344-54f146dd0396",
priority=1,
predicates=[["jobid", [">", "id", "watermark"]]],
action="CONTINUE",
reason_trail=["testReplaceFilter", "myreason", utils.EpochNano()],
) is NotImplemented)
def testAddFilter(self):
self.assertTrue(self.cl.AddFilter(
priority=1,
predicates=[["jobid", [">", "id", "watermark"]]],
action="CONTINUE",
reason_trail=["testAddFilter", "myreason", utils.EpochNano()],
) is NotImplemented)
def testDeleteFilter(self):
self.assertTrue(self.cl.DeleteFilter(
uuid="c6a70f02-facb-4e37-b344-54f146dd0396",
) is NotImplemented)
class CustomTestRunner(unittest.TextTestRunner):
def run(self, *args):
global _used_luxi_calls
assert _used_luxi_calls is None
diff = (KNOWN_UNUSED_LUXI - luxi.REQ_ALL)
assert not diff, "Non-existing LUXI calls listed as unused: %s" % diff
_used_luxi_calls = set()
try:
# Run actual tests
result = unittest.TextTestRunner.run(self, *args)
diff = _used_luxi_calls & KNOWN_UNUSED_LUXI
if diff:
raise AssertionError("LUXI methods marked as unused were called: %s" %
utils.CommaJoin(diff))
diff = (luxi.REQ_ALL - KNOWN_UNUSED_LUXI - _used_luxi_calls)
if diff:
raise AssertionError("The following LUXI methods were not used: %s" %
utils.CommaJoin(diff))
finally:
# Reset global variable
_used_luxi_calls = None
return result
if __name__ == "__main__":
testutils.GanetiTestProgram(testRunner=CustomTestRunner)
| lesina/labs2016 | contests_1sem/6-7/E.py | Python | gpl-3.0 | 443 | 0.006772 |
a, b = list(map(int, input().split()))
array = []
for i in range(a):
array.append([])
for j in range(b):
if (i+j) % 2:
array[i].append('*')
else:
array[i].append('.')
#for i in range(n):
# for j in range(n):
# if i == j or i == n//2 or j == n//2 or i == n-j-1:
# array[i][j] = "*"
for i in range(a):
for j in range(b):
print(array[i][j], end = " ")
print()
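# For input "3 4" this prints a checkerboard:
# . * . *
# * . * .
# . * . *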
| 1tush/reviewboard | reviewboard/reviews/tests.py | Python | mit | 108,697 | 0.000037 |
from __future__ import print_function, unicode_literals
from datetime import timedelta
import logging
import os
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.core.exceptions import ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile
from django.template import Context, Template
from django.utils import six
from djblets.siteconfig.models import SiteConfiguration
from djblets.testing.decorators import add_fixtures
from kgb import SpyAgency
from reviewboard.accounts.models import Profile, LocalSiteProfile
from reviewboard.attachments.models import FileAttachment
from reviewboard.reviews.forms import DefaultReviewerForm, GroupForm
from reviewboard.reviews.markdown_utils import (markdown_escape,
markdown_unescape)
from reviewboard.reviews.models import (Comment,
DefaultReviewer,
Group,
ReviewRequest,
ReviewRequestDraft,
Review,
Screenshot)
from reviewboard.scmtools.core import Commit
from reviewboard.scmtools.models import Repository, Tool
from reviewboard.site.models import LocalSite
from reviewboard.site.urlresolvers import local_site_reverse
from reviewboard.testing import TestCase
class ReviewRequestManagerTests(TestCase):
"""Tests ReviewRequestManager functions."""
fixtures = ['test_users']
@add_fixtures(['test_scmtools'])
def test_create_with_site(self):
"""Testing ReviewRequest.objects.create with LocalSite"""
user = User.objects.get(username='doc')
local_site = LocalSite.objects.create(name='test')
repository = self.create_repository()
review_request = ReviewRequest.objects.create(
user, repository, local_site=local_site)
self.assertEqual(review_request.repository, repository)
self.assertEqual(review_request.local_site, local_site)
self.assertEqual(review_request.local_id, 1)
@add_fixtures(['test_scmtools'])
def test_create_with_site_and_commit_id(self):
"""Testing ReviewRequest.objects.create with LocalSite and commit ID"""
user = User.objects.get(username='doc')
local_site = LocalSite.objects.create(name='test')
repository = self.create_repository()
review_request = ReviewRequest.objects.create(
user, repository,
commit_id='123',
local_site=local_site)
self.assertEqual(review_request.repository, repository)
self.assertEqual(review_request.commit_id, '123')
self.assertEqual(review_request.local_site, local_site)
self.assertEqual(review_request.local_id, 1)
@add_fixtures(['test_scmtools'])
def test_create_with_site_and_commit_id_not_unique(self):
"""Testing ReviewRequest.objects.create with LocalSite and
commit ID that is not unique
"""
user = User.objects.get(username='doc')
local_site = LocalSite.objects.create(name='test')
repository = self.create_repository()
# This one should be fine.
ReviewRequest.objects.create(user, repository, commit_id='123',
local_site=local_site)
self.assertEqual(local_site.review_requests.count(), 1)
# This one will yell.
self.assertRaises(
ValidationError,
lambda: ReviewRequest.objects.create(
user, repository,
commit_id='123',
local_site=local_site))
# Make sure that entry doesn't exist in the database.
self.assertEqual(local_site.review_requests.count(), 1)
@add_fixtures(['test_scmtools'])
def test_create_with_site_and_commit_id_and_fetch_problem(self):
"""Testing ReviewRequest.objects.create with LocalSite and
commit ID with problem fetching commit details
"""
user = User.objects.get(username='doc')
local_site = LocalSite.objects.create(name='test')
repository = self.create_repository()
self.assertEqual(local_site.review_requests.count(), 0)
ReviewRequest.objects.create(
user, repository,
commit_id='123',
local_site=local_site,
create_from_commit_id=True)
# Make sure that entry doesn't exist in the database.
self.assertEqual(local_site.review_requests.count(), 1)
review_request = local_site.review_requests.get()
self.assertEqual(review_request.local_id, 1)
self.assertEqual(review_request.commit_id, '123')
def test_public(self):
"""Testing ReviewRequest.objects.public"""
user1 = User.objects.get(username='doc')
user2 = User.objects.get(username='grumpy')
self.create_review_request(summary='Test 1',
publish=True,
submitter=user1)
self.create_review_request(summary='Test 2',
submitter=user2)
self.create_review_request(summary='Test 3',
status='S',
public=True,
submitter=user1)
self.create_review_request(summary='Test 4',
status='S',
public=True,
submitter=user2)
self.create_review_request(summary='Test 5',
status='D',
public=True,
submitter=user1)
self.create_review_request(summary='Test 6',
status='D',
submitter=user2)
self.assertValidSummaries(
ReviewRequest.objects.public(user=user1),
[
'Test 1',
])
self.assertValidSummaries(
ReviewRequest.objects.public(status=None),
[
'Test 5',
'Test 4',
'Test 3',
'Test 1',
])
self.assertValidSummaries(
ReviewRequest.objects.public(user=user2, status=None),
[
'Test 6',
'Test 5',
'Test 4',
'Test 3',
'Test 2',
'Test 1'
])
@add_fixtures(['test_scmtools'])
def test_public_without_private_repo_access(self):
"""Testing ReviewRequest.objects.public without access to private
repositories
"""
user = User.objects.get(username='grumpy')
repository = self.create_repository(public=False)
review_request = self.create_review_request(repository=repository,
publish=True)
self.assertFalse(review_request.is_accessible_by(user))
review_requests = ReviewRequest.objects.public(user=user)
self.assertEqual(review_requests.count(), 0)
@add_fixtures(['test_scmtools'])
def test_public_with_private_repo_access(self):
"""Testing ReviewRequest.objects.public with access to private
repositories
"""
user = User.objects.get(username='grumpy')
repository = self.create_repository(public=False)
repository.users.add(user)
review_request = self.create_review_request(repository=repository,
publish=True)
self.assertTrue(review_request.is_accessible_by(user))
review_requests = ReviewRequest.objects.public(user=user)
self.assertEqual(review_requests.count(), 1)
@add_fixtures(['test_scmtools'])
def test_public_with_private_repo_access_through_group(self):
"""Testing ReviewRequest.objects.public with access to private
repositories
"""
user = User.objects.get(username='grumpy')
| yaroslav-tarasov/avango | avango-skelanim/examples/skeletal_character_control/main_animation_config.py | Python | lgpl-3.0 | 13,239 | 0.001586 |
import avango
import avango.script
import avango.gua.skelanim
from examples_common.GuaVE import GuaVE
import examples_common.navigator
from avango.gua.skelanim.CharacterControl import CharacterControl
from avango.gua.skelanim.AnimationControl import AnimationConfig
### CAMERA CONTROL VIA XBOX CONTROLLER:
def camera_control_xbox_connect(camera_control, device_sensor):
# optional / additional xbox controller settings:
camera_control.XBOX_X.connect_from(device_sensor.Value2)
camera_control.XBOX_Y.connect_from(device_sensor.Value3)
camera_control.XBOX_LZ.connect_from(device_sensor.Value4)
camera_control.XBOX_RZ.connect_from(device_sensor.Value5)
def camera_control_xbox_disconnect(camera_control, device_sensor):
# optional / additional xbox controller settings:
camera_control.XBOX_X.disconnect()
camera_control.XBOX_Y.disconnect()
camera_control.XBOX_LZ.disconnect()
camera_control.XBOX_RZ.disconnect()
def start():
# setup scenegraph
graph = avango.gua.nodes.SceneGraph(Name="scenegraph")
#environment:
tri_mesh_loader = avango.gua.nodes.TriMeshLoader()
path = "/opt/project_animation/demo/data/objects/cube2.obj"
flags = avango.gua.LoaderFlags.NORMALIZE_POSITION \
| avango.gua.LoaderFlags.NORMALIZE_SCALE \
| avango.gua.LoaderFlags.OPTIMIZE_GEOMETRY \
| avango.gua.LoaderFlags.MAKE_PICKABLE \
| avango.gua.LoaderFlags.LOAD_MATERIALS
plane = \
tri_mesh_loader.create_geometry_from_file("cube",
path,
flags)
plane.Transform.value *= \
avango.gua.make_scale_mat(10.0, 0.01, 10.0) *\
avango.gua.make_trans_mat(0, -3, 0)
path2 = "/opt/project_animation/demo/data/objects/glass_2_3_nm.TGA"
plane.Material.value.set_uniform("NormalMap",
path2)
sunlight = avango.gua.nodes.LightNode(
Type=avango.gua.LightType.SUN,
Name="light",
Color=avango.gua.Color(245.0/255.0, 246.0/255.0, 178.0/255.0),
Brightness=5.0,
Transform=(avango.gua.make_rot_mat(119.5, 0.0, 1.0, 0.0) *
avango.gua.make_rot_mat(-10, 1.0, 0.0, 0.0))
)
#view setup:
size = avango.gua.Vec2ui(2560, 1440)
window = avango.gua.nodes.GlfwWindow(
Size=size,
LeftResolution=size
)
window.CursorMode.value = 2
window.EnableFullscreen.value = True
cam = avango.gua.nodes.CameraNode(LeftScreenPath="/screen",
SceneGraph="scenegraph",
Resolution=size,
OutputWindowName="window")
#render pipeline
pipeline_description = avango.gua.nodes.PipelineDescription(
Passes=[
avango.gua.nodes.TriMeshPassDescription(),
avango.gua.nodes.LightVisibilityPassDescription(),
avango.gua.skelanim.nodes.SkeletalAnimationPassDescription(),
avango.gua.nodes.ResolvePassDescription(),
avango.gua.nodes.SSAAPassDescription(),
])
pipeline_description.Passes.value[3].EnableSSAO.value = True
pipeline_description.Passes.value[3].SSAORadius.value = 2.0
pipeline_description.Passes.value[3].SSAOIntensity.value = 2.0
pipeline_description.Passes.value[3].BackgroundMode.value = 1
pipeline_description.Passes.value[3].BackgroundTexture.value = \
"/opt/avango/master/examples/picking/data/textures/skymap.jpg"
pipeline_description.Passes.value[3].ToneMappingMode.value = \
avango.gua.ToneMappingMode.LINEAR
#pipeline_description.EnableABuffer.value = True
cam.PipelineDescription.value = pipeline_description
cam.Transform.value = avango.gua.make_trans_mat(0.0, 0.0, 0.4)
cam.FarClip.value = 300
cam.NearClip.value = 0.01
screen = avango.gua.nodes.ScreenNode(Name="screen", Width=0.8, Height=0.45)
screen.Children.value = [cam]
screen.Transform.value = avango.gua.make_trans_mat(0, 0.1, -2)
graph.Root.value.Children.value = [screen, plane, sunlight]
avango.gua.register_window("window", window)
#setup viewer
viewer = avango.gua.nodes.Viewer()
viewer.SceneGraphs.value = [graph]
viewer.Windows.value = [window]
window.CursorMode.value = avango.gua.CursorMode.DISABLED
#navigation
navigator = examples_common.navigator.Navigator()
#navigator.StartLocation.value = screen.Transform.value.get_translate()
navigator.StartRotation.value = avango.gua.Vec2(0, 790)
navigator.OutTransform.connect_from(screen.Transform)
navigator.RotationSpeed.value = 0.2
navigator.MotionSpeed.value = 0.004
# enable navigation
screen.Transform.connect_from(navigator.OutTransform)
#XBOX Controller
device_sensor = avango.daemon.nodes.DeviceSensor(
DeviceService=avango.daemon.DeviceService())
device_sensor.Station.value = "device-xbox-1"
#unreal tournament characters
skel_mesh_loader = avango.gua.skelanim.nodes.SkeletalAnimationLoader()
#LOOP MODE DEMO BOB
path_bob = "/opt/project_animation/Assets/UnrealTournament/Characters/" +\
"Necris_Male/necris_male_ut4_SKELMESH.FBX"
flags_bob = avango.gua.LoaderFlags.LOAD_MATERIALS \
| avango.gua.LoaderFlags.NORMALIZE_SCALE
bob_loop = \
skel_mesh_loader.create_geometry_from_file("bob_loop",
path_bob,
flags_bob)
bob_loop.Transform.value = avango.gua.make_trans_mat(0.0, -0.01, 0.0) *\
bob_loop.Transform.value *\
avango.gua.make_rot_mat(-90.0, 1.0, 0.0, 0.0) *\
avango.gua.make_scale_mat(0.2, 0.2, 0.2)
graph.Root.value.Children.value.append(bob_loop)
#load animations
path_idle = "/opt/project_animation/Assets/" + \
"UnrealTournament/UniversalAnimations/Idle_Ready_Rif.FBX"
path_run = "/opt/project_animation/Assets/" + \
"UnrealTournament/UniversalAnimations/Run_Fwd_Rif.FBX"
bob_loop.load_animation(path_idle, "idle")
bob_loop.load_animation(path_idle, "idle2")
bob_loop.load_animation(path_run, "run_fwd")
bob_loop.load_animation(path_run, "run_fwd2")
#character control
character_control_loop = CharacterControl()
character_control_loop.my_constructor(bob_loop, bob_loop,
AnimationConfig("idle"), window)
character_control_loop.on_animation_end("idle",
AnimationConfig("run_fwd",
loop=True,
speed=1.0,
duration=3.0))
character_control_loop.on_animation_end("run_fwd",
AnimationConfig("idle2",
loop=False,
speed=1.0,
duration=3.0))
character_control_loop.on_animation_end("idle2",
AnimationConfig("run_fwd2",
loop=False,
speed=1.0,
duration=3.0))
character_control_loop.on_animation_end("run_fwd2",
AnimationConfig("idle",
loop=True,
speed=1.0,
duration=3.0))
#SPEED DEMO BOB
bob_speed = skel_mesh_loader.create_geometry_from_file("bob_speed",
path_bob,
flags_bob)
bob_speed.Transform.value = avango.gua.make_trans_mat(0.1, -0.01,
| oiertwo/vampyr | flask/index.py | Python | mit | 1,592 | 0.014447 |
__author__ = 'oier'
import json
from flask import Flask, make_response
app = Flask(__name__)
import seaborn as sns
import numpy as np
import pandas as pd
import os
from datetime import datetime
import matplotlib.pyplot as plt
import sys
from matplotlib.figure import Figure
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from io import StringIO
from sklearn import linear_model
from models import InputForm, ValueSelector
from flask import Flask, render_template, request
from compute import compute, load_data, line_plot
@app.route('/')
def index():
return 'Hello World!'
def form_values(request):
data = load_data()
form = ValueSelector(request)
form.value.choices = [(k,i) for k,i in enumerate(data.columns)]
return(form)
@app.route('/blood', methods=['GET', 'POST'])
def blood():
form = form_values(request.form)
if request.method == 'POST':# and form.validate():
result = line_plot(form.value.data)
else:
print("False")
result = None
return render_template('plot.html',
form=form, result=result)
@app.route('/vib1', methods=['GET', 'POST'])
def vib1():
#form = InputForm(request.form)
form = form_values(request.form)
if request.method == 'POST' and form.validate():
result = compute(form.A.data, form.b.data,
form.w.data, form.T.data)
else:
result = None
return render_template('view_plain.html',
form=form, result=result)
if __name__ == '__main__':
app.run()
| ziima/django-multilingual-ds9 | multilingual/models/sql/__init__.py | Python | mit | 44 | 0 |
"""
Sql support f
|
or multilingual models
"
|
""
| openstack/tacker | tacker/db/migration/alembic_migrations/versions/13ecc2dd6f7f_change_nullable_value_of_path_id.py | Python | apache-2.0 | 1,218 | 0.004926 |
# Copyright 2018 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Change nullable value of path_id
Revision ID: 13ecc2dd6f7f
Revises: 4747cc26b9c6
Create Date: 2018-07-24 16:47:01.378226
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = '13ecc2dd6f7f'
down_revision = '4747cc26b9c6'
from alembic import op
import sqlalchemy as sa
def upgrade(active_plugins=None, options=None):
op.alter_column('vnffgchains', 'path_id',
existing_type=sa.String(length=255),
nullable=True)
op.alter_column('vnffgnfps', 'path_id',
existing_type=sa.String(length=255),
nullable=True)
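A hypothetical downgrade counterpart (the original migration defines none; this is just the symmetric operation):

def downgrade(active_plugins=None, options=None):
    op.alter_column('vnffgchains', 'path_id',
                    existing_type=sa.String(length=255),
                    nullable=False)
    op.alter_column('vnffgnfps', 'path_id',
                    existing_type=sa.String(length=255),
                    nullable=False)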
| Copper-Head/the-three-stooges | conll2hdf5.py | Python | mit | 3,576 | 0.004754 |
import argparse
from collections import Counter
from itertools import chain
from numpy import save, array
from os import listdir
from os import path
import re
import sys
import dataproc as dp
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--dir', type=str, help='Source directory to conll-data.')
parser.add_argument('-n', '--name', type=str, help='Specifies the corpus name.')
parser.add_argument('-u', '--maxlength', type=int, default=sys.maxsize, help='Max sentence length to avoid memory errors.')
parser.add_argument('-l', '--minlength', type=int, default=0, help='Minimal sentence length, default is 0.')
parser.add_argument('-r', '--removeunknown', type=int, default=0, help='Determines whether sentences containing rare words should be dropped. Any number != 0 disables dropping.')
parser.add_argument('-w', '--wordfreq', type=int, default=1, help='Minimum frequency of words; words with lower'
+' frequency will be replaced by <UNKNOWN>. Default=1.')
parser.add_argument('-c', '--case', type=int, default=1, help='Determines, if the vocabulary should be case sensitive. It is on per default, 0 means non-case sensitive.')
parser.add_argument('-p', '--punctuation', type=int, default=1, help='0 - delete punctuation; any other number - keep punctuation')
args = parser.parse_args()
target_file = './data/'+args.name+'_data.hdf5'
alphabet_file = './data/'+args.name+'_ix2tok.npy'
len_upper_limit = args.maxlength
len_lower_limit = args.minlength
word_freq_limit = args.wordfreq
EOS = '<EOS>'
if args.punctuation:
def filter_punc(s):
return s
else:
def filter_punc(s):
return re.sub('\.|\?|!|;|:|,', '', s)
if args.case:
def transform(s):
return s
else:
def transform(s):
return s.lower()
seqs = []
drop_seqs = []
files = filter(lambda f: f.endswith('.conll'), listdir(args.dir))
for fname in files:
with open(path.join(args.dir, fname)) as f:
data = f.read()
for sentence in data.split('\n\n'):
seq = []
for line in sentence.split('\n'):
if line.strip():
word = filter_punc(transform(line.split('\t')[1]))
if word: seq.append(word)
if len(seq) <= len_upper_limit and len(seq) >= len_lower_limit:
seq.append(EOS)
seqs.append(seq)
else:
drop_seqs.append(seq)
counter = Counter(list(chain(*seqs)))
ix_seq = []
ix_seqs = []
tok2ix = {} if args.removeunknown or args.wordfreq == 1 else {'<UNKNOWN>': 0}
ix = len(tok2ix)
for seq in seqs:
for tok in seq:
if counter[tok] < word_freq_limit:
if args.removeunknown:
ix_seq = []
break
else:
ix_seq.append(0)
else:
if tok in tok2ix:
ix_seq.append(tok2ix[tok])
else:
tok2ix[tok] = ix
ix_seq.append(ix)
ix += 1
if ix_seq:
ix_seqs.append(ix_seq)
else:
drop_seqs.append(ix_seq)
ix_seq = []
seq_arr = array(ix_seqs)
print('Dropping', len(drop_seqs), 'sentences containing', len(list(chain(*drop_seqs))), 'tokens.')
print(len(ix_seqs), 'sentences with', len(list(chain(*ix_seqs))), 'tokens remaining.')
print('Vocabulary size:', len(tok2ix))
# save sentences
split_n = int(.9*seq_arr.shape[0])
dp.split_hdf5_file(target_file, seq_arr[:split_n], seq_arr[split_n:], varlen=True)
# save vocab indexing
ix2tok = {v: k for k, v in tok2ix.items()}
save(alphabet_file, array(ix2tok))
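# Hypothetical invocation (directory and corpus name are placeholders):
#   python conll2hdf5.py -d ./conll_data -n mycorpus -u 100 -l 3 -w 5 -c 0
# This writes ./data/mycorpus_data.hdf5 (90/10 train/validation split)
# and ./data/mycorpus_ix2tok.npy (the index-to-token mapping).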
| skyoo/jumpserver | apps/perms/api/application/__init__.py | Python | gpl-2.0 | 153 | 0 |
from .user_permission import *
from .application_permission import *
from .application_permission_relation import *
from .user_group_permission import *
| kellogg76/ArduinoTelescopeDustCover | open.py | Python | mit | 507 | 0.003945 |
## Open a serial connection with Arduino.
import time
import serial
ser = serial.Serial("COM9", 9600) # Open serial port that Arduino is using
time.sleep(3) # Wait 3 seconds for Arduino to reset
print ser  # Print serial config
print "Sending serial command to OPEN the dust cover."
ser.write("O")
print "Closing serial connecti
|
on."
ser.close()
# Reminder to close the connection when finished
if(ser.isOpen()):
print "Serial connection is still open."
|
proversity-org/edx-platform
|
common/lib/calc/calc/calc.py
|
Python
|
agpl-3.0
| 13,906 | 0.000791 |
"""
Parser and evaluator for FormulaResponse and NumericalResponse
Uses pyparsing to parse. Main function as of now is evaluator().
"""
import math
import numbers
import operator
import numpy
import scipy.constants
from pyparsing import (
CaselessLiteral,
Combine,
Forward,
Group,
Literal,
MatchFirst,
Optional,
ParseResults,
Suppress,
Word,
ZeroOrMore,
alphanums,
alphas,
nums,
stringEnd
)
import functions
# Functions available by default
# We use scimath variants which give complex results when needed. For example:
# np.sqrt(-4+0j) = 2j
# np.sqrt(-4) = nan, but
# np.lib.scimath.sqrt(-4) = 2j
DEFAULT_FUNCTIONS = {
'sin': numpy.sin,
'cos': numpy.cos,
'tan': numpy.tan,
'sec': functions.sec,
'csc': functions.csc,
'cot': functions.cot,
'sqrt': numpy.lib.scimath.sqrt,
'log10': numpy.lib.scimath.log10,
'log2': numpy.lib.scimath.log2,
'ln': numpy.lib.scimath.log,
'exp': numpy.exp,
'arccos': numpy.lib.scimath.arccos,
'arcsin': numpy.lib.scimath.arcsin,
'arctan': numpy.arctan,
'arcsec': functions.arcsec,
'arccsc': functions.arccsc,
'arccot': functions.arccot,
'abs': numpy.abs,
'fact': math.factorial,
'factorial': math.factorial,
'sinh': numpy.sinh,
'cosh': numpy.cosh,
'tanh': numpy.tanh,
'sech': functions.sech,
'csch': functions.csch,
'coth': functions.coth,
'arcsinh': numpy.arcsinh,
'arccosh': numpy.arccosh,
'arctanh': numpy.lib.scimath.arctanh,
'arcsech': functions.arcsech,
'arccsch': functions.arccsch,
'arccoth': functions.arccoth
}
DEFAULT_VARIABLES = {
'i': numpy.complex(0, 1),
'j': numpy.complex(0, 1),
'e': numpy.e,
'pi': numpy.pi,
'k': scipy.constants.k, # Boltzmann: 1.3806488e-23 (Joules/Kelvin)
'c': scipy.constants.c, # Light Speed: 2.998e8 (m/s)
'T': 298.15, # Typical room temperature: 298.15 (Kelvin), same as 25C/77F
'q': scipy.constants.e # Fund. Charge: 1.602176565e-19 (Coulombs)
}
# We eliminated the following extreme suffixes:
# P (1e15), E (1e18), Z (1e21), Y (1e24),
# f (1e-15), a (1e-18), z (1e-21), y (1e-24)
# since they're rarely used, and potentially confusing.
# They may also conflict with variables if we ever allow e.g.
# 5R instead of 5*R
SUFFIXES = {
'%': 0.01, 'k': 1e3, 'M': 1e6, 'G': 1e9, 'T': 1e12,
'c': 1e-2, 'm': 1e-3, 'u': 1e-6, 'n': 1e-9, 'p': 1e-12
}
class UndefinedVariable(Exception):
"""
Indicate when a student inputs a variable which was not expected.
"""
pass
def lower_dict(input_dict):
"""
Convert all keys in a dictionary to lowercase; keep their original values.
Keep in mind that it is possible (but not useful?) to define different
variables that have the same lowercase representation. It would be hard to
tell which is used in the final dict and which isn't.
"""
return {k.lower(): v for k, v in input_dict.iteritems()}
# The following few functions define evaluation actions, which are run on lists
# of results from each parse component. They convert the strings and (previously
# calculated) numbers into the number that component represents.
def super_float(text):
"""
Like float, but with SI extensions. 1k goes to 1000.
"""
if text[-1] in SUFFIXES:
return float(text[:-1]) * SUFFIXES[text[-1]]
else:
return float(text)
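# Editor's illustration (values follow the SUFFIXES table above):
#   super_float("3k") -> 3000.0; super_float("2.5m") -> 0.0025; super_float("7") -> 7.0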
def eval_number(parse_result):
"""
Create a float out of its string parts.
e.g. [ '7.13', 'e', '3' ] -> 7130
Calls super_float above.
"""
return super_float("".join(parse_result))
def eval_atom(parse_result):
"""
    Return the value wrapped by the atom.
In the case of parenthesis, ignore them.
"""
# Find first number in the list
result = next(k for k in parse_result if isinstance(k, numbers.Number))
return result
def eval_power(parse_result):
"""
Take a list of numbers and exponentiate them, right to left.
    e.g. [ 2, 3, 2 ] -> 2^3^2 = 2^(3^2) -> 512
    (not to be interpreted (2^3)^2 = 64)
"""
# `reduce` will go from left to right; reverse the list.
parse_result = reversed(
[k for k in parse_result
if isinstance(k, numbers.Number)] # Ignore the '^' marks.
)
# Having reversed it, raise `b` to the power of `a`.
power = reduce(lambda a, b: b ** a, parse_result)
return power
def eval_parallel(parse_result):
"""
Compute numbers according to the parallel resistors operator.
BTW it is commutative. Its formula is given by
out = 1 / (1/in1 + 1/in2 + ...)
e.g. [ 1, 2 ] -> 2/3
Return NaN if there is a zero among the inputs.
"""
if len(parse_result) == 1:
return parse_result[0]
if 0 in parse_result:
return float('nan')
reciprocals = [1. / e for e in parse_result
if isinstance(e, numbers.Number)]
return 1. / sum(reciprocals)
def eval_sum(parse_result):
"""
Add the inputs, keeping in mind their sign.
[ 1, '+', 2, '-', 3 ] -> 0
Allow a leading + or -.
"""
total = 0.0
current_op = operator.add
for token in parse_result:
if token == '+':
current_op = operator.add
elif token == '-':
current_op = operator.sub
else:
total = current_op(total, token)
return total
def eval_product(parse_result):
"""
Multiply the inputs.
[ 1, '*', 2, '/', 3 ] -> 0.66
"""
prod = 1.0
current_op = operator.mul
for token in parse_result:
if token == '*':
current_op = operator.mul
elif token == '/':
current_op = operator.truediv
else:
prod = current_op(prod, token)
return prod
def add_defaults(variables, functions, case_sensitive):
"""
Create dictionaries with both the default and user-defined variables.
"""
all_variables = dict(DEFAULT_VARIABLES)
all_functions = dict(DEFAULT_FUNCTIONS)
all_variables.update(variables)
all_functions.update(functions)
if not case_sensitive:
all_variables = lower_dict(all_variables)
all_functions = lower_dict(all_functions)
return (all_variables, all_functions)
def evaluator(variables, functions, math_expr, case_sensitive=False):
"""
Evaluate an expression; that is, take a string of math and return a float.
-Variables are passed as a dictionary from string to value. They must be
python numbers.
-Unary functions are passed as a dictionary from string to function.
"""
# No need to go further.
if math_expr.strip() == "":
return float('nan')
# Parse the tree.
math_interpreter = ParseAugmenter(math_expr, case_sensitive)
math_interpreter.parse_algebra()
# Get our variables together.
all_variables, all_functions = add_defaults(variables, functions, case_sensitive)
# ...and check them
math_interpreter.check_variables(all_variables, all_functions)
# Create a recursion to evaluate the tree.
if case_sensitive:
casify = lambda x: x
else:
casify = lambda x: x.lower() # Lowercase for case insens.
evaluate_actions = {
'number': eval_number,
'variable': lambda x: all_variables[casify(x[0])],
'function': lambda x: all_functions[casify(x[0])](x[1]),
'atom': eval_atom,
'power': eval_power,
'parallel': eval_parallel,
'product': eval_product,
'sum': eval_sum
}
return math_interpreter.reduce_tree(evaluate_actions)
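# Editor's usage sketch (a hypothetical call, not from the source):
#   evaluator({'x': 2}, {}, 'x^2 + sin(pi/2)') -> 5.0
# 'pi' and 'sin' come from DEFAULT_VARIABLES / DEFAULT_FUNCTIONS defined above.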
class ParseAugmenter(object):
"""
Holds the data for a particular parse.
Retains the `math_expr` and `case_sensitive` so they needn't be passed
around method to method.
Eventually holds the parse tree and sets of variables as well.
"""
def __init__(self, math_expr, case_sensitive=False):
"""
Create the ParseAugmenter for a given math expression string.
Do the parsing later, when called like `OBJ.parse_algebra()`.
"""
self.case_sensitive = case_sensitive
self.math_expr = math_expr
|
eugenekolo/project-euler
|
euler025.py
|
Python
|
mit
| 856 | 0.008216 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
################################################################################
# Euler 25
# 1000-digit Fibonacci number
# Author: Eugene Kolo - 2014
# Contact: www.eugenekolo.com
# The Fibonacci sequence is defined by the recurrence relation:
# Fn = Fn−1 + Fn−2, where F1 = 1 and F2 = 1.
# Hence the first 12 terms will be: F1 = 1 F2 = 1 F3 = 2 F4 = 3 F5 = 5 F6 = 8 F7 = 13
# F8 = 21 F9 = 34 F10 = 55 F11 = 89 F12 = 144
# The 12th term, F12, is the first term to contain three digits.
# What is the first term in the Fibonacci sequence to contain 1000 digits?
################################################################################
def solve():
from eulerlib import fib
n = 0
while (len(str(fib(n))) < 1000):
        n += 1
return n
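# Editor's note (an assumption about eulerlib's indexing): with fib(0) == 0 and the
# standard recurrence, solve() returns 4782, the well-known answer to Euler 25.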
if __name__ == '__main__':
print(solve())
|
ekristen/mythboxee
|
mythtv/MythStatic.py
|
Python
|
mit
| 261 | 0.011494 |
# -*- coding: utf-8 -*-
"""
Contains any static and global variables for MythTV Python Bindings
"""
OWN_VERSION = (0,23,1,0)
SCHEMA_VERSION = 1254
MVSCHEMA_VERSION = 1032
NVSCHEMA_VERSION = 1004
PROTO_VERSION = 23056
PROGRAM_FIELDS = 47
BACKEND_SEP = '[]:[]'
|
JarbasAI/jarbas-core
|
mycroft/jarbas-skills/LILACS_core/question_parser.py
|
Python
|
gpl-3.0
| 5,535 | 0.003433 |
import re
import spotlight
from requests import ConnectionError, HTTPError
class EnglishQuestionParser():
"""
Poor-man's english question parser. Not even close to conclusive, but
appears to construct some decent w|a queries and responses.
__author__ = 'seanfitz'
"""
def __init__(self):
self.regexes = [
#re.compile(
# ".*(?P<QuestionWord>are) "
# "(?P<Query>.*)"),
re.compile(
".*(?P<QuestionWord>who|what|when|where|why|which|whose) "
"(?P<Query1>.*) (?P<QuestionVerb>is|are|was|were) "
"(?P<Query2>.*)"),
re.compile(
".*(?P<QuestionWord>are|is) "
"(?P<Query1>.*) (?P<QuestionVerb>an|a|an example off|an instance off) "
"(?P<Query2>.*)"),
re.compile(
"(?P<Query1>.*) (?P<QuestionVer
|
b>and) "
"(?P<Query2>.*) (?P<QuestionWord>in common)"),
re.compile(
".*(?P<QuestionWord>talk|rant|think) "
"(?P<QuestionVerb>\w+) (?P<Query>.*)"),
re.compile(
".*(?P<QuestionWord>who|what|when|where|why|which|how|example|examples) "
"(?P<QuestionVerb>\w+) (?P<Query>.*)")
]
def _normalize(self, groupdict):
if 'Query' in groupdict:
return groupdict
        elif 'Query1' in groupdict and 'Query2' in groupdict:
return {
'QuestionWord': groupdict.get('QuestionWord'),
'QuestionVerb': groupdict.get('QuestionVerb'),
'Query': ' '.join([groupdict.get('Query1'), groupdict.get(
'Query2')])
}
def parse(self, utterance):
for regex in self.regexes:
match = regex.match(utterance)
if match:
return self._normalize(match.groupdict())
return None
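# Editor's illustration (hypothetical utterance; it falls through to the last regex):
#   EnglishQuestionParser().parse("what is a cow")
#   -> {'QuestionWord': 'what', 'QuestionVerb': 'is', 'Query': 'a cow'}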
class LILACSQuestionParser():
def __init__(self, host="http://spotlight.sztaki.hu:2222/rest/annotate"):
# 222 2en 8pt 5fr
self.parser = EnglishQuestionParser()
self.host = host
def process_entitys(self, text):
subjects, parents, synonims = self.tag_from_dbpedia(text)
        center = float('inf')  # sentinel: any real offset is smaller
        center_node = ""
for node in subjects:
if subjects[node] < center:
center = subjects[node]
center_node = node
        target = float('inf')  # sentinel, as above
        # TODO: better target selection mechanism
        target_node = ""
for node in subjects:
if subjects[node] < target and node != center_node:
target = subjects[node]
target_node = node
parse = self.poor_parse(text)
        try:
            question = parse["QuestionWord"]
        except (TypeError, KeyError):  # parse may be None or lack the key
            question = "unknown"
middle = [node for node in subjects if node != center_node and node != target_node]
return center_node, target_node, parents, synonims, middle, question
def poor_parse(self, text):
return self.parser.parse(text)
def tag_from_dbpedia(self, text):
text = text.lower()
subjects = {}
parents = {}
synonims = {}
try:
annotations = spotlight.annotate(self.host, text, spotter='Default')
for annotation in annotations:
# how sure we are this is about this dbpedia entry
score = annotation["similarityScore"]
# entry we are talking about
subject = annotation["surfaceForm"].lower()
                # a smaller offset means closer to the main topic of the sentence
                offset = annotation["offset"]
                # TODO tweak this value and make configurable
if float(score) < 0.4:
continue
subjects.setdefault(subject, offset)
                # category of this entry <- linked nodes <- parsed for dbpedia search
if annotation["types"]:
p = []
types = annotation["types"].split(",")
for type in types:
type = type.replace("DBpedia:", "").replace("Schema:", "").replace("Http://xmlns.com/foaf/0.1/", "").lower()
if type not in p:
p.append(type)
parents.setdefault(subject, p)
# dbpedia link
url = annotation["URI"]
#print "link: " + url
dbpedia_name = url.replace("http://dbpedia.org/resource/", "").replace("_", " ")
if dbpedia_name.lower() not in subject:
synonims.setdefault(subject, dbpedia_name.lower())
except ConnectionError as e:
# TODO use logger
print e
except HTTPError as e:
print e
return subjects, parents, synonims
def test_qp():
parser = LILACSQuestionParser()
questions = ["how to kill animals ( a cow ) and make meat", "what is a living being", "why are humans living beings", "give examples of animals"]
for text in questions:
center_node, target_node, parents, synonims, midle, question = parser.process_entitys(text)
print "\nQuestion: " + text
print "question_type: " + question
print "center_node: " + center_node
print "target_node: " + target_node
print "parents: " + str(parents)
print "relevant_nodes: " + str(midle)
print "synonims: " + str(synonims)
|
mverzett/rootpy
|
rootpy/extern/hep/pdg.py
|
Python
|
gpl-3.0
| 31,391 | 0.00051 |
#
# $Id: PDG.py,v 1.5 2009-01-26 03:05:43 ssnyder Exp $
# File: PDG.py
# Created: sss, Mar 2005
# Purpose: Define PDG ID codes.
#
"""
This module contains names for the various PDG particle ID codes.
The names are the same as in EventKernel/PdtPdg.h.
This module also contains a dictionary pdgid_names mapping ID codes
back to printable strings, and a function pdgid_to_name to do this
conversion. Similarly, root_names and pdgid_to_root_name translate to
strings with root markup.
"""
from __future__ import absolute_import
from ROOT import TDatabasePDG
from pkg_resources import resource_filename
import os
db = TDatabasePDG()
db.ReadPDGTable(resource_filename('rootpy', 'etc/pdg_table.txt'))
def GetParticle(id):
return db.GetParticle(id)
# Table to translate from PDG IDs to printable strings.
pdgid_names = {}
# Table to translate from PDG IDs to strings with root markup.
root_names = {}
def id_to_name(id):
"""
Convert a PDG ID to a printable string.
"""
name = pdgid_names.get(id)
if not name:
name = repr(id)
return name
def id_to_root_name(id):
"""
Convert a PDG ID to a string with root markup.
"""
name = root_names.get(id)
if not name:
name = repr(id)
return name
#
# Table of PDG IDs, associating the ID codes with up to several names.
# This is formatted as one big string to make it easier to maintain
# (don't need to quote everything individually).
# The format of each line is like this:
#
# mname = id pname rname
#
# An attribute mname will be added to this module with a value of id.
# These names are intended to match those in PdgPdt.h.
# pname is a printable name for the entry, and rname is a name
# with root-style markup. These names will be put into the pdgid_names
# and root_names dictionaries, respectively. They can be left as `!'
# if no name is available. pname and rname should not contain spaces.
# Blank lines or those starting with `#' will be ignored.
#
_pdgtable = \
"""
d = 1 D d
anti_d = -1 DBAR #bar{d}
u = 2 U u
anti_u = -2 UBAR #bar{u}
s = 3 S s
anti_s = -3 SBAR #bar{s}
c = 4 C c
anti_c = -4 CBAR #bar{c}
b = 5 B b
anti_b = -5 BBAR #bar{b}
t = 6 T t
anti_t = -6 TBAR #bar{t}
l = 7 LPRIME !
anti_l = -7 LPRIMEBAR !
h = 8 ! !
anti_h = -8 ! !
g = 21 GLUE g
e_minus = 11 E- e^{-}
e_plus = -11 E+ e^{+}
nu_e = 12 NUE #nu_{e}
anti_nu_e = -12 ANUE #bar{#nu}_{e}
mu_minus = 13 MU- #mu^{-}
mu_plus = -13 MU+ #mu^{+}
nu_mu = 14 NUM #nu_{#mu}
anti_nu_mu = -14 ANUM #bar{#nu}_{#mu}
tau_minus = 15 TAU- #tau^{-}
tau_plus = -15 TAU+ #tau^{+}
nu_tau = 16 NUT #nu_{#tau}
anti_nu_tau = -16 ANUT #bar{nu}_{#tau}
L_minus = 17 ! !
L_plus = -17 ! !
nu_L = 18 ! !
anti_nu_L = -18 ! !
gamma = 22 PHOT #gamma
Z0 = 23 Z0 Z
W_plus = 24 W+ W^{+}
W_minus = -24 W- W^{-}
Higgs0 = 25 H0 h^{0}
reggeon = 28 ! !
pomeron = 29 ! !
Z_prime0 = 32 ! !
Z_prime_prime0 = 33 ! !
W_prime_plus = 34 ! !
W_prime_minus = -34 ! !
Higgs_prime0 = 35 ! !
A0 = 36 ! !
Higgs_plus = 37 ! !
Higgs_minus = -37 ! !
R0 = 40 ! !
anti_R0 = -40 ! !
specflav = 81 ! !
rndmflav = 82 ! !
anti_rndmflav = -82 ! !
phasespa = 83 ! !
c_minushadron = 84 ! !
anti_c_minushadron = -84 ! !
b_minushadron = 85 ! !
anti_b_minushadron = -85 ! !
t_minushadron = 86 ! !
anti_t_minushadron = -86 ! !
Wvirt_plus = 89 ! !
Wvirt_minus = -89 ! !
diquark = 90 ! !
anti_diquark = -90 ! !
cluster = 91 CLUSTER cluster
string = 92 ! !
indep = 93 ! !
CMshower = 94 ! !
SPHEaxis = 95 ! !
THRUaxis = 96 ! !
CLUSjet = 97 ! !
CELLjet = 98 ! !
table = 99 ! !
pi0 = 111 PI0 #pi^{0}
pi_plus = 211 PI+ #pi^{+}
pi_minus = -211 PI- #pi^{-}
pi_diffr_plus = 210 ! !
pi_diffr_minus = -210 ! !
pi_2S0 = 20111 ! !
pi_2S_plus = 20211 ! !
pi_2S_minus = -20211 ! !
eta = 221 ETA #eta
eta_2S = 20221 ! !
eta_prime = 331 ! !
rho0 = 113 ! #rho^{0}
rho_plus = 213 RHO+ #rho^{+}
rho_minus = -213 RHO- #rho^{-}
rho_2S0 = 30113 ! !
rho_2S_plus = 30213 ! !
rho_2S_minus = -30213 ! !
rho_3S0 = 40113 ! !
rho_3S_plus = 40213 ! !
rho_3S_minus = -40213 ! !
omega = 223 ! !
omega_2S = 30223 ! !
phi = 333 PHI #phi
a_00 = 10111 ! !
a_0_plus = 10211 ! !
a_0_minus = -10211 ! !
f_0 = 10221 ! !
f_prime_0 = 10331 ! !
b_10 = 10113 ! !
b_1_plus = 10213 ! !
b_1_minus = -10213 ! !
h_1 = 10223 h_1 h_{1}
h_prime_1 = 10333 ! !
a_10 = 20113 ! !
a_1_plus = 20213 ! !
a_1_minus = -20213 ! !
f_1 = 20223 ! !
f_prime_1 = 20333 ! !
a_20 = 115 ! !
a_2_plus = 215
|
theosotr/netxanal
|
mvc/controller/visualization.py
|
Python
|
apache-2.0
| 21,545 | 0.001439 |
"""
This module contains classes for data and graph visualization.
For that purpose, there are classes for creating simple graph images,
images that depict a path (critical, shortest, or strongest), and for
letting a user customize images and the way nodes and edges are drawn.
Apart from this, there are also classes for creating frequency diagrams
(for node measures such as closeness centrality and clustering coefficient)
and diagrams of how average degree and average shortest path length evolve
over time.
"""
__author__ = 'Thodoris Sotiropoulos'
from mvc.controller.analysis import Community
from mvc.controller.analysis import Path
import matplotlib
matplotlib.use('AGG')
import StringIO
import pylab as plt
import networkx as nx
import copy
import math
from random import random
class GraphImage:
"""
This class represents an image of graph and how graph's nodes and edges
are depicted.
For example, nodes of graph are depicted with red and edges are depicted
with black.
"""
def __init__(self, image_style, graphfile):
"""
Initialize image of graph according to what should be depicted.
For example style of image is defined such as size of nodes, edge color,
node shape, node color, edge width, edge style.
Moreover, an encoded string of image based on base64 encoding is created,
without any depiction of any path, community, etc.
:param image_style Style of image.
:param graphfile Graph object which is going to depicted.
"""
self.url = None
self.communities = None
self.communities_image = False
self.communities_color = {}
self.level = 1
self.path_image = False
self.paths = None
self.ranking = None
self.ranking_image = False
self.graph = graphfile.graph
self.image_style = image_style
self.simple_image()
def get_node_pos(self):
"""
        Gets layout of graph's nodes.
        :return Position of nodes.
"""
pos = nx.get_node_attributes(self.graph.graph, 'position')
return pos
def draw_edge_weights(self, pos):
"""
Draws edge weights.
For undirected graphs, weight label is positioned at the center of edge.
        For directed graphs, weight label is positioned at the side of target node.
For example, is there is an edge between nodes A and B as the following
A --> B with weight C, label C is going to be depicted at the side of node
B.
:param pos Position of nodes.
"""
if self.graph.graphtype == 'Undirected':
return self.draw_edge_weights_undirected(pos)
edge_list = []
for u, v in self.graph.graph.edges():
edge_labels = {}
e1 = (u, v)
edge_labels[tuple(e1)] = self.graph.graph.edge[u][v]['weight']
if edge_list.count(str(u + v)) == 0 and self.graph.graphtype == 'Directed':
nx.draw_networkx_edge_labels(self.graph, pos, edge_labels=edge_labels,
font_size=9, label_pos=0.2)
if self.graph.graph.has_edge(v, u):
edge_lab = {}
e2 = (v, u)
edge_list.append(str(v + u))
edge_lab[tuple(e2)] = self.graph.graph.edge[v][u]['weight']
nx.draw_networkx_edge_labels(self.graph, pos, edge_labels=edge_lab,
font_size=9, label_pos=0.2)
def draw_edge_weights_undirected(self, pos):
"""
Draws edge weights.
For undirected graphs, weight label is positioned at the center of edge.
:param pos Position of nodes.
"""
edge_labels = {}
for u, v in self.graph.graph.edges():
e = (u, v)
edge_labels[tuple(e)] = self.graph.graph.edge[u][v]['weight']
nx.draw_networkx_edge_labels(self.graph, pos, edge_labels=edge_labels,
font_size=9)
def create_image_url(self):
"""
Creates an encoded string of PNG image of graph based on base64 encoding.
"""
plt.axis("off")
try:
rv = StringIO.StringIO()
plt.savefig(rv, format="png")
self.url = "data:image/png;base64,%s" % rv.getvalue().encode("base64").strip()
finally:
plt.clf()
plt.close()
def simple_image(self):
"""
Creates a simple image of graph visualization without any depiction of
path between two nodes, or communities, or defining size and color of
nodes according to their values in a measure (closeness centrality,
clustering coefficient, etc.)
"""
pos = self.get_node_pos()
self.draw_nodes(pos)
self.draw_edges(pos)
self.create_image_url()
def draw_nodes(self, pos):
"""
Draws nodes of graphs according to their style.
Node style is defined by the node size, node color, node shape.
:param pos Position of nodes.
"""
nodes = self.graph.graph.nodes()
nx.draw_networkx_nodes(self.graph.graph, pos, nodelist=nodes,
node_size=self.image_style.node_size,
node_color=self.image_style.node_color,
node_shape=self.image_style.node_shape)
def create_path(self, path=None):
"""
Creates an image of graph with a depiction of path between two nodes.
Path can be the critical, shortest, strongest path between these two nodes.
        :param path Path object holding the path(s) to depict; if None, the
        previously stored path is reused.
"""
self.path_image = True
self.communities_image = False
self.ranking_image = False
if path is not None:
self.paths = path
pos = self.get_node_pos()
self.draw_path_nodes(pos)
self.draw_path_edges(pos)
self.create_image_url()
def draw_path_nodes(self, pos):
"""
Draws nodes in an image which depicts a path between two nodes.
        Nodes which are included in this path are depicted with crimson
        color and a size bigger than that of nodes which are not included
        in this path.
:param pos Position of nodes.
"""
for path in self.paths.path_sequence:
nx.draw_networkx_nodes(self.graph.graph, pos, nodelist=path,
node_size=self.image_style.node_size + 100,
node_color='crimson',
node_shape=self.image_style.node_shape)
rest_nodes = Path.get_nodes_which_are_not_in_path(self.graph.graph,
self.paths.path_sequence)
nx.draw_networkx_nodes(self.graph.graph, pos, nodelist=rest_nodes,
node_size=self.image_style.node_size,
node_color=self.image_style.node_color,
node_shape=self.image_style.node_shape)
def draw_path_edges(self, pos):
"""
Draws edges in an image which depicts a path between two nodes.
        Edges which are included in this path are depicted with black
        color, a width bigger than that of edges which are not included
        in this path, and with a dashed line.
:param pos Position of nodes.
"""
all_vertices = []
for path in self.paths.path_sequence:
path_vertices = Path.get_path_edges(path)
all_vertices.append(path_vertices)
nx.draw_networkx_edges(self.graph.graph, pos, edgelist=path_vertices,
width=self.image_style.edge_width + 1,
edge_color="black", style="dashed")
rest_edges = Path.get_edges_which_are_not_in_paths(self.graph.graph,
al
|
TeamODrKnow/doctor-know
|
haigha/frames/frame.py
|
Python
|
mit
| 4,107 | 0.000243 |
'''
Copyright (c) 2011-2014, Agora Games, LLC All rights reserved.
https://github.com/agoragames/haigha/blob/master/LICENSE.txt
'''
import struct
import sys
from collections import deque
from haigha.reader import Reader
class Frame(object):
'''
Base class for a frame.
'''
# Exceptions
class FrameError(Exception):
'''Base class for all frame errors'''
class FormatError(FrameError):
'''The frame was mal-formed.'''
class InvalidFrameType(FrameError):
'''The frame type is unknown.'''
# Class data
_frame_type_map = {}
# Class methods
@classmethod
def register(cls):
'''
Register a frame type.
'''
cls._frame_type_map[cls.type()] = cls
@classmethod
def type(self):
'''
Fetch the type of this frame. Should be an octet.
'''
raise NotImplementedError()
@classmethod
def read_frames(cls, reader):
'''
Read one or more frames from an IO stream. Buffer must support file
object interface.
After reading, caller will need to check if there are bytes remaining
in the stream. If there are, then that implies that there is one or
more incomplete frames and more data needs to be read. The position
of the cursor in the frame stream will mark the point at which the
last good frame was read. If the caller is expecting a sequence of
frames and only received a part of that sequence, they are responsible
for buffering those frames until the rest of the frames in the sequence
have arrived.
'''
rval = deque()
while True:
frame_start_pos = reader.tell()
try:
frame = Frame._read_frame(reader)
except Reader.BufferUnderflow:
# No more data in the stream
frame = None
except Reader.ReaderError as e:
# Some other format error
raise Frame.FormatError, str(e), sys.exc_info()[-1]
except struct.error as e:
raise Frame.FormatError, str(e), sys.exc_info()[-1]
if frame is None:
reader.seek(frame_start_pos)
break
rval.append(frame)
return rval
@classmethod
def _read_frame(cls, reader):
'''
Read a single frame from a Reader. Will return None if there is an
incomplete frame in the stream.
        Raise Frame.FormatError if there's a problem reading the footer byte.
'''
frame_type = reader.read_octet()
channel_id = reader.read_short()
size = reader.read_long()
payload = Reader(reader, reader.tell(), size)
        # Seek to end of payload
reader.seek(size, 1)
ch = reader.read_octet() # footer
if ch != 0xce:
            raise Frame.FormatError(
                'Framing error, unexpected byte: %x. frame type %x. channel %d, payload size %d'
                % (ch, frame_type, channel_id, size))
frame_class = cls._frame_type_map.get(frame_type)
if not frame_class:
            raise Frame.InvalidFrameType("Unknown frame type %x" % frame_type)
return frame_class.parse(channel_id, payload)
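    # Editor's note: the wire layout implied by the reads above (an assumption,
    # consistent with AMQP 0-9-1 framing) is: 1-byte frame type, 2-byte channel
    # id, 4-byte payload size, <size> payload bytes, then a 0xce end marker.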
# Instance methods
def __init__(self, channel_id=-1):
self._channel_id = channel_id
@classmethod
def parse(cls, channel_id, payload):
'''
Subclasses need to implement parsing of their frames. Should return
a new instance of their type.
'''
raise NotImplementedError()
@property
def channel_id(self):
return self._channel_id
def __str__(self):
return "%s[channel: %d]" % (self.__class__.__name__, self.channel_id)
def __repr__(self):
# Have to actually call the method rather than __repr__==__str__
# because subclasses overload __str__
return str(self)
def write_frame(self, stream):
'''
Write this frame.
'''
raise NotImplementedError()
|
pretsell/PokemonGo-Map
|
pogom/app.py
|
Python
|
agpl-3.0
| 19,025 | 0.002628 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import calendar
import logging
from flask import Flask, abort, jsonify, render_template, request
from flask.json import JSONEncoder
from flask_compress import Compress
from datetime import datetime
from s2sphere import LatLng
from pogom.utils import get_args
from datetime import timedelta
from collections import OrderedDict
from . import config
from .models import Pokemon, Gym, Pokestop, ScannedLocation, MainWorker, WorkerStatus
from .utils import now
log = logging.getLogger(__name__)
compress = Compress()
class Pogom(Flask):
def __init__(self, import_name, **kwargs):
super(Pogom, self).__init__(import_name, **kwargs)
compress.init_app(self)
self.json_encoder = CustomJSONEncoder
self.route("/", methods=['GET'])(self.fullmap)
self.route("/raw_data", methods=['GET'])(self.raw_data)
self.route("/loc", methods=['GET'])(self.loc)
self.route("/next_loc", methods=['POST'])(self.next_loc)
self.route("/mobile", methods=['GET'])(self.list_pokemon)
self.route("/search_control", methods=['GET'])(self.get_search_control)
self.route("/search_control", methods=['POST'])(self.post_search_control)
self.route("/stats", methods=['GET'])(self.get_stats)
self.route("/status", methods=['GET'])(self.get_status)
self.route("/status", methods=['POST'])(self.post_status)
self.route("/gym_data", methods=['GET'])(self.get_gymdata)
    def set_search_control(self, control):
self.search_control = control
def set_heartbeat_control(self, heartb):
self.heartbeat = heartb
def set_location_queue(self, queue):
self.location_queue = queue
    def set_current_location(self, location):
self.current_location = location
def get_search_control(self):
return jsonify({'status': not self.search_control.is_set()})
def post_search_control(self):
args = get_args()
if not args.search_control or args.on_demand_timeout > 0:
return 'Search control is disabled', 403
action = request.args.get('action', 'none')
if action == 'on':
self.search_control.clear()
log.info('Search thread resumed')
elif action == 'off':
self.search_control.set()
log.info('Search thread paused')
else:
return jsonify({'message': 'invalid use of api'})
return self.get_search_control()
def fullmap(self):
self.heartbeat[0] = now()
args = get_args()
if args.on_demand_timeout > 0:
self.search_control.clear()
fixed_display = "none" if args.fixed_location else "inline"
search_display = "inline" if args.search_control and args.on_demand_timeout <= 0 else "none"
scan_display = "none" if (args.only_server or args.fixed_location or args.spawnpoint_scanning) else "inline"
map_lat = self.current_location[0]
map_lng = self.current_location[1]
if request.args:
map_lat = request.args.get('lat') or self.current_location[0]
map_lng = request.args.get('lon') or self.current_location[1]
return render_template('map.html',
lat=map_lat,
lng=map_lng,
gmaps_key=config['GMAPS_KEY'],
lang=config['LOCALE'],
is_fixed=fixed_display,
search_control=search_display,
show_scan=scan_display
)
def raw_data(self):
self.heartbeat[0] = now()
args = get_args()
if args.on_demand_timeout > 0:
self.search_control.clear()
d = {}
# Request time of this request.
d['timestamp'] = datetime.utcnow()
# Request time of previous request.
if request.args.get('timestamp'):
timestamp = int(request.args.get('timestamp'))
timestamp -= 1000 # Overlap, for rounding errors.
else:
timestamp = 0
swLat = request.args.get('swLat')
swLng = request.args.get('swLng')
neLat = request.args.get('neLat')
neLng = request.args.get('neLng')
oSwLat = request.args.get('oSwLat')
oSwLng = request.args.get('oSwLng')
oNeLat = request.args.get('oNeLat')
oNeLng = request.args.get('oNeLng')
# Previous switch settings.
lastgyms = request.args.get('lastgyms')
lastpokestops = request.args.get('lastpokestops')
lastpokemon = request.args.get('lastpokemon')
lastslocs = request.args.get('lastslocs')
lastspawns = request.args.get('lastspawns')
if request.args.get('luredonly', 'true') == 'true':
luredonly = True
else:
luredonly = False
# Current switch settings saved for next request.
if request.args.get('gyms', 'true') == 'true':
d['lastgyms'] = request.args.get('gyms', 'true')
if request.args.get('pokestops', 'true') == 'true':
d['lastpokestops'] = request.args.get('pokestops', 'true')
if request.args.get('pokemon', 'true') == 'true':
d['lastpokemon'] = request.args.get('pokemon', 'true')
if request.args.get('scanned', 'true') == 'true':
d['lastslocs'] = request.args.get('scanned', 'true')
if request.args.get('spawnpoints', 'false') == 'true':
d['lastspawns'] = request.args.get('spawnpoints', 'false')
# If old coords are not equal to current coords we have moved/zoomed!
if oSwLng < swLng and oSwLat < swLat and oNeLat > neLat and oNeLng > neLng:
newArea = False # We zoomed in no new area uncovered.
elif not (oSwLat == swLat and oSwLng == swLng and oNeLat == neLat and oNeLng == neLng):
newArea = True
else:
newArea = False
# Pass current coords as old coords.
d['oSwLat'] = swLat
d['oSwLng'] = swLng
d['oNeLat'] = neLat
d['oNeLng'] = neLng
if request.args.get('pokemon', 'true') == 'true':
if request.args.get('ids'):
ids = [int(x) for x in request.args.get('ids').split(',')]
d['pokemons'] = Pokemon.get_active_by_id(ids, swLat, swLng,
neLat, neLng)
elif lastpokemon != 'true':
# If this is first request since switch on, load all pokemon on screen.
d['pokemons'] = Pokemon.get_active(swLat, swLng, neLat, neLng)
else:
# If map is already populated only request modified Pokemon since last request time.
d['pokemons'] = Pokemon.get_active(swLat, swLng, neLat, neLng, timestamp=timestamp)
if newArea:
# If screen is moved add newly uncovered Pokemon to the ones that were modified since last request time.
d['pokemons'] = d['pokemons'] + (Pokemon.get_active(swLat, swLng, neLat, neLng, oSwLat=oSwLat, oSwLng=oSwLng, oNeLat=oNeLat, oNeLng=oNeLng))
if request.args.get('eids'):
# Exclude id's of pokemon that are hidden.
eids = [int(x) for x in request.args.get('eids').split(',')]
d['pokemons'] = [x for x in d['pokemons'] if x['pokemon_id'] not in eids]
if request.args.get('reids'):
reids = [int(x) for x in request.args.get('reids').split(',')]
d['pokemons'] = d['pokemons'] + (Pokemon.get_active_by_id(reids, swLat, swLng, neLat, neLng))
d['reids'] = reids
if request.args.get('pokestops', 'true') == 'true':
if lastpokestops != 'true':
d['pokestops'] = Pokestop.get_stops(swLat, swLng, neLat, neLng, lured=luredonly)
else:
d['pokestops'] = Pokestop.get_stops(swLat, swLng, neLat, neLng, timestamp=timestamp)
if newArea:
d['pokestops'] = d['pokestop
|
OscaRoa/api-cats
|
cats/urls.py
|
Python
|
mit
| 475 | 0 |
from django.conf.urls import url
from cats.views.cat import (
CatList,
CatDetail
)
from cats.views.breed import (
BreedList,
    BreedDetail
)
urlpatterns = [
# Cats URL's
url(r'^cats/$', CatList.as_view(), name='list'),
url(r'^cats/(?P<pk>\d+)/$', CatDetail.as_view(), name='detail'),
# Breeds URL's
url(r'^breeds/$', BreedList.as_view(), name='list_breeds'),
url(r'^breeds/(?P<pk>\d+)/$', BreedDetail.as_view(), name='detail_breed'),
]
|
adobe-flash/avmplus
|
build/buildbot/slaves/android/scripts/shell-client-android.py
|
Python
|
mpl-2.0
| 1,874 | 0.026147 |
#!/usr/bin/env python
# -*- python -*-
# ex: set syntax=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import sys,socket,os,time,re
port=None
host=None
if os.environ.has_key("SHELLPORT"):
try:
port=int(os.environ.get("SHELLPORT"))
except:
print("error: parsing SHELLPORT")
if os.environ.has_key("SHELLSERVER"):
host=os.environ.get("SHELLSERVER")
if len(sys.argv)>1 and re.search('^--shellserver=',sys.argv[1]):
shellserver=sys.argv[1][14:]
if shellserver.find(':')>-1:
        host=shellserver[0:shellserver.find(':')]
try:
port=int(shellserver[shellserver.find(':')+1:])
except:
True
sys.argv=sys.argv[1:]
if (host==None or port==None):
print("error: SHELLPORT and SHELLSERVER must be set")
sys.exit(1)
args=""
for item in sys.argv[1:]:
args+=item+" "
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host,port))
s.send("abc_android %s" % args)
result=''
timeout=300
starttime=time.time()
while True:
newdata=s.recv(1024)
# print("data: "+newdata)
result+=newdata
if re.search("EXITCODE=[0-9]+\s",result):
break
if result.find("-- application argument")>-1:
break
if result.find("\n$")>-1:
break
if time.time()-starttime>timeout:
print("error: timeout detected")
break
s.close()
if re.search("EXITCODE=[0-9]+\s",result):
exitcode=result[result.find("EXITCODE")+9:]
if exitcode.find("$")>-1:
exitcode=exitcode[0:exitcode.find("$")]
try:
exitcode=int(exitcode.strip())
except:
True
result=result[0:result.find("EXITCODE")]
else:
exitcode=0
print(result)
sys.exit(exitcode)
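# Editor's usage sketch (assumed invocation; per the parsing above, the script reads
# SHELLSERVER/SHELLPORT from the environment or a --shellserver=host:port argument):
#   SHELLSERVER=10.0.0.5 SHELLPORT=1234 python shell-client-android.py test.abc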
|
samueljackson92/major-project
|
src/tests/regression_tests/intensity_regression_test.py
|
Python
|
mit
| 780 | 0 |
import unittest
import pandas as pd
import nose.tools
from mia.features.blobs import detect_blobs
from mia.features.intensity import detect_intensity
from mia.utils import preprocess_image
from ..test_utils import get_file_path
class IntensityTests(unittest.TestCase):
@classmethod
    def setUpClass(cls):
img_path = get_file_path("mias/mdb154.png")
msk_path = get_file_path("mias/masks/mdb154_mask.png")
cls._img, cls._msk = preprocess_image(img_path, msk_path)
# def test_detect_intensity(self):
# blobs = detect_blobs(self._img, self._msk)
    # intensity = detect_intensity(self._img, blobs)
#
# nose.tools.assert_true(isinstance(intensity, pd.DataFrame))
    # nose.tools.assert_equal(intensity.shape[1], 10)
|
idea4bsd/idea4bsd
|
python/testData/psi/NotClosedBraceSet.py
|
Python
|
apache-2.0
| 10 | 0.2 |
a = {'b',
]
|
xc0ut/PokeAlarm
|
PokeAlarm/Utils.py
|
Python
|
agpl-3.0
| 11,971 | 0.00259 |
# Standard Library Imports
import configargparse
from datetime import datetime, timedelta
from glob import glob
import json
import logging
from math import radians, sin, cos, atan2, sqrt, degrees
import os
import sys
import re
# 3rd Party Imports
# Local Imports
from . import config
log = logging.getLogger('Utils')
################################################### SYSTEM UTILITIES ###################################################
# Checks if a line contains any substitutions located in args
def contains_arg(line, args):
for word in args:
if ('<' + word + '>') in line:
return True
return False
def get_path(path):
if not os.path.isabs(path): # If not absolute path
path = os.path.join(config['ROOT_PATH'], path)
return path
def parse_boolean(val):
b = str(val).lower()
if b in {'t', 'true', 'y', 'yes'}:
return True
if b in ('f', 'false', 'n', 'no'):
return False
return None
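# Editor's illustration: parse_boolean('Yes') -> True, parse_boolean('f') -> False,
# anything unrecognized (e.g. parse_boolean('maybe')) -> None.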
def parse_unicode(bytestring):
decoded_string = bytestring.decode(sys.getfilesystemencoding())
return decoded_string
# Used for lazy installs - installs required module with pip
def pip_install(module, version):
import subprocess
target = "{}=={}".format(module, version)
log.info("Attempting to pip install %s..." % target)
subprocess.call(['pip', 'install', target])
log.info("%s install com
|
plete." % target)
# Used to exit when leftover parameters are found
def reject_leftover_parameters(dict_, location):
if len(dict_) > 0:
log.error("Unknown parameters at {}: ".format(location))
log.error(dict_.keys())
log.error("Please consult the PokeAlarm documentation for accepted parameters.")
sys.exit(1)
# Load a key from the given dict, or throw an error if it isn't there
def require_and_remove_key(key, _dict, location):
if key in _dict:
return _dict.pop(key)
else:
log.error("The parameter '{}' is required for {}".format(key, location)
+ " Please check the PokeAlarm documentation for correct formatting.")
sys.exit(1)
########################################################################################################################
################################################## POKEMON UTILITIES ###################################################
# Returns the id corresponding with the pokemon name (use all locales for flexibility)
def get_pkmn_id(pokemon_name):
name = pokemon_name.lower()
if not hasattr(get_pkmn_id, 'ids'):
get_pkmn_id.ids = {}
files = glob(get_path('locales/*/pokemon.json'))
for file_ in files:
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
nm = j[id_].lower()
get_pkmn_id.ids[nm] = int(id_)
return get_pkmn_id.ids.get(name)
# Returns the id corresponding with the move (use all locales for flexibility)
def get_move_id(move_name):
name = move_name.lower()
if not hasattr(get_move_id, 'ids'):
get_move_id.ids = {}
files = glob(get_path('locales/*/moves.json'))
for file_ in files:
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
nm = j[id_].lower()
get_move_id.ids[nm] = int(id_)
return get_move_id.ids.get(name)
# Returns the id corresponding with the team name (use all locales for flexibility)
def get_team_id(team_name):
name = team_name.lower()
if not hasattr(get_team_id, 'ids'):
get_team_id.ids = {}
files = glob(get_path('locales/*/teams.json'))
for file_ in files:
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
nm = j[id_].lower()
get_team_id.ids[nm] = int(id_)
return get_team_id.ids.get(name)
# Returns the damage of a move when requesting
def get_move_damage(move_id):
if not hasattr(get_move_damage, 'info'):
get_move_damage.info = {}
file_ = get_path('locales/move_info.json')
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
get_move_damage.info[int(id_)] = j[id_]['damage']
return get_move_damage.info.get(move_id, 'unkn')
# Returns the dps of a move when requesting
def get_move_dps(move_id):
if not hasattr(get_move_dps, 'info'):
get_move_dps.info = {}
file_ = get_path('locales/move_info.json')
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
get_move_dps.info[int(id_)] = j[id_]['dps']
return get_move_dps.info.get(move_id, 'unkn')
# Returns the duration of a move when requesting
def get_move_duration(move_id):
if not hasattr(get_move_duration, 'info'):
get_move_duration.info = {}
file_ = get_path('locales/move_info.json')
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
get_move_duration.info[int(id_)] = j[id_]['duration']
return get_move_duration.info.get(move_id, 'unkn')
# Returns the energy of a move when requesting
def get_move_energy(move_id):
if not hasattr(get_move_energy, 'info'):
get_move_energy.info = {}
file_ = get_path('locales/move_info.json')
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
get_move_energy.info[int(id_)] = j[id_]['energy']
return get_move_energy.info.get(move_id, 'unkn')
# Returns the base height for a pokemon
def get_base_height(pokemon_id):
if not hasattr(get_base_height, 'info'):
get_base_height.info = {}
file_ = get_path('locales/base_stats.json')
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
get_base_height.info[int(id_)] = j[id_].get('height')
return get_base_height.info.get(pokemon_id)
# Returns the base weight for a pokemon
def get_base_weight(pokemon_id):
if not hasattr(get_base_weight, 'info'):
get_base_weight.info = {}
file_ = get_path('locales/base_stats.json')
with open(file_, 'r') as f:
j = json.loads(f.read())
for id_ in j:
get_base_weight.info[int(id_)] = j[id_].get('weight')
return get_base_weight.info.get(pokemon_id)
# Returns the size ratio of a pokemon
def size_ratio(pokemon_id, height, weight):
height_ratio = height / get_base_height(pokemon_id)
weight_ratio = weight / get_base_weight(pokemon_id)
return height_ratio + weight_ratio
# Returns the (appraisal) size of a pokemon:
def get_pokemon_size(pokemon_id, height, weight):
size = size_ratio(pokemon_id, height, weight)
if size < 1.5:
return 'tiny'
elif size <= 1.75:
return 'small'
elif size < 2.25:
return 'normal'
elif size <= 2.5:
return 'large'
else:
return 'big'
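# Editor's worked example (hypothetical stats): if a pokemon's height and weight
# both equal their base values, size_ratio() is 1.0 + 1.0 = 2.0, which falls in
# the 1.75..2.25 band above, so get_pokemon_size() returns 'normal'.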
# Returns the gender symbol of a pokemon:
def get_pokemon_gender(gender):
if gender == 1:
return u'\u2642' # male symbol
elif gender == 2:
return u'\u2640' # female symbol
elif gender == 3:
        return u'\u26b2'  # neutral
return '?' # catch all
########################################################################################################################
################################################# GMAPS API UTILITIES ##################################################
# Returns a String link to Google Maps Pin at the location
def get_gmaps_link(lat, lng):
latlng = '{},{}'.format(repr(lat), repr(lng))
return 'http://maps.google.com/maps?q={}'.format(latlng)
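# Editor's illustration: get_gmaps_link(40.758, -73.9855)
# -> 'http://maps.google.com/maps?q=40.758,-73.9855'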
#Returns a String link to Apple Maps Pin at the location
def get_applemaps_link(lat, lng):
latLon = '{},{}'.format(repr(lat), repr(lng))
return 'http://maps.apple.com/maps?daddr={}&z=10&t=s&dirflg=w'.format(latLon)
# Returns a static map url with <lat> and <lng> parameters for dynamic test
def get_static_map_url(settings, api_key=None): # TODO: optimize formatting
if not pars
|
lycantropos/cetus
|
cetus/data_access/connectors.py
|
Python
|
mit
| 5,800 | 0 |
from asyncio import AbstractEventLoop
import aiomysql.sa
import asyncpg
from asyncio_extras import async_contextmanager
from cetus.types import (ConnectionType,
MySQLConnectionType,
PostgresConnectionType)
from sqlalchemy.engine.url import URL
DEFAULT_MYSQL_PORT = 3306
DEFAULT_POSTGRES_PORT = 5432
DEFAULT_MIN_CONNECTIONS_LIMIT = 10
DEFAULT_CONNECTION_TIMEOUT = 60
@async_contextmanager
async def get_connection_pool(
*, db_uri: URL,
is_mysql: bool,
timeout: float = DEFAULT_CONNECTION_TIMEOUT,
min_size: int = DEFAULT_MIN_CONNECTIONS_LIMIT,
max_size: int,
loop: AbstractEventLoop):
if is_mysql:
async with get_mysql_connection_pool(
db_uri,
timeout=timeout,
min_size=min_size,
max_size=max_size,
loop=loop) as connection_pool:
yield connection_pool
else:
async with get_postgres_connection_pool(
db_uri,
timeout=timeout,
min_size=min_size,
max_size=max_size,
loop=loop) as connection_pool:
yield connection_pool
@async_contextmanager
async def get_mysql_connection_pool(
db_uri: URL, *,
timeout: float = DEFAULT_CONNECTION_TIMEOUT,
min_size: int = DEFAULT_MIN_CONNECTIONS_LIMIT,
max_size: int,
loop: AbstractEventLoop):
# `None` port causes exceptions
port = db_uri.port or DEFAULT_MYSQL_PORT
# we use engine instead of plain connection pool
# because `aiomysql` has transactions API
# only for engine-based connections
async with aiomysql.sa.create_engine(
host=db_uri.host,
port=port,
user=db_uri.username,
password=db_uri.password,
db=db_uri.database,
charset='utf8',
connect_timeout=timeout,
# TODO: check if `asyncpg` connections
# are autocommit by default
autocommit=True,
minsize=min_size,
maxsize=max_size,
loop=loop) as engine:
yield engine
@async_contextmanager
async def get_postgres_connection_pool(
db_uri: URL, *,
timeout: float = DEFAULT_CONNECTION_TIMEOUT,
min_size: int = DEFAULT_MIN_CONNECTIONS_LIMIT,
max_size: int,
loop: AbstractEventLoop):
# for symmetry with MySQL case
port = db_uri.port or DEFAULT_POSTGRES_PORT
async with asyncpg.create_pool(
host=db_uri.host,
port=port,
user=db_uri.username,
password=db_uri.password,
database=db_uri.database,
timeout=timeout,
min_size=min_size,
max_size=max_size,
loop=loop) as pool:
yield pool
@async_contextmanager
async def begin_transaction(
*, connection: ConnectionType,
is_mysql: bool):
if is_mysql:
async with begin_mysql_transaction(connection):
yield
else:
async with begin_postgres_transaction(connection):
yield
@async_contextmanager
async def begin_mysql_transaction(
connection: MySQLConnectionType):
transaction = connection.begin()
async with transaction:
yield
@async_contextmanager
async def begin_postgres_transaction(
connection: PostgresConnectionType,
*, isolation: str = 'read_committed',
read_only: bool = False,
deferrable: bool = False):
transaction = connection.transaction(
isolation=isolation,
readonly=read_only,
deferrable=deferrable)
async with transaction:
yield
@async_contextmanager
async def get_connection(
*, db_uri: URL,
is_mysql: bool,
timeout: float = DEFAULT_CONNECTION_TIMEOUT,
loop: AbstractEventLoop):
if is_mysql:
async with get_mysql_connection(
db_uri,
timeout=timeout,
loop=loop) as connection:
yield connection
else:
async with get_postgres_connection(
db_uri,
timeout=timeout,
loop=loop) as connection:
yield connection
@async_contextmanager
async def get_mysql_connection(
db_uri: URL, *,
        timeout: float = DEFAULT_CONNECTION_TIMEOUT,
loop: AbstractEventLoop):
# `None` port causes exceptions
port = db_uri.port or DEFAULT_MYSQL_PORT
# we use engine-based connection
# instead of plain connection
# because `aiomysql` has transactions API
# only for engine-based connections
async with aiomysql.sa.create_engine(
host=db_uri.host,
port=port,
user=db_uri.username,
password=db_uri.password,
db=db_uri.database,
charset='utf8',
connect_timeout=timeout,
# TODO: check if `asyncpg` connections
# are autocommit by default
autocommit=True,
minsize=1,
maxsize=1,
loop=loop) as engine:
async with engine.acquire() as connection:
yield connection
@async_contextmanager
async def get_postgres_connection(
db_uri: URL, *,
timeout: float = DEFAULT_CONNECTION_TIMEOUT,
loop: AbstractEventLoop):
# for symmetry with MySQL case
port = db_uri.port or DEFAULT_POSTGRES_PORT
connection = await asyncpg.connect(
host=db_uri.host,
port=port,
user=db_uri.username,
password=db_uri.password,
database=db_uri.database,
timeout=timeout,
loop=loop)
try:
yield connection
finally:
await connection.close()
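# Editor's usage sketch (hypothetical URL and event loop; names follow the API above):
#
#   import asyncio
#   from sqlalchemy.engine.url import make_url
#
#   async def demo():
#       loop = asyncio.get_event_loop()
#       db_uri = make_url('postgresql://user:secret@localhost/mydb')
#       async with get_connection(db_uri=db_uri, is_mysql=False, loop=loop) as connection:
#           async with begin_transaction(connection=connection, is_mysql=False):
#               ...  # run queries inside a transaction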
|
HybridF5/tempest_debug
|
tempest/api/orchestration/stacks/test_templates_negative.py
|
Python
|
apache-2.0
| 1,954 | 0 |
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions a
|
nd limitations
# under the License.
from tempest.api.orchestration import base
from tempest.lib import exceptions as lib_exc
from tempest import test
class TemplateYAMLNegativeTestJSON(base.BaseOrchestrationTest):
template = """
HeatTemplateFormatVersion: '2012-12-12'
Description: |
Template which creates only a new user
Resources:
CfnUser:
Type: AWS::IAM::User
"""
invalid_template_url = 'http://www.example.com/template.yaml'
@classmethod
def resource_setup(cls):
super(TemplateYAMLNegativeTestJSON, cls).resource_setup()
cls.parameters = {}
@test.attr(type=['negative'])
@test.idempotent_id('5586cbca-ddc4-4152-9db8-fa1ce5fc1876')
def test_validate_template_url(self):
"""Validating template passing url to it."""
self.assertRaises(lib_exc.BadRequest,
self.client.validate_template_url,
template_url=self.invalid_template_url,
parameters=self.parameters)
class TemplateAWSNegativeTestJSON(TemplateYAMLNegativeTestJSON):
template = """
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template which creates only a new user",
"Resources" : {
"CfnUser" : {
"Type" : "AWS::IAM::User"
}
}
}
"""
invalid_template_url = 'http://www.example.com/template.template'
|
ruchee/vimrc
|
vimfiles/bundle/vim-python/submodules/pylint/tests/functional/t/too/too_many_branches.py
|
Python
|
mit
| 1,099 | 0.00273 |
""" Test for too many branches. """
# pylint: disable=using-constant-test
def wrong(): # [too-many-branches]
""" Has too many branches. """
if 1:
pass
elif 1:
pass
elif 1:
pass
elif 1:
pass
elif 1:
pass
elif 1:
pass
try:
pass
finally:
pass
if 2:
pass
while True:
pass
if 1:
pass
elif 2:
pass
elif 3:
pass
def good():
""" Too many branches only if we take
into consideration the nested functions.
"""
def nested_1():
""" empty """
if 1:
pass
elif 2:
pass
        elif 3:
            pass
elif 4:
pass
nested_1()
    try:
        pass
finally:
pass
try:
pass
finally:
pass
if 1:
pass
elif 2:
pass
elif 3:
pass
elif 4:
pass
elif 5:
pass
elif 6:
pass
elif 7:
pass
|
marscher/bhmm
|
bhmm/hidden/impl_c/__init__.py
|
Python
|
lgpl-3.0
| 59 | 0.016949 |
__author__ = 'noe'
from bhmm.hidden.impl_c.hidden import *
|
lucadealfaro/crowdranker
|
controllers/feedback.py
|
Python
|
bsd-3-clause
| 10,966 | 0.005471 |
# -*- coding: utf-8 -*-
import access
import util
@auth.requires_login()
def index():
"""Produces a list of the feedback obtained for a given venue,
or for all venues."""
venue_id = request.args(0)
if venue_id == 'all':
q = (db.submission.user == get_user_email())
else:
q = ((db.submission.user == get_user_email())
& (db.submission.venue_id == venue_id))
db.submission.id.represent = lambda x, r: A(T('View'), _class='btn', _href=URL('submission', 'view_own_submission', args=['v', r.id]))
db.submission.id.label = T('Submission')
db.submission.id.readable = True
db.submission.venue_id.readable = True
grid = SQLFORM.grid(q,
fields=[db.submission.id, db.submission.venue_id,
db.submission.date_created, db.submission.date_updated, ],
csv=False, details=False, create=False, editable=False, deletable=False,
args=request.args[:1],
maxtextlength=24,
)
return dict(grid=grid)
@auth.requires_login()
def view_feedback():
"""Shows detailed feedback for a user in a venue.
This controller accepts various types of arguments:
* 's', submission_id
* 'u', venue_id, username
* 'v', venue_id (in which case, shows own submission to that venue)
"""
if len(request.args) == 0:
redirect(URL('default', 'index'))
if request.args(0) == 's':
# submission_id
n_args = 2
subm = db.submission(request.args(1)) or redirect(URL('default', 'index'))
c = db.venue(subm.venue_id) or redirect(URL('default', 'index'))
username = subm.user
elif request.args(0) == 'v':
        # venue_id
n_args = 2
c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
username = get_user_email()
subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
else:
# venue_id, username
n_args = 3
c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
username = request.args(2) or redirect(URL('default', 'index'))
subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
# Checks permissions.
props = db(db.user_properties.user == get_user_email()).select().first()
if props == None:
session.flash = T('Not authorized.')
redirect(URL('default', 'index'))
is_author = (username == get_user_email())
can_view_feedback = access.can_view_feedback(c, props) or is_author
if (not can_view_feedback):
session.flash = T('Not authorized.')
redirect(URL('default', 'index'))
if not (access.can_view_feedback(c, props) or datetime.utcnow() > c.rate_close_date):
session.flash = T('The ratings are not yet available.')
redirect(URL('feedback', 'index', args=['all']))
# Produces the link to edit the feedback.
edit_feedback_link = None
if subm is not None and access.can_observe(c, props):
edit_feedback_link = A(T('Edit feedback'), _class='btn',
_href=URL('submission', 'edit_feedback', args=[subm.id]))
# Produces the download link.
download_link = None
if subm is not None and c.allow_file_upload and subm.content is not None:
if is_author:
download_link = A(T('Download'), _class='btn',
_href=URL('submission', 'download_author', args=[subm.id, subm.content]))
else:
download_link = A(T('Download'), _class='btn',
_href=URL('submission', 'download_manager', args=[subm.id, subm.content]))
venue_link = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
# Submission link.
subm_link = None
if subm is not None and c.allow_link_submission:
subm_link = A(subm.link, _href=subm.link)
# Submission content and feedback.
subm_comment = None
subm_feedback = None
if subm is not None:
raw_subm_comment = keystore_read(subm.comment)
if raw_subm_comment is not None and len(raw_subm_comment) > 0:
subm_comment = MARKMIN(keystore_read(subm.comment))
raw_feedback = keystore_read(subm.feedback)
if raw_feedback is not None and len(raw_feedback) > 0:
subm_feedback = MARKMIN(raw_feedback)
# Display settings.
db.submission.percentile.readable = True
db.submission.comment.readable = True
db.submission.feedback.readable = True
if access.can_observe(c, props):
db.submission.quality.readable = True
db.submission.error.readable = True
# Reads the grade information.
submission_grade = submission_percentile = None
review_grade = review_percentile = user_reputation = None
final_grade = final_percentile = None
assigned_grade = None
if c.grades_released:
grade_info = db((db.grades.user == username) & (db.grades.venue_id == c.id)).select().first()
if grade_info is not None:
submission_grade = represent_quality(grade_info.submission_grade, None)
submission_percentile = represent_percentage(grade_info.submission_percentile, None)
review_grade = represent_quality_10(grade_info.accuracy, None)
review_percentile = represent_percentage(grade_info.accuracy_percentile, None)
user_reputation = represent_01_as_percentage(grade_info.reputation, None)
final_grade = represent_quality(grade_info.grade, None)
final_percentile = represent_percentage(grade_info.percentile, None)
assigned_grade = represent_quality(grade_info.assigned_grade, None)
# Makes a grid of comments.
db.task.submission_name.readable = False
db.task.assigned_date.readable = False
db.task.completed_date.readable = False
db.task.rejected.readable = True
db.task.helpfulness.readable = db.task.helpfulness.writable = True
# Prevent editing the comments; the only thing editable should be the "is bogus" field.
db.task.comments.writable = False
db.task.comments.readable = True
ranking_link = None
if access.can_observe(c, props):
db.task.user.readable = True
db.task.completed_date.readable = True
links = [
dict(header=T('Review details'), body= lambda r:
A(T('View'), _class='btn', _href=URL('ranking', 'view_comparison', args=[r.id]))),
]
details = False
if subm is not None:
ranking_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_submission', args=[subm.id]))
reviews_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_user', args=[username, c.id]))
db.task.user.represent = lambda v, r: A(v, _href=URL('ranking', 'view_comparisons_given_user',
args=[v, c.id], user_signature=True))
else:
user_reputation = None
links = [
dict(header=T('Review feedback'), body = lambda r:
A(T('Give feedback'), _class='btn',
_href=URL('feedback', 'reply_to_review', args=[r.id], user_signature=True))),
]
details = False
ranking_link = None
reviews_link = None
if subm is not None:
q = ((db.task.submission_id == subm.id) & (db.task.is_completed == True))
# q = (db.task.submission_id == subm.id)
else:
q = (db.task.id == -1)
grid = SQLFORM.grid(q,
fields=[db.task.id, db.task.user, db.task.rejected, db.task.comments, db.task.helpfulness, ],
details = details,
csv=False, create=False, editable=False, deletable=False, searchable=False,
links=links,
args=request.args[:n_args],
maxtextlength=24,
)
return dict(subm=subm, download_link=download_link, subm_link=subm_link, username=username,
subm_comment=subm_comment, subm_feedback=subm_feedback,
edit_feedback_link=edit_feedback_link,
is_admin=is_user_admin(),
                submission_grade=submission_grade,
                submission_percentile=submission_percentile,
                review_grade=review_grade, review_percentile=review_percentile,
                user_reputation=user_reputation,
                final_grade=final_grade, final_percentile=final_percentile,
                assigned_grade=assigned_grade, venue_link=venue_link,
                grid=grid, ranking_link=ranking_link, reviews_link=reviews_link)
|
AntonKhorev/spb-budget-db
|
3-db/testFileLists.py
|
Python
|
bsd-2-clause
| 1,567 | 0.051053 |
#!/usr/bin/env python3
import unittest
import fileLists
class TestFileLists(unittest.TestCase):
def testOneEntry(self):
l=fileLists.listTableFiles([
'2014.3765.1.1.department.diff.csv',
])
self.assertEqual(len(l),1)
t=l[0]
self.assertEqual(t.stageYear,2014)
self.assertEqual(t.documentNumber,3765)
self.assertEqual(t.paragraphNumber,'1.1')
self.assertEqual(t.table,'department')
self.assertIsInstance(t.action,fileLists.DiffAction)
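	# The filename grammar exercised by these tests appears to be
	# <stageYear>.<documentNumber>.<paragraphNumber>.<table>.<action[(args)]>.csv
	# (inferred from the test data, not from the fileLists module itself).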
def testOneEntryWithDirectory(self):
l=fileLists.listTableFiles([
'tables\\2014.3574.3.department.set(2014).csv'
])
self.assertEqual(len(l),1)
def testSort(self):
l=fileLists.listTableFiles([
'2014.3765.7.1.department.diff.csv',
'2014.3765.10.4.department.diff.csv',
'2014.3765.1.1.department.diff.csv',
])
self.assertEqual(len(l),3)
self.assertEqual(l[0].paragraphNumber,'1.1')
self.assertEqual(l[1].paragraphNumber,'7.1')
self.assertEqual(l[2].paragraphNumber,'10.4')
def testSet(self):
l=fileLists.listTableFiles([
'2014.3765.1.1.department.set(2015,2016).csv',
])
self.assertEqual(len(l),1)
		self.assertIsInstance(l[0].action,fileLists.SetAction)
self.assertEqual(l[0].action.fiscalYears,{2015,2016})
def testDiffset(self):
l=fileLists.listTableFiles([
'2014.3765.1.1.department.diffset(1234,2015,2016).csv',
])
self.assertEqual(len(l),1)
self.assertIsInstance(l[0].action,fileLists.DiffsetAction)
self.assertEqual(l[0].action.documentNumber,1234)
		self.assertEqual(l[0].action.fiscalYears,{2015,2016})
if __name__=='__main__':
unittest.main()
|
autotest/virt-test
|
tools/github/github_issues.py
|
Python
|
gpl-2.0
| 29,542 | 0.000609 |
"""
Classes to cache and read specific items from github issues in a uniform way
"""
from functools import partial as Partial
import datetime
import time
import shelve
# Requires PyGithub version >= 1.13 for access to raw_data attribute
import github
# Needed to not confuse cached 'None' objects
class Nothing(object):
raw_data = None
# Needed to signal list cache, not github object
class SearchResults(object):
def __init__(self, *stuff):
self.raw_data = stuff
class GithubCache(object):
"""
Auto-refreshing github.GithubObject.GithubObject from dict
"""
cache_hits = 0
cache_misses = 0
cache_lifetimes = {
'default': datetime.timedelta(hours=2),
github.GitCommit.GitCommit: datetime.timedelta(days=30),
github.Commit.Commit: datetime.timedelta(days=30),
github.Issue.Issue: datetime.timedelta(minutes=30),
github.PullRequest.PullRequest: datetime.timedelta(hours=1),
# Special case for github.Issue.Issue
'closed': datetime.timedelta(days=30),
SearchResults: datetime.timedelta(minutes=10),
github.NamedUser.NamedUser: datetime.timedelta(hours=2),
github.GitAuthor.GitAuthor: datetime.timedelta(days=9999),
'total_issues': datetime.timedelta(days=9999)
}
def __init__(self, github_obj, cache_get_partial, cache_set_partial,
cache_del_partial, pre_fetch_partial, fetch_partial):
self.github = github_obj
self.cache_get = cache_get_partial # Returns native dict
self.cache_set = cache_set_partial # called with value=dict
self.cache_del = cache_del_partial
self.pre_fetch = pre_fetch_partial # called with nothing
self.fetch = fetch_partial # Returns github.GithubObject.GithubObject
def __call__(self):
"""
Retrieve instance from fresh or cached data
"""
# microseconds aren't useful when fetch takes ~1 second
now = datetime.datetime.utcnow()
now = datetime.datetime(year=now.year, month=now.month,
day=now.day, hour=now.hour,
minute=now.minute, second=0, microsecond=0)
try:
data = self.cached_data()
if data['expires'] < now:
raise KeyError # refresh cache
self.cache_hits += 1
except KeyError:
data = self.fetched_data(now)
self.cache_set(value=data)
self.cache_misses += 1
# Any exceptions thrown during conversion should purge cache entry
try:
# Format data for consumption
if data['klass'] == github.PaginatedList.PaginatedList:
inside_klass = data['inside_klass']
result = []
for item in data['raw_data']:
result.append(
self.github.create_from_raw_data(inside_klass,
item))
return result
elif data['klass'] == Nothing:
return None # it's a None object
elif data['klass'] == SearchResults:
return data['raw_data'] # just the contents
else:
return self.github.create_from_raw_data(data['klass'],
data['raw_data'])
except:
try:
self.cache_del()
except KeyError:
pass # doesn't exist in cache, ignore
raise # original exception
@staticmethod
def format_data(klass, expires, raw_data, inside_klass=None):
"""
Enforce uniform data format for fetched data
"""
if inside_klass is None:
return {'klass': klass,
'fetched': datetime.datetime.utcnow(),
'expires': expires,
'raw_data': raw_data}
else:
return {'klass': klass,
'inside_klass': inside_klass,
'fetched': datetime.datetime.utcnow(),
'expires': expires,
'raw_data': raw_data}
def fetched_data(self, now):
"""
Return dictionary containing freshly fetched values
"""
try:
if callable(self.pre_fetch):
self.pre_fetch()
fetched_obj = self.fetch()
except github.GithubException, detail:
if detail.status == 404:
raise KeyError('Github item not-found error while calling %s '
'with args=%s and dargs=%s' % (self.fetch.func,
self.fetch.args,
self.fetch.keywords))
else:
raise
if fetched_obj is None:
fetched_obj = Nothing()
klass = fetched_obj.__class__
# github.PaginatedList.PaginatedList need special handling
if isinstance(fetched_obj, github.PaginatedList.PaginatedList):
raw_data = [item.raw_data for item in fetched_obj]
inside_klass = fetched_obj[0].__class__
expires = now + self.cache_lifetimes.get(inside_klass,
self.cache_lifetimes['default'])
return self.__class__.format_data(klass,
now + self.cache_lifetimes.get(
inside_klass,
self.cache_lifetimes[
'default']),
raw_data, inside_klass)
else:
expires = now + self.cache_lifetimes.get(klass,
# else default
self.cache_lifetimes['default'])
# closed issues/pull requests don't change much
if hasattr(fetched_obj, 'closed_at'):
if fetched_obj.closed_at is not None:
expires = now + self.cache_lifetimes['closed']
return self.__class__.format_data(klass, expires,
fetched_obj.raw_data)
def cached_data(self):
"""
Return dictionary containing cached values or raise KeyError
"""
try:
return self.cache_get() # maybe raise KeyError or TypeError
except KeyError:
raise
except:
# Try to delete the entry
self.cache_del()
raise
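# Illustrative wiring of a cache entry (a sketch, not part of the original
# module; the shelf key and repo name are made up):
#
#   shelf = shelve.open('cache.shelf', protocol=2, writeback=True)
#   gh = github.Github('login', 'password')
#   key = 'repo_owner/name'
#   cached_repo = GithubCache(gh,
#                             Partial(shelf.__getitem__, key),
#                             Partial(shelf.__setitem__, key),
#                             Partial(shelf.__delitem__, key),
#                             None,  # no pre-fetch delay
#                             Partial(gh.get_repo, 'owner/name'))
#   repo = cached_repo()  # first call fetches, later calls hit the shelf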
class GithubIssuesBase(list):
"""
Base class for cached list of github issues
"""
# Force static pickle protocol version
protocol = 2
# Class to use for cache management
cache_class = GithubCache
def __init__(self, github_obj, repo_full_name, cache_filename):
"""
Initialize cache and reference github repository issues
"""
self.github = github_obj
self.repo_full_name = repo_full_name
self.shelf = shelve.open(filename=cache_filename,
protocol=self.protocol,
writeback=True)
# Avoid exceeding rate-limit per hour
requests = self.github.rate_limiting[1] # requests per hour
period = 60.0 * 60.0 # one hour in seconds
sleeptime = period / requests
self.pre_fetch_partial = Partial(time.sleep, sleeptime)
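        # For example, an authenticated limit of 5000 requests/hour gives
        # sleeptime = 3600.0 / 5000 = 0.72 seconds between fetches.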
# self.pre_fetch_partial = None # cheat-mode enable (no delays)
repo_cache_key = 'repo_%s' % self.repo_full_name
# get_repo called same way throughout instance life
cache_get_partial = Partial(self.shelf.__getitem__, repo_cache_key)
cache_set_partial = Partial(self.shelf.__setitem__, repo_cache_key)
cache_del_partial = Partial(self.shelf.__delitem__, repo_cache_key)
fetch_partial = Partial(self.gi
|
davjohnst/fundamentals
|
fundamentals/backtracking/all_permutations.py
|
Python
|
apache-2.0
| 772 | 0.001295 |
#!/usr/bin/env python
class AllPermutations(object):
    def __init__(self, arr):
self.arr = arr
def all_permutations(self):
results = []
used = []
self._all_permutations(self.arr, used, results)
        return results
def _all_permutations(self, to_use, used, results):
if len(to_use) == 0:
results.append(used)
for i, x in enumerate(to_use):
new_used = used + [x]
new_to_use = to_use[:i] + to_use[i+1:]
self._all_permutations(new_to_use, new_used, results)
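# Worked example: for arr = [1, 2] the recursion tries used=[1] then used=[2],
# appending the remaining element to each, so results == [[1, 2], [2, 1]];
# in general len(results) == factorial(len(arr)).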
def main():
arr = [1, 2, 3, 4]
ap = AllPermutations(arr)
results = ap.all_permutations()
for x in results:
print x
print len(results)
if __name__ == "__main__":
main()
|
kkaarreell/ci-dnf-stack
|
dnf-behave-tests/features/steps/repo.py
|
Python
|
gpl-3.0
| 3,594 | 0.004174 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
import behave
import os
import parse
from common import *
@behave.step("I use the repository \"{repo}\"")
def step_repo_condition(context, repo):
if "repos" not in context.dnf:
context.dnf["repos"] = []
if repo not in context.dnf["repos"]:
context.dnf["repos"].append(repo)
@behave.step('I require client certificate verification with certificate "{client_cert}" and key "{client_key}"')
def step_impl(context, client_cert, client_key):
if "client_ssl" not in context.dnf:
context.dnf["client_ssl"] = dict()
context.dnf["client_ssl"]["certificate"] = os.path.join(context.dnf.fixturesdir,
client_cert)
context.dnf["client_ssl"]["key"] = os.path.join(context.dnf.fixturesdir,
client_key)
@parse.with_pattern(r"http|https")
def parse_repo_type(text):
if text in ("http", "https"):
return text
assert False
behave.register_type(repo_type=parse_repo_type)
@behave.step("I use the {rtype:repo_type} repository based on \"{repo}\"")
def step_impl(context, rtype, repo):
assert hasattr(context, 'httpd'), 'Httpd fixture not set. Use @fixture.httpd tag.'
if rtype == "http":
host, port = context.httpd.new_http_server(context.dnf.repos_location)
else:
cacert = os.path.join(context.dnf.fixturesdir,
'certificates/testcerts/ca/cert.pem')
cert = os.path.join(context.dnf.fixturesdir,
'certificates/testcerts/server/cert.pem')
key = os.path.join(context.dnf.fixturesdir,
'certificates/testcerts/server/key.pem')
client_ssl = context.dnf._get(context, "client_ssl")
if client_ssl:
client_cert = client_ssl["certificate"]
client_key = client_ssl["key"]
host, port = context.httpd.new_https_server(
context.dnf.repos_location, cacert, cert, key,
client_verification=bool(client_ssl))
http_reposdir = "/http.repos.d"
repo_id = '{}-{}'.format(rtype, repo)
repocfg = ("[{repo_id}]\n"
"name={repo_id}\n"
"baseurl={rtype}://{host}:{port}/{repo}/\n"
"enabled=1\n"
"gpgcheck=0\n"
)
if rtype == "https":
repocfg += "sslcacert={cacert}\n"
if client_ssl:
repocfg += "sslclientcert={client_cert}\n"
repocfg += "sslclientkey={client_key}\n"
# generate repo file based on "repo" in /http.repos.d
repos_path = os.path.join(context.dnf.installroot, http_reposdir.lstrip("/"))
ensure_directory_exists(repos_path)
repo_file_path = os.path.join(repos_path, '{}.repo'.format(repo_id))
create_file_with_contents(
repo_file_path,
repocfg.format(**locals()))
# add /http.repos.d to reposdir
current_reposdir = context.dnf._get(context, "reposdir")
    if repos_path not in current_reposdir:
context.dnf._set("reposdir", "{},{}".format(current_reposdir, repos_path))
# enable newly created http repo
context.execute_steps('Given I use the repository "{}"'.format(repo_id))
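# For illustration, an http repository named "base" served on port 8000 would
# be rendered roughly as (host, port and repo name are made up here):
#   [http-base]
#   name=http-base
#   baseurl=http://localhost:8000/base/
#   enabled=1
#   gpgcheck=0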
@behave.step("I disable the repository \"{repo}\"")
def step_repo_condition(context, repo):
if "repos" not in context.dnf:
context.dnf["repos"] = []
context.dnf["repos"].remove(repo)
@behave.given("There are no repositories")
def given_no_repos(context):
context.dnf["reposdir"] = "/dev/null"
|
MarxMustermann/OfMiceAndMechs
|
src/itemFolder/military/shocker.py
|
Python
|
gpl-3.0
| 1,603 | 0.004991 |
import src
import random
class Shocker(src.items.Item):
"""
    in-game item used as a resource to build bombs and stuff;
    has the habit of exploding at inconvenient times
"""
type = "Shocker"
def __init__(self):
"""
set up internal state
"""
super().__init__(display="/\\")
def apply(self, character):
"""
Parameters:
character: the character trying to use the item
"""
compressorFound = None
for item in character.inventory:
if isinstance(item,src.items.itemMap["CrystalCompressor"]):
compressorFound = item
break
if compressorFound:
if self.container and isinstance(self.container,src.rooms.Room):
if hasattr(self.container,"electricalCharges"):
if self.container.electricalCharges < self.container.maxElectricalCharges:
self.container.electricalCharges += 1
character.addMessage("you activate the shocker and increase the rooms charges to %s"%(self.container.electricalCharges,))
character.inventory.remove(compressorFound)
else:
                        character.addMessage("this room is fully charged")
else:
character.addMessage("this room can't be charged")
else:
character.addMessage("no room found")
else:
character.addMessage("no crystal compressor found in inventory")
src.items.addType(Shocker)
|
LucidWorks/fusion-seed-app
|
pipelines.py
|
Python
|
mit
| 151 | 0.039735 |
#!/usr/bin/python
import sys
#what is the command
command = sys.argv[1]
source = sys.argv[2]
print "Command: ", command
print "Source: ", sourc
|
e;
|
PKRoma/python-for-android
|
pythonforandroid/recipes/six/__init__.py
|
Python
|
mit
| 236 | 0 |
from pythonforandroid.recipe import PythonRecipe
class SixRecipe(PythonRecipe):
version = '1.15.0'
    url = 'https://pypi.python.org/packages/source/s/six/six-{version}.tar.gz'
depends = ['setuptools']
recipe = SixRecipe()
|
joostrijneveld/eetvoudig
|
meals/migrations/0002_auto_20161006_1640.py
|
Python
|
cc0-1.0
| 576 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('meals', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='wbw_list',
name='list_id',
            field=models.CharField(max_length=40, unique=True),
),
migrations.AlterField(
model_name='participant',
name='wbw_id',
field=models.CharField(max_length=40, unique=True),
),
]
|
mozilla/zamboni
|
mkt/purchase/management/commands/post_bluevia_payment.py
|
Python
|
bsd-3-clause
| 2,183 | 0 |
import calendar
from optparse import make_option
import time
from urllib import urlencode
from django.core.management.base import BaseCommand
import jwt
import requests
class Command(BaseCommand):
help = 'Simulate a BlueVia postback to mark a payment as complete.'
option_list = BaseCommand.option_list + (
make_option('--trans-id', action='store',
help='BlueVia transaction ID', default='1234'),
make_option('--secret', action='store',
                    help='Marketplace secret for signature verification'),
make_option('--contrib', action='store',
help='Contribution UUID'),
make_option('--addon', action='store',
help='ID of addon that was purchased'),
make_option('--url', action='store',
help='Postback URL. Default: %default',
default='http://localhost:8001/services/bluevia/postback'),
)
def handle(self, *args, **options):
        assert options.get('contrib'), 'require --contrib'
        assert options.get('addon'), 'require --addon'
issued_at = calendar.timegm(time.gmtime())
prod_data = urlencode({'contrib_uuid': options['contrib'],
'addon_id': options['addon']})
purchase = {'iss': 'tu.com',
'aud': 'marketplace.mozilla.org',
'typ': 'tu.com/payments/inapp/v1',
'iat': issued_at,
'exp': issued_at + 3600, # expires in 1 hour
'request': {
'name': 'Simulated Product',
'description': 'Simulated Product Description',
'price': '0.99',
'currencyCode': 'USD',
'productData': prod_data},
'response': {
'transactionID': options['trans_id']
}}
purchase_jwt = jwt.encode(purchase, options['secret'])
print 'posting JWT to %s' % options['url']
res = requests.post(options['url'], purchase_jwt, timeout=5)
res.raise_for_status()
print 'OK'
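# Example invocation (all values hypothetical; --secret must match the
# marketplace's BlueVia secret so the JWT signature verifies):
#   ./manage.py post_bluevia_payment --contrib=abc-123 --addon=42 \
#       --secret=s3kret --trans-id=9876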
|
beiko-lab/gengis
|
bin/Lib/site-packages/numpy/version.py
|
Python
|
gpl-3.0
| 238 | 0 |
# THIS FILE IS GENERATED FROM NUMPY SETUP.PY
short_version = '1.7.2'
version = '1.7.2'
full_version = '1.7.2'
git_revision = 'f3ee0735c1c372dfb9e0efcaa6846bd05e53b836'
release = True
if not release:
    version = full_version
|
paulharter/fam
|
src/fam/tests/test_couchdb/test_mapping.py
|
Python
|
mit
| 535 | 0.011215 |
import unittest
from fam.tests.models.test01 import Dog, Cat, Person, JackRussell, Monarch
from fam.mapper import ClassMapper
class MapperTests(unittest.TestCase):
def setUp(self):
self.mapper = ClassMapper([Dog, Cat, Person, JackRussell, Monarch])
    def tearDown(self):
        pass
def test_sub_class_refs(self):
self.assertEqual(set(Monarch.fields.keys()), set(["name", "country", "cats", "dogs", "animals", "callbacks"]))
self.assertEqual(set(Monarch.cls_fields.keys()), {"country"})
|
ria-ee/X-Road
|
src/systemtest/op-monitoring/integration/testcases/test_zero_buffer_size.py
|
Python
|
mit
| 5,787 | 0.003283 |
#!/usr/bin/env python3
# The MIT License
# Copyright (c) 2016 Estonian Information System Authority (RIA), Population Register Centre (VRK)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Test case for verifying that in case the operational monitoring buffer size
# has been set to zero, the operational monitoring data of X-Road requests is
# not stored by the operational monitoring daemon and can't be queried.
# Expecting that the value of op-monitor-buffer.size has been set
# to 0 via run_tests.py.
import os
import sys
sys.path.append('..')
import python_common as common
def run(client_security_server_address, producer_security_server_address,
ssh_user, request_template_dir):
xroad_request_template_filename = os.path.join(
request_template_dir, "simple_xroad_query_template.xml")
query_data_client_template_filename = os.path.join(
request_template_dir, "query_operational_data_client_template.xml")
query_data_producer_template_filename = os.path.join(
request_template_dir, "query_operational_data_producer_template.xml")
client_timestamp_before_requests = common.get_remote_timestamp(
client_security_server_address, ssh_user)
producer_timestamp_before_requests = common.get_remote_timestamp(
producer_security_server_address, ssh_user)
xroad_message_id = common.generate_message_id()
print("\nGenerated message ID %s for X-Road request" % (xroad_message_id, ))
### Regular and operational data requests and the relevant checks
print("\n---- Sending an X-Road request to the client's security server ----\n")
request_contents = common.format_xroad_request_template(
xroad_request_template_filename, xroad_message_id)
print("Generated the following X-Road request: \n")
print(request_contents)
response = common.post_xml_request(
client_security_server_address, request_contents)
print("Received the following X-Road response: \n")
xml = common.parse_and_clean_xml(response.text)
print(xml.toprettyxml())
common.check_soap_fault(xml)
common.wait_for_operational_data()
client_timestamp_after_requests = common.get_remote_timestamp(
client_security_server_address, ssh_user)
producer_timestamp_after_requests = common.get_remote_timestamp(
producer_security_server_address, ssh_user)
# Now make operational data requests to both security servers and check the
# response payloads.
print("\n---- Sending an operational data request to the client's security server ----\n")
message_id = common.generate_message_id()
print("Generated message ID %s for query data request" % (message_id, ))
request_contents = common.format_query_operational_data_request_template(
query_data_client_template_filename, message_id,
client_timestamp_before_requests, client_timestamp_after_requests)
print("Generated the following query data request for the client's security server: \n")
print(request_contents)
response = common.post_xml_request(
client_security_server_address, request_contents,
get_raw_stream=True)
mime_parts, raw_response = common.parse_multipart_response(response)
if mime_parts:
soap_part, record_count = common.get_multipart_soap_and_record_count(mime_parts[0])
common.print_multipart_soap_and_record_count(soap_part, record_count)
# op-monitor-buffer.size=0 must result in an empty response
common.check_record_count(record_count, 0)
else:
common.parse_and_check_soap_response(raw_response)
message_id = common.generate_message_id()
print("\nGenerated message ID %s for operational data request" % (message_id, ))
request_contents = common.format_query_operational_data_request_template(
query_data_producer_template_filename, message_id,
producer_timestamp_before_requests, producer_timestamp_after_requests)
print("Generated the following operational data request for the producer's " \
"security server: \n")
print(request_contents)
response = common.post_xml_request(
producer_security_server_address, request_contents,
get_raw_stream=True)
mime_parts, raw_response = common.parse_multipart_response(response)
if mime_parts:
soap_part, record_count = common.get_multipart_soap_and_record_count(mime_parts[0])
common.print_multipart_soap_and_record_count(soap_part, record_count, is_client=False)
# op-monitor-buffer.size=0 must result in an empty response
common.check_record_count(record_count, 0)
else:
common.parse_and_check_soap_response(raw_response)
|
jhakonen/wot-teamspeak-mod
|
test/fute/test_helpers/constants.py
|
Python
|
lgpl-2.1
| 199 | 0.040201 |
PLUGIN_INFO = {
"versions": [
{
"plugin_version": 1,
"supported_mod_versions": ["0.6"],
"download_url": "https://www.myteamspeak.com/addons/01a0f828-894c-45b7-a85
|
2-937b4
|
7ceb1ed"
}
]
}
|
allembedded/python_web_framework
|
WebApplication/Controllers/ControllerMain.py
|
Python
|
gpl-3.0
| 630 | 0.007937 |
"""
Main controller.
"""
import json
from Server.Importer import ImportFromModule
class ControllerMain(ImportFromModule("Server.ControllerBase", "ControllerBase")):
"""
Main controller.
"""
    def ShowPage(self, uriParameters, postedParameters):
"""
Shows the home page.
"""
webPage = ImportFromModule("WebApplication.Views.PageView", "PageView")("Main")
self.SetOutput(webPage.GetContent())
def EchoText(self, uriParameters, postedParameters):
"""
        Echo the incoming text.
"""
self.SetOutput(json.dumps(uriParameters, indent=4))
|
techlover10/StochasticSoundscape
|
src/audio.py
|
Python
|
mit
| 1,411 | 0.007092 |
#!/usr/bin/python3
#
# Copyright © 2017 jared <jared@jared-devstation>
#
from pydub import AudioSegment, scipy_effects, effects
import os
import settings, util
# combine two audio samples with a crossfade
def combine_samples(acc, file2, CROSSFADE_DUR=100):
util.debug_print('combining ' + file2)
sample2 = AudioSegment.from_wav(file2)
output = acc.append(sample2, crossfade=CROSSFADE_DUR)
output = effects.normalize(output)
return output
# combine audio samples with crossfade, from within program
def combine_prog_samples(acc, nsamp, CROSSFADE_DUR=100):
    output = acc.append(nsamp, crossfade=CROSSFADE_DUR)
return output
# split an audio file into low, mid, high bands
def split_file(fname):
curr_file = AudioSegment.from_file(fname)
    low_seg = scipy_effects.low_pass_filter(curr_file, settings.LOW_FREQUENCY_LIM).export(fname + '_low.wav', 'wav')
mid_seg = scipy_effects.band_pass_filter(curr_file, settings.LOW_FREQUENCY_LIM, settings.HIGH_FREQUENCY_LIM).export(fname + '_mid.wav', 'wav')
high_seg = scipy_effects.high_pass_filter(curr_file, settings.HIGH_FREQUENCY_LIM).export(fname + '_high.wav', 'wav')
## add a sample to an existing wav
#def add_sample(fname, samplefile, CROSSFADE_DUR=100):
# new_file = combine_samples(fname, samplefile, CROSSFADE_DUR)[0]
# os.rename(fname, 'old_' + fname)
# os.rename(new_file, fname)
# return new_file[1]
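# A minimal sketch (not part of the original module) of stitching a list of
# wav paths into one normalized track with combine_samples:
#   acc = AudioSegment.from_wav(files[0])
#   for f in files[1:]:
#       acc = combine_samples(acc, f)
#   acc.export('soundscape.wav', format='wav')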
|
cloudbase/nova-virtualbox
|
nova/api/openstack/compute/plugins/v3/keypairs.py
|
Python
|
apache-2.0
| 7,665 | 0.000261 |
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Keypair management extension."""
import webob
import webob.exc
from nova.api.openstack.compute.schemas.v3 import keypairs
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api import validation
from nova.compute import api as compute_api
from nova import exception
from nova.i18n import _
ALIAS = 'os-keypairs'
authorize = extensions.extension_authorizer('compute', 'v3:' + ALIAS)
soft_authorize = extensions.soft_extension_authorizer('compute', 'v3:' + ALIAS)
class KeypairController(wsgi.Controller):
"""Keypair API controller for the OpenStack API."""
def __init__(self):
self.api = compute_api.KeypairAPI()
def _filter_keypair(self, keypair, **attrs):
clean = {
'name': keypair.name,
'public_key': keypair.public_key,
'fingerprint': keypair.fingerprint,
}
for attr in attrs:
clean[attr] = keypair[attr]
return clean
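    # For illustration, _filter_keypair(kp, user_id=True) returns something
    # like {'name': 'mykey', 'public_key': 'ssh-rsa AAAA...',
    # 'fingerprint': 'aa:bb:...', 'user_id': '<uuid>'} (values hypothetical).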
# TODO(oomichi): Here should be 201(Created) instead of 200 by v2.1
# +microversions because the keypair creation finishes when returning
# a response.
@extensions.expected_errors((400, 403, 409))
@validation.schema(keypairs.create)
def create(self, req, body):
"""Create or import keypair.
Sending name will generate a key and return private_key
and fingerprint.
You can send a public_key to add an existing ssh key
params: keypair object with:
name (required) - string
public_key (optional) - string
"""
context = req.environ['nova.context']
authorize(context, action='create')
params = body['keypair']
name = params['name']
try:
if 'public_key' in params:
keypair = self.api.import_key_pair(context,
context.user_id, name,
params['public_key'])
keypair = self._filter_keypair(keypair, user_id=True)
else:
keypair, private_key = self.api.create_key_pair(
context, context.user_id, name)
keypair = self._filter_keypair(keypair, user_id=True)
keypair['private_key'] = private_key
return {'keypair': keypair}
except exception.KeypairLimitExceeded:
msg = _("Quota exceeded, too many key pairs.")
raise webob.exc.HTTPForbidden(explanation=msg)
except exception.InvalidKeypair as exc:
raise webob.exc.HTTPBadRequest(explanation=exc.format_message())
except exception.KeyPairExists as exc:
raise webob.exc.HTTPConflict(explanation=exc.format_message())
# TODO(oomichi): Here should be 204(No Content) instead of 202 by v2.1
# +microversions because the resource keypair has been deleted completely
# when returning a response.
@wsgi.response(202)
@extensions.expected_errors(404)
def delete(self, req, id):
"""Delete a keypair with a given name."""
context = req.environ['nova.context']
authorize(context, action='delete')
try:
self.api.delete_key_pair(context, context.user_id, id)
except exception.KeypairNotFound as exc:
raise webob.exc.HTTPNotFound(explanation=exc.format_message())
@extensions.expected_errors(404)
def show(self, req, id):
"""Return data for the given key name."""
context = req.environ['nova.context']
authorize(context, action='show')
try:
# Since this method returns the whole object, functional test
# test_keypairs_get is failing, receiving an unexpected field
# 'type', which was added to the keypair object.
# TODO(claudiub): Revert the changes in the next commit, which will
# enable nova-api to return the keypair type.
keypair = self.api.get_key_pair(context, context.user_id, id)
keypair = self._filter_keypair(keypair, created_at=True,
deleted=True, deleted_at=True,
id=True, user_id=True,
updated_at=True)
except exception.KeypairNotFound as exc:
raise webob.exc.HTTPNotFound(explanation=exc.format_message())
# TODO(oomichi): It is necessary to filter a response of keypair with
# _filter_keypair() when v2.1+microversions for implementing consistent
# behaviors in this keypair resource.
return {'keypair': keypair}
@extensions.expected_errors(())
def index(self, req):
"""List of keypairs for a user."""
context = req.environ['nova.context']
authorize(context, action='index')
key_pairs = self.api.get_key_pairs(context, context.user_id)
rval = []
for key_pair in key_pairs:
rval.append({'keypair': self._filter_keypair(key_pair)})
return {'keypairs': rval}
class Controller(wsgi.Controller):
def _add_key_name(self, req, servers):
for server in servers:
db_server = req.get_db_instance(server['id'])
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in its 'show'/'detail' methods.
server['key_name'] = db_server['key_name']
def _show(self, req, resp_obj):
if 'server' in resp_obj.obj:
server = resp_obj.obj['server']
self._add_key_name(req, [server])
@wsgi.extends
def show(self, req, resp_obj, id):
context = req.environ['nova.context']
if soft_authorize(context):
self._show(req, resp_obj)
@wsgi.extends
def detail(self, req, resp_obj):
        context = req.environ['nova.context']
if 'servers' in resp_obj.obj and soft_authorize(context):
servers = resp_obj.obj['servers']
self._add_key_name(req, servers)
class Keypairs(extensions.V3APIExtensionBase):
"""Keypair Support."""
name = "Keypairs"
alias = ALIAS
version = 1
def get_resources(self):
resources = [
extensions.ResourceExtension(ALIAS,
KeypairController())]
        return resources
def get_controller_extensions(self):
controller = Controller()
extension = extensions.ControllerExtension(self, 'servers', controller)
return [extension]
# use nova.api.extensions.server.extensions entry point to modify
# server create kwargs
# NOTE(gmann): This function is not supposed to use 'body_deprecated_param'
# parameter as this is placed to handle scheduler_hint extension for V2.1.
def server_create(self, server_dict, create_kwargs, body_deprecated_param):
create_kwargs['key_name'] = server_dict.get('key_name')
def get_server_create_schema(self):
return keypairs.server_create
|
carlmod/Analys24h
|
a24tim/tests/test_models.py
|
Python
|
agpl-3.0
| 7,591 | 0.001188 |
# -*- coding: utf-8 -*-
"""
a24tim.tests.test_models
************************
The model tests for the a24tim app.
:copyright: (c) 2012 by Carl Modén
:licence: AGPL3, see LICENSE for more details.
"""
from datetime import date
import math
import urllib2
from django.test import TestCase
import a24tim.models as models
import helpers
class TestPoints(TestCase):
"""Tests for the Point model."""
def setUp(self):
self.allegro = models.Point(number=580, name=u'Allegrogrund',
definition=u'Mellan två platser',
lat=59.5, lon=18.25, startpoint=True)
self.allegro.save()
def test_latstr(self):
"""Verifies that a latitude is correctly rendered as a string."""
self.assertEqual(self.allegro.latstr(), u'N 59° 30.00')
def test_latstr_south(self):
"""Move allegro to south half of earth and ches the coordinates there.
"""
self.allegro.lat = -self.allegro.lat
self.assertEqual(self.allegro.latstr(), u'S 59° 30.00')
def test_lonstr(self):
"""Verifies that a latitude is correctly rendered as a string."""
self.assertEqual(self.allegro.lonstr(), u'E 18° 15.00')
def test_lonstr_south(self):
"""Move allegro to south half of earth and ches the coordinates there.
"""
self.allegro.lon = -self.allegro.lon
self.assertEqual(self.allegro.lonstr(), u'W 18° 15.00')
def test_latstr_singledigit_minute(self):
"""Move allegro to someplace with a single digit minute and check the
coordinates there."""
self.allegro.lat = 59.1
self.assertEqual(self.allegro.latstr(), u'N 59° 06.00')
def test_unicode(self):
"""Verifies the unicode representation"""
self.assertEqual(
self.allegro.__unicode__(),
u"580, Allegrogrund, Mellan två platser, N 59° 30.00, E 18° 15.00, startpunkt")
def test_unicode_no_start_point(self):
"""Unicode representation where the point is not startpoint"""
self.allegro.startpoint = False
self.assertEqual(self.allegro.__unicode__(),
u"580, Allegrogrund, Mellan två platser, N 59° 30.00, E 18° 15.00")
def test_merging_of_related_points(self):
"""Checks that one can access all points distances from one query."""
self.stina = models.Point(number=579, name=u'Stinasgrund',
definition=u'En sten',
lat=59.7, lon=18.02)
self.stina.save()
self.linanas = models.Point(number=581, name=u'Linanäs',
definition=u'En prick', lat=59.8, lon=18.1)
self.linanas.save()
self.stina_allegro = models.Distance(from_point=self.stina,
to_point=self.allegro,
distance=5.8)
self.stina_allegro.save()
self.allegro_lina = models.Distance(from_point=self.allegro,
to_point=self.linanas,
distance=5.1)
self.allegro_lina.save()
distances = self.allegro.distances.all()
self.assertEqual(len(distances), 2)
class TestPointWithForecast(TestCase):
def setUp(self):
self.allegro = models.Point(number=580, name=u'Allegrogrund',
definition=u'Mellan två platser',
lat=59.5, lon=18.25, startpoint=True)
self.allegro.save()
self.patcher, self.mock_urlopen = helpers.patch_urlopen()
def tearDown(self):
self.patcher.stop()
def test_forecast_url_called(self):
"""Checks that the correct url is called"""
self.allegro.forecast()
self.mock_urlopen.assert_called_once_with('http://api.met.no/weatherapi/locationforecast/'
'1.8/?lat=59.5;lon=18.25;msl=10')
def test_forecast_url_not_found(self):
"""If the url cannot be loaded the we want handle som errors."""
self.mock_urlopen.side_effect = urllib2.URLError(
'A simulated problem with loading the url')
forecast = self.allegro.forecast()
self.assertIsNotNone(forecast[0])
    def test_forcast_new_protocol(self):
"""If the protocol is updated a status 203 will be sent."""
self.mock_urlopen.return_value.code = 203
forecast = self.allegro.forecast()
self.assertIsNotNone(forecast[0])
    def test_forcast_is_rendered(self):
"""Sees that a parsable prognosis is rendered"""
forecast = self.allegro.forecast()
# Check some sample values
self.assertEqual(forecast[1]['wind_direction'], 'NE')
self.assertEqual(str(forecast[1]['pressure']), str(1012.4))
def test_forcast_is_not_xml(self):
"""Sees what happens of the forcast is not valid xml."""
self.mock_urlopen.return_value.read.return_value = u"a\nstring\n"
forecast = self.allegro.forecast()
self.assertIsNotNone(forecast[0])
class TestSailings(TestCase):
def test_unicode(self):
"""Checking the unicode representation of a sailing"""
sail = models.Sailing(slug='2011h', finish_date=date(2011, 8, 3))
self.assertEqual(sail.__unicode__(), u'2011h, 2011-08-03')
class TestDistance(TestCase):
def setUp(self):
self.allegro = models.Point(number=580, name=u'Allegrogrund',
definition=u'Mellan två platser',
lat=59.0, lon=18.0, startpoint=True)
self.stina = models.Point(number=579, name=u'Stinasgrund',
definition=u'En sten', lat=59.1, lon=18.0)
self.allegro.save()
self.stina.save()
self.distance = models.Distance(from_point=self.stina,
to_point=self.allegro,
distance=4.2)
def test_unicode(self):
"""Checking the unicode representation of a sailing."""
representation = self.distance.__unicode__()
self.assertIn(u'580', representation)
self.assertIn(u'579', representation)
self.assertIn(u'4.2', representation)
def test_save_order(self):
"""Check that the order of from and to is in order."""
self.distance.save()
self.assertEqual(self.distance.from_point, self.stina)
self.assertEqual(self.distance.to_point, self.allegro)
def test_save_order_swapped(self):
"""Check that the order of from and to swaps to be in order."""
self.distance = models.Distance(from_point=self.allegro,
to_point=self.stina,
distance=4.2)
self.distance.save()
self.assertEqual(self.distance.from_point, self.stina)
self.assertEqual(self.distance.to_point, self.allegro)
def test_birds_distance(self):
"""Check approx forumla for north-south distances."""
self.assertAlmostEqual(self.distance.birds_distance(), 6, places=2)
def test_birds_distance_east_west(self):
"""Moves one point of the distance to check that distances in pure
east west direction also works."""
self.stina.lat = 59
self.stina.lon = 18.1
self.assertAlmostEqual(self.distance.birds_distance(),
6 * math.cos(math.radians(59)), places=2)
|
bonnieblueag/farm_log
|
core/urls.py
|
Python
|
gpl-3.0
| 158 | 0.006329 |
from django.conf.urls import url
from core.views import add_feedback
urlpatterns = [
    url('^add/core/feedback', name='add_feedback', view=add_feedback),
]
|
rahulpalamuttam/weld
|
examples/python/nditer/nditer_test.py
|
Python
|
bsd-3-clause
| 3,542 | 0.006494 |
import weldnumpy as wn
import numpy as np
def assert_correct(np_a, z):
'''
common part of the check.
'''
shape = []
for s in z.shape:
shape.append(s)
shape = tuple(shape)
np_a = np.reshape(np_a, shape)
for i in range((z.shape[0])):
for j in range(z.shape[1]):
assert np_a[i][j] == z[i][j]
def test_view():
'''
Adding the iter code to a view.
In general, do we deal correctly with wn.array(view)??
'''
orig = np.random.rand(20,20)
a = orig[3:15:1,4:20:2]
# orig = np.random.rand(20,20,20)
# a = orig[3:15:3,:,:]
print(a.flags)
assert not a.flags.contiguous
a = wn.array(a)
z = np.copy(a)
shapes = a.weldobj.update(np.array(list(a.shape)))
strides = []
for s in a.strides:
strides.append(s/z.itemsize)
strides = a.weldobj.update(np.array(strides))
end = 1
for s in a.shape:
end = end*s
end = end
iter_code = 'result(for(nditer({arr}, 0L, {end}L, 1L, {shapes}, {strides}), appender, \
|b, i, e| merge(b,exp(e))))'.format(shapes=shapes, strides=strides, end=str(end), arr=a.name)
a.weldobj.weld_code = iter_code
z = np.exp(z)
# convert the data represented by weldarray 'a', to a multi-dimensional numpy array of shape as z,
# and then compare the values.
np_a = a._eval()
assert_correct(np_a, z)
def test_start():
'''
Has a different start from the base array.
'''
orig = np.random.rand(20,20)
a = orig[0:20:1,0:20:1]
start = (wn.addr(a) - wn.addr(orig)) / a.itemsize
orig = wn.array(orig)
z = np.copy(a)
z = np.exp(z)
shapes = orig.weldobj.update(np.array(list(a.shape)))
strides = []
for s in a.strides:
strides.append(s/8)
strides = orig.weldobj.update(np.array(strides))
end = 1
for s in a.shape:
end = end*s
end = end + start
    iter_code = 'result(for(nditer({arr}, {start}L, {end}L, 1L, {shapes}, {strides}), appender, \
|b, i, e| merge(b,exp(e))))'.format(shapes=shapes, strides=strides, end=str(end),
start=str(start), arr=orig.name)
orig.weldobj.weld_code = iter_code
np_a = orig._eval()
assert_correct(np_a, z)
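# As the two tests above suggest, the weld nditer signature appears to be
# nditer(arr, start, end, step, shapes, strides), with start, end and strides
# expressed in elements rather than bytes (hence the division by itemsize);
# this is inferred from usage here, not from weld documentation.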
def test_zip():
'''
Has a different start from the base array.
'''
orig = np.random.rand(20,20)
orig2 = np.random.rand(20,20)
a = orig[5:20:1,3:20:2]
b = orig2[5:20:1,3:20:2]
start = (wn.addr(a) - wn.addr(orig)) / a.itemsize
orig = wn.array(orig)
# copying so we can test them later.
z = np.copy(a)
z2 = np.copy(b)
# added orig2 to orig's weldobject.
orig_2_name = orig.weldobj.update(orig2)
shapes = orig.weldobj.update(np.array(list(a.shape)))
strides = []
for s in a.strides:
strides.append(s/8)
strides = orig.weldobj.update(np.array(strides))
end = 1
for s in a.shape:
end = end*s
end = end + start
iter_code = 'result(for(zip(nditer({arr}, {start}l, {end}l, 1l, {shapes}, {strides}), \
nditer({arr2}, {start}l, {end}l, 1l, {shapes}, {strides})), \
appender, |b, i, e| merge(b,e.$0+e.$1)))'.format(shapes=shapes, strides=strides, end=str(end),
start=str(start), arr=orig.name, arr2=orig_2_name)
orig.weldobj.weld_code = iter_code
# gives us a numpy array after evaluating the nditer code above.
np_a = orig._eval()
# update the copied array.
z3 = z+z2;
# test values are equal.
assert_correct(np_a, z3)
# few different tests.
test_view()
test_start()
test_zip()
|
andrebellafronte/stoq
|
stoq/gui/calendar.py
|
Python
|
gpl-2.0
| 17,713 | 0.000847 |
# -*- Mode: Python; coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2011 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
"""
stoq/gui/calendar.py:
Calendar application.
"""
import urllib
from dateutil.parser import parse
from dateutil.relativedelta import MO, relativedelta
from dateutil.tz import tzlocal, tzutc
import gtk
from stoqlib.api import api
from stoqlib.domain.person import Client
from stoqlib.gui.editors.callseditor import CallsEditor
from stoqlib.gui.editors.paymenteditor import (InPaymentEditor,
OutPaymentEditor)
from stoqlib.gui.editors.workordereditor import WorkOrderEditor
from stoqlib.gui.stockicons import (STOQ_CALENDAR_TODAY,
STOQ_CALENDAR_WEEK,
STOQ_CALENDAR_MONTH,
STOQ_CALENDAR_LIST)
from stoqlib.gui.utils.keybindings import get_accels
from stoqlib.gui.widgets.webview import WebView
from stoqlib.lib import dateutils
from stoqlib.lib.daemonutils import start_daemon
from stoqlib.lib.defaults import get_weekday_start
from stoqlib.lib.translation import stoqlib_gettext as _
from stoq.gui.shell.shellapp import ShellApp
def parse_javascript_date(jsdate):
dt = parse(jsdate, fuzzy=True)
dt = dt.replace(tzinfo=tzlocal())
date = dt.astimezone(tzutc())
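    # JavaScript Date months are zero-based, so the month in the date string
    # handed over by the calendar widget is one ahead of the real month;
    # shift it back (rationale inferred from the caller, not documented here).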
date += relativedelta(months=-1)
return date
class CalendarView(WebView):
def __init__(self, app):
self._loaded = False
WebView.__init__(self)
self.app = app
self.get_view().connect(
'load-finished',
self._on_view__document_load_finished)
self._load_user_settings()
def _load_finished(self):
self._startup()
self._loaded = True
view = self.get_view()
view.connect('size-allocate', self._on_view__size_allocate)
x, y, width, height = view.get_allocation()
self._update_calendar_size(width, height)
def _startup(self):
options = {}
options['monthNames'] = dateutils.get_month_names()
options['monthNamesShort'] = dateutils.get_short_month_names()
options['dayNames'] = dateutils.get_day_names()
options['dayNamesShort'] = dateutils.get_short_day_names()
options['buttonText'] = {"today": _('today'),
"month": _('month'),
"week": _('week'),
"day": _('day')}
options['defaultView'] = api.user_settings.get(
'calendar-view', 'month')
# FIXME: This should not be tied to the language, rather be
# picked up from libc, but it's a bit of work to translate
# one into another so just take a shortcut
options['columnFormat'] = {
# month column format, eg "Mon", see:
# http://arshaw.com/fullcalendar/docs/text/columnFormat/
'month': _('ddd'),
# week column format: eg, "Mon 9/7", see:
# http://arshaw.com/fullcalendar/docs/text/columnFormat/
'week': _('ddd M/d'),
# day column format : eg "Monday 9/7", see:
# http://arshaw.com/fullcalendar/docs/text/columnFormat/
'day': _('dddd M/d'),
}
options['timeFormat'] = {
# for agendaWeek and agendaDay, eg "5:00 - 6:30", see:
# http://arshaw.com/fullcalendar/docs/text/timeFormat/
'agenda': _('h:mm{ - h:mm}'),
# for all other views, eg "7p", see:
# http://arshaw.com/fullcalendar/docs/text/timeFormat/
'': _('h(:mm)t'),
}
options['titleFormat'] = {
# month title, eg "September 2009", see:
# http://arshaw.com/fullcalendar/docs/text/titleFormat/
'month': _('MMMM yyyy'),
# week title, eg "Sep 7 - 13 2009" see:
# http://arshaw.com/fullcalendar/docs/text/titleFormat/
'week': _("MMM d[ yyyy]{ '—'[ MMM] d yyyy}"),
# day time, eg "Tuesday, Sep 8, 2009" see:
# http://arshaw.com/fullcalendar/docs/text/titleFormat/
'day': _('dddd, MMM d, yyyy'),
}
if get_weekday_start() == MO:
firstday = 1
else:
firstday = 0
options['firstDay'] = firstday
options['isRTL'] = (
gtk.widget_get_default_direction() == gtk.TEXT_DIR_RTL)
options['data'] = self._show_events
options['loading_msg'] = _('Loading calendar content, please wait...')
self.js_function_call('startup', options)
self._update_title()
def _calendar_run(self, name, *args):
if not self._loaded:
return
self.js_function_call("$('#calendar').fullCalendar", name, *args)
def _load_daemon_path(self, path):
uri = '%s/%s' % (self._daemon_uri, path)
self.load_uri(uri)
def _load_user_settings(self):
events = api.user_settings.get('calendar-events', {})
self._show_events = dict(
in_payments=events.get('in-payments', True),
out_payments=events.get('out-payments', True),
purchase_orders=events.get('purchase-orders', True),
client_calls=events.get('client-calls', True),
client_birthdays=events.get('client-birthdays', True),
work_orders=events.get('work-orders', True),
)
def _save_user_settings(self):
events = api.user_settings.get('calendar-events', {})
events['in-payments'] = self._show_events['in_payments']
events['out-payments'] = self._show_events['out_payments']
events['purchase-orders'] = self._show_events['purchase_orders']
        events['client-calls'] = self._show_events['client_calls']
events['client-birthdays'] = self._show_events['client_birthdays']
events['work-orders'] = self._show_events['work_orders']
    def _update_calendar_size(self, width, height):
self._calendar_run('option', 'aspectRatio', float(width) / height)
def _update_title(self):
# Workaround to get the current calendar date
view = self.get_view()
view.execute_script("document.title = $('.fc-header-title').text()")
title = view.get_property('title')
self.app.date_label.set_markup(
'<big><b>%s</b></big>' % api.escape(title))
#
# Callbacks
#
def _on_view__document_load_finished(self, view, frame):
self._load_finished()
def _on_view__size_allocate(self, widget, req):
x, y, width, height = req
self._update_calendar_size(width, height)
#
# WebView
#
def web_open_uri(self, kwargs):
if kwargs['method'] == 'changeView':
view = kwargs['view']
if view == 'basicDay':
self.app.ViewDay.set_active(True)
jsdate = urllib.unquote(kwargs['date'])
date = parse_javascript_date(jsdate)
self._calendar_run('gotoDate', date.year, date.month, date.day)
#
# Public API
#
def set_daemon_uri(self, uri):
self._daemon_uri = uri
def load(self):
self._load_daemon_path('web/static/calendar-app.html')
def go_prev(self):
self._calendar_run('prev')
self._update_title()
def show_today(self):
|
vprusso/us_patent_scraper
|
patent_spider/patent_spider/models.py
|
Python
|
gpl-2.0
| 80 | 0.0125 |
# -*- coding: utf-8 -*-
"""Models for database connection"""
import settings
|
AunShiLord/Tensor-analysis
|
tensor_analysis/tests/test_tensor_fields.py
|
Python
|
mit
| 17,326 | 0.006065 |
# -*- coding: utf-8 -*-
from sympy.matrices import Matrix
from tensor_analysis.arraypy import Arraypy, TensorArray, list2arraypy, list2tensor
from tensor_analysis.tensor_fields import df, grad, curl, diverg, lie_xy, dw, \
lie_w, inner_product, g_tensor, g_wedge
from sympy import symbols, cos, sin
def test_df_varlist():
x1, x2, x3 = symbols('x1 x2 x3')
f = x1**2 * x2 + sin(x2 * x3 - x2)
var_list = [x1, x2, x3]
assert df(f, var_list) == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(df(f, var_list), list)
assert df(f, var_list, 'l') == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(df(f, var_list, 'l'), list)
assert df(f, var_list, 'a') == list2arraypy(
[2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)])
assert isinstance(df(f, var_list, 'a'), Arraypy)
assert df(f, var_list, 't') == list2tensor(
[2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)])
assert isinstance(df(f, var_list, 't'), TensorArray)
assert df(f, var_list, 't').type_pq == (0, 1)
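# As the assertions above show, the output_type flag picks the container:
# 'l' (the default) returns a plain list, 'a' an Arraypy, and 't' a
# TensorArray whose valence type_pq == (0, 1), i.e. a covector.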
def test_df_var_tnsr0():
x1, x2, x3 = symbols('x1 x2 x3')
f = x1**2 * x2 + sin(x2 * x3 - x2)
var_tnsr0 = TensorArray(Arraypy(3), (1))
var_tnsr0[0] = x1
var_tnsr0[1] = x2
var_tnsr0[2] = x3
assert df(f, var_tnsr0) == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(df(f, var_tnsr0), list)
assert df(f, var_tnsr0, 'l') == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(df(f, var_tnsr0, 'l'), list)
assert df(f, var_tnsr0, 'a') == list2arraypy(
[2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)])
assert isinstance(df(f, var_tnsr0, 'a'), Arraypy)
assert df(f, var_tnsr0, 't') == list2tensor(
[2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)])
assert isinstance(df(f, var_tnsr0, 't'), TensorArray)
assert df(f, var_tnsr0, 't').type_pq == (0, 1)
def test_df_var_tnsr1():
x1, x2, x3 = symbols('x1 x2 x3')
f = x1**2 * x2 + sin(x2 * x3 - x2)
var_tnsr1 = Arraypy([1, 3, 1]).to_tensor(1)
var_tnsr1[1] = x1
var_tnsr1[2] = x2
var_tnsr1[3] = x3
res_ar1 = Arraypy([1, 3, 1])
res_ar1[1] = 2 * x1 * x2
res_ar1[2] = x1**2 + (x3 - 1) * cos(x2 * x3 - x2)
res_ar1[3] = x2 * cos(x2 * x3 - x2)
res_ten1 = res_ar1.to_tensor(-1)
assert df(f, var_tnsr1) == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(df(f, var_tnsr1), list)
assert df(f, var_tnsr1, 'l') == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(df(f, var_tnsr1, 'l'), list)
assert df(f, var_tnsr1, 'a') == res_ar1
assert isinstance(df(f, var_tnsr1, 'a'), Arraypy)
assert df(f, var_tnsr1, 't') == res_ten1
assert isinstance(df(f, var_tnsr1, 't'), TensorArray)
assert df(f, var_tnsr1, 't').type_pq == (0, 1)
def test_diverg_var_x_list():
x1, x2, x3 = symbols('x1 x2 x3')
X = [x1 * x2**3, x2 - cos(x3), x3**3 - x1]
var = [x1, x2, x3]
g = Matrix([[2, 1, 0], [1, 3, 0], [0, 0, 1]])
ten = Arraypy([2, 3, 0]).to_tensor((-1, -1))
ten[0, 0] = 2
ten[0, 1] = 1
ten[0, 2] = 0
ten[1, 0] = 1
ten[1, 1] = 3
ten[1, 2] = 0
ten[2, 0] = 0
ten[2, 1] = 0
ten[2, 2] = 1
ten1 = Arraypy([2, 3, 1]).to_tensor((-1, -1))
ten1[1, 1] = 2
ten1[1, 2] = 1
ten1[1, 3] = 0
ten1[2, 1] = 1
ten1[2, 2] = 3
ten1[2, 3] = 0
ten1[3, 1] = 0
ten1[3, 2] = 0
ten1[3, 3] = 1
assert diverg(X, var) == x2**3 + 3 * x3**2 + 1
assert diverg(X, var, g) == x2**3 + 3 * x3**2 + 1
assert diverg(X, var, ten) == x2**3 + 3 * x3**2 + 1
assert diverg(X, var, ten1) == x2**3 + 3 * x3**2 + 1
def test_grad_varlist():
x1, x2, x3 = symbols('x1 x2 x3')
f = x1**2 * x2 + sin(x2 * x3 - x2)
var1 = [x1, x2, x3]
res_ar1 = Arraypy([1, 3, 0])
res_ar1[0] = 2 * x1 * x2
res_ar1[1] = x1**2 + (x3 - 1) * cos(x2 * x3 - x2)
res_ar1[2] = x2 * cos(x2 * x3 - x2)
res_ten1 = res_ar1.to_tensor(1)
res_ar = Arraypy([1, 3, 0])
res_ar[0] = -x1**2 / 5 + 6 * x1 * x2 / 5 - (x3 - 1) * cos(x2 * x3 - x2) / 5
res_ar[1] = 2 * x1**2 / 5 - 2 * x1 * x2 / \
5 + cos(x2 * x3 - x2) * 2 * (x3 - 1) / 5
res_ar[2] = x2 * cos(x2 * x3 - x2)
res_ten = res_ar.to_tensor(1)
g = Matrix([[2, 1, 0], [1, 3, 0], [0, 0, 1]])
assert grad(f, var1, output_type='l') == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(grad(f, var1, output_type='l'), list)
assert grad(f, var1) == [
2 * x1 * x2, x1**2 + (x3 - 1) * cos(x2 * x3 - x2), x2 *
cos(x2 * x3 - x2)]
assert isinstance(grad(f, var1), list)
assert grad(f, var1, output_type='a') == res_ar1
assert isinstance(grad(f, var1, output_type='t'), Arraypy)
assert grad(f, var1, output_type='t') == res_ten1
assert isinstance(grad(f, var1, output_type='t'), TensorArray)
assert grad(f, var1, output_type='t').type_pq == (1, 0)
assert str(
grad(
f,
var1,
g,
output_type='l')) == '[-x1**2/5 + 6*x1*x2/5 - (x3 - 1)*cos(x2*x3 - x2)/5, 2*x1**2/5 - 2*x1*x2/5 + 2*(x3 - 1)*cos(x2*x3 - x2)/5, x2*cos(x2*x3 - x2)]'
assert isinstance(grad(f, var1, g, output_type='l'), list)
assert grad(f, var1, g, output_type='a') == res_ar
assert isinstance(grad(f, var1, g, output_type='a'), Arraypy)
assert grad(f, var1, g, output_type='t') == res_ten
assert isinstance(grad(f, var1, g, output_type='t'), TensorArray)
assert grad(f, var1, g, output_type='t').type_pq == (1, 0)
def test_grad_gtnsr():
x1, x2, x3 = symbols('x1 x2 x3')
f = x1**2 * x2 + sin(x2 * x3 - x2)
var1 = [x1, x2, x3]
k1 = Arraypy([1, 3, 0]).to_tensor(1)
k1[0] = x1
k1[1] = x2
k1[2] = x3
    # g is given as a tensor, indexed from 1, with var as a list
a = Arraypy([2, 3, 1])
b = a.to_tensor((-1, -1))
b[1, 1] = 2
b[1, 2] = 1
b[1, 3] = 0
b[2, 1] = 1
b[2, 2] = 3
b[2, 3] = 0
b[3, 1] = 0
b[3, 2] = 0
b[3, 3] = 1
res_ar = Arraypy([1, 3, 1])
res_ar[1] = -x1**2 / 5 + 6 * x1 * x2 / 5 - (x3 - 1) * cos(x2 * x3 - x2) / 5
res_ar[2] = 2 * x1**2 / 5 - 2 * x1 * x2 / \
5 + cos(x2 * x3 - x2) * 2 * (x3 - 1) / 5
res_ar[3] = x2 * cos(x2 * x3 - x2)
res_ten = res_ar.to_tensor(1)
res_ar1 = Arraypy([1, 3, 0])
res_ar1[0] = 2 * x1 * x2
res_ar1[1] = x1**2 + (x3 - 1) * cos(x2 * x3 - x2)
res_ar1[2] = x2 * cos(x2 * x3 - x2)
assert str(
grad(
f,
var1,
b,
'l')) == '[-x1**2/5 + 6*x1*x2/5 - (x3 - 1)*cos(x2*x3 - x2)/5, 2*x1**2/5 - 2*x1*x2/5 + 2*(x3 - 1)*cos(x2*x3 - x2)/5, x2*cos(x2*x3 - x2)]'
assert isinstance(grad(f, var1, b, 'l'), list)
assert grad(f, var1, b, 'a') == res_ar
assert isinstance(grad(f, var1, b, 'a'), Arraypy)
assert grad(f, k1, output_type='a') == res_ar1
assert isinstance(grad(f, k1, output_type='a'), Arraypy)
assert grad(f, var1, b, 't') == res_ten
assert isinstance(grad(f, var1, b, 't'), TensorArray)
assert grad(f, var1, b, 't').type_pq == (1, 0)
assert grad(f, var1, b) == res_ten
assert isinstance(grad(f, var1, b, 't'), TensorArray)
assert grad(f, var1, b, 't').type_pq == (1, 0)
def test_grad_gm_vl():
x1, x2, x3 = symbols('x1 x2 x3')
f = x1**2 * x2 + sin(x2 * x3 - x2)
var1 = [x1, x2, x3]
g = Matrix([[2, 1, 0], [1, 3, 0], [0, 0, 1]])
k0 = Arraypy([1, 3, 1]).to_tensor(1)
k0[1] = x1
k0[2] = x2
k0[3] = x3
res_ar = Arraypy([1, 3, 0])
res_ar[0] = -x1**2 / 5 + 6 * x1 * x2 / 5 - (x3 - 1) * cos(x2 * x3 - x2) / 5
res_ar[1] = 2 * x1**2 / 5 - 2 * x1 * x2 / \
5 + cos(x2 * x3 - x2)
|
peap/djarzeit
|
djarzeit/wsgi.py
|
Python
|
mit
| 1,425 | 0.000702 |
"""
WSGI config for djarzeit project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "djarzeit.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djarzeit.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
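# As a minimal sketch of that idea (the HealthCheckMiddleware name and the
# /healthz path are illustrative, not part of this project):
#
# class HealthCheckMiddleware(object):
#     def __init__(self, app):
#         self.app = app
#
#     def __call__(self, environ, start_response):
#         # Answer a trivial liveness probe without invoking Django.
#         if environ.get('PATH_INFO') == '/healthz':
#             start_response('200 OK', [('Content-Type', 'text/plain')])
#             return [b'ok']
#         return self.app(environ, start_response)
#
# application = HealthCheckMiddleware(application)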
|
DavidLutton/EngineeringProject
|
labtoolkit/SignalGenerator/AgilentN5182A.py
|
Python
|
mit
| 417 | 0 |
from .SCPISignalGenerator import SCPISignalGenerator
from .helper import SignalGenerator, amplitudelimiter
class AgilentN5182A(SCPISignalGenerator, SignalGenerator):
"""Agilent N5182A 100e3, 6e9.
.. figure:: images/SignalGenerator/AgilentN5182A.jpg
"""
def __init__(self, inst):
super().__init__(inst)
self.inst.read_termination = '\n'
self.inst.write_termination = '\n'
|
nikhilpanicker/SecureVault
|
tools/modified/androguard/core/binaries/idapipe.py
|
Python
|
gpl-3.0
| 6,602 | 0.010906 |
# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE, STDOUT
import os, sys
import xmlrpclib
import cPickle
class _Method(object):
def __init__(self, proxy, name):
self.proxy = proxy
self.name = name
def __call__(self, *args):
#print "CALL", self.name, args
z = getattr( self.proxy, self.name, None )
#print "SEND", repr(cPickle.dumps( args ) )
try:
if len(args) == 1:
ret = z( cPickle.dumps( args[0] ) )
else:
ret = z( cPickle.dumps( args ) )
#print "RECEIVE", repr(ret)
return cPickle.loads( ret )
except xmlrpclib.ProtocolError:
return []
class MyXMLRPC(object):
def __init__(self, proxy):
self.proxy = proxy
def __getattr__(self, name):
return _Method(self.proxy, name)
class BasicBlock(object):
def __init__(self, ins):
self.ins = ins
def show(self):
for i in self.ins:
print i
class Function(object):
def __init__(self, name, start_ea, instructions, information):
#print name, start_ea
self.name = name
self.start_ea = start_ea
self.information = information
self.basic_blocks = []
self.instructions = instructions
r = {}
idx = 0
for i in instructions:
r[ i[0] ] = idx
idx += 1
for i in information[0]:
try:
start = r[i[0]]
end = r[i[1]] + 1
self.basic_blocks.append( BasicBlock( instructions[start:end] ) )
except KeyError:
pass
def get_instructions(self):
return [ i for i in self.instructions ]
def run_ida(idapath, wrapper_init_path, binpath):
os.environ["TVHEADLESS"] = "1"
pid = os.fork()
if pid == 0:
wrapper_path = "-S" + wrapper_init_path
l = [ idapath, "-A", wrapper_path, binpath ]
print l
compile = Popen(l, stdout=open('/dev/null', 'w'), stderr=STDOUT)
stdout, stderr = compile.communicate()
# print stdout, stderr
sys.exit(0)
class IDAPipe(object):
def __init__(self, idapath, binpath, wrapper_init_path):
self.idapath = idapath
self.binpath = binpath
self.proxy = None
run_ida(self.idapath, self.binpath, wrapper_init_path)
while 1:
try:
self.proxy = xmlrpclib.ServerProxy("http://localhost:9000/")
self.proxy.is_connected()
break
except:
pass
#print self.proxy
self.proxy = MyXMLRPC( self.proxy )
def quit(self):
try:
self.proxy.quit()
except:
pass
def _build_functions(self, functions):
F = {}
for i in functions:
F[ i ] = Function( functions[i][0], i, functions[i][1:-1], functions[i][-1] )
return F
def get_quick_functions(self):
functions = self.get_raw()
return self._build_functions( functions )
def get_raw(self):
return self.proxy.get_raw()
def get_nb_functions(self):
return len(self.proxy.Functions())
def get_functions(self):
for function_ea in self.proxy.Functions():
self.get_function_addr( function_ea )
def get_function_name(self, name):
function_ea = self.proxy.get_function( name )
self.get_function_addr( function_ea )
def get_function_addr(self, function_ea):
if function_ea == -1:
return
f_start = function_ea
f_end = self.proxy.GetFunctionAttr(function_ea, 4) #FUNCATTR_END)
edges = set()
boundaries = set((f_start,))
for head in self.proxy.Heads(f_start, f_end):
if self.proxy.isCode( self.proxy.GetFlags( head ) ):
refs = self.proxy.CodeRefsFrom(head, 0)
refs = set(filter(lambda x: x>=f_start and x<=f_end, refs))
#print head, f_end, refs, self.proxy.GetMnem(head), self.proxy.GetOpnd(head, 0), self.proxy.GetOpnd(head, 1)
if refs:
next_head = self.proxy.NextHead(head, f_end)
if self.proxy.isFlow(self.proxy.GetFlags(next_head)):
refs.add(next_head)
# Update the boundaries found so far.
boundaries.update(refs)
# For each of the references found, and edge is
# created.
for r in refs:
# If the flow could also come from the address
# previous to the destination of the branching
# an edge is created.
if self.proxy.isFlow(self.proxy.GetFlags(r)):
edges.add((self.proxy.PrevHead(r, f_start), r))
edges.add((head, r))
#print edges, boundaries
# Let's build the list of (startEA, startEA) couples
# for each basic block
sorted_boundaries = sorted(boundaries, reverse = True)
end_addr = self.proxy.PrevHead(f_end, f_start)
bb_addr = []
for begin_addr in sorted_boundaries:
bb_addr.append((begin_addr, end_addr))
# search the next end_addr which could be
# farther than just the previous head
# if data are interlaced in the code
# WARNING: it assumes it won't epicly fail ;)
end_addr = self.proxy.PrevHead(begin_addr, f_start)
while not self.proxy.isCode(self.proxy.GetFlags(end_addr)):
end_addr = self.proxy.PrevHead(end_addr, f_start)
# And finally return the result
bb_addr.reverse()
#print bb_addr, sorted(edges)
def display_function(f):
print f, f.name, f.information
for i in f.basic_blocks:
print i
i.show()
|
elin-moco/ffclub
|
ffclub/person/migrations/0005_auto__add_field_person_education__add_field_person_birthday.py
|
Python
|
bsd-3-clause
| 5,302 | 0.007922 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
        # Adding field 'Person.education'
db.add_column('person_person', 'education',
self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True),
keep_default=False)
# Adding field 'Person.birthday'
db.add_column('person_person', 'birthday',
self.gf('django.db.models.fields.DateTimeField')(null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Person.education'
db.delete_column('person_person', 'education')
# Deleting field 'Person.birthday'
db.delete_column('person_person', 'birthday')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'person.person': {
'Meta': {'object_name': 'Person'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'birthday': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'education': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'fullname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '7'}),
'nickname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'occupation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'normal'", 'max_length': '20'}),
'subscribing': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
}
}
complete_apps = ['person']
|
rickyrish/rickyblog
|
build/lib.linux-i686-2.7/publicaciones/urls.py
|
Python
|
gpl-2.0
| 226 | 0.00885 |
from django.conf.urls import patterns, url
from publicaciones import views
urlpatterns = patterns('',
url(r'^$', views.index, name='index'),
url(r'^(?P<articulo_id>\d+)/$', views.ver_articulo, name='ver_articulo'),
)
|
calpaterson/recall
|
src/recall/people.py
|
Python
|
agpl-3.0
| 3,665 | 0.00191 |
# Recall is a program for storing bookmarks of different things
# Copyright (C) 2012 Cal Paterson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import uuid
from bottle import Bottle, request, response, abort
import bcrypt
from recall.data import whitelist, blacklist
from recall import convenience as c
from recall import plugins, jobs, messages
app = Bottle()
app.install(plugins.exceptions)
app.install(plugins.ppjson)
app.install(plugins.auth)
app.install(plugins.cors)
app.error_handler = plugins.handler_dict
logger = c.logger("people")
@app.get("/")
def users():
abort(503, "Not yet implemented")
@app.get("/<who>/")
def user_(who):
try:
return whitelist(c.db().users.find_one({"email": who}), [
"email",
"firstName",
"pseudonym"
])
except TypeError:
logger.warn("Asked about {email}, but that is not a user".format(
email=who))
abort(404, "User not found")
@app.get("/<who>/self")
def _self(who, user):
if who != user["email"]:
response.status = 400
else:
return whitelist(user, [
"pseudonym",
"firstName",
"surname",
"email",
"private_email"])
@app.post("/<who>/")
def request_invite(who):
# FIXME: Don't allow the pseudonym "public"
user = whitelist(request.json, [
"pseudonym",
"firstName",
"surname",
"private_email",
"token",
])
if "private_email" not in user:
abort(400, "You must provide a private_email field")
user["email_key"] = str(uuid.uuid4())
user["registered"] = c.unixtime()
user["email"] = who
c.db().users.ensure_index("email", unique=True)
c.db().users.insert(user, safe=True)
response.status = 202
logger.info("{email} subscribed".format(email=who))
jobs.enqueue(messages.SendInvite(user))
@app.post("/<who>/<email_key>")
def verify_email(who, email_key):
if "RECALL_TEST_MODE" in c.settings or "RECALL_DEBUG_MODE" in c.settings:
salt = bcrypt.gensalt(1)
else:
salt = bcrypt.gensalt()
password_hash = bcrypt.hashpw(request.json["password"], salt)
spec = {"email_key": email_key, "verified": {"$exists": False}}
update = {"$set": {"password_hash": password_hash,
"verified": c.unixtime()}}
success = c.db().users.update(spec, update, safe=True)["updatedExisting"]
if not success:
if c.db().users.find_one({"email_key": email_key}):
logger.warn("{email} tried to verify a second time".format(email=who))
abort(403, "Already verified")
else:
logger.warn("Someone tried to verify with a key, but it doesn't exist")
abort(404, "Don't know that key")
user = c.db().users.find_one({"email_key": email_key})
response.status = 201
return blacklist(user, ["_id", "email_key", "password_hash"])
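# Flow sketch, derived from the handlers above rather than separate docs:
# POST /<who>/ stores the pending user with a fresh uuid4 email_key and
# queues a SendInvite job; POST /<who>/<email_key> then bcrypt-hashes the
# supplied password, marks the account verified, and refuses to verify twice.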
|
apagac/cfme_tests
|
sprout/appliances/migrations/0008_appliance_uuid.py
|
Python
|
gpl-2.0
| 493 | 0.002028 |
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('appliances', '0007_provider_num_simultaneous_configuring'),
]
operations = [
migrations.AddField(
model_name='appliance',
name='uuid',
field=models.CharField(
help_text=b'UUID of the machine', max_length=36, null=True, blank=True
),
preserve_default=True,
),
]
|
yasharmaster/scancode-toolkit
|
src/packagedcode/phpcomposer.py
|
Python
|
apache-2.0
| 11,144 | 0.001884 |
#
# Copyright (c) 2015 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import
from __future__ import print_function
import codecs
import logging
import json
from collections import OrderedDict
from functools import partial
from commoncode import filetype
from commoncode import fileutils
from packagedcode import models
from packagedcode.utils import parse_repo_url
"""
Handle PHP composer packages, refer to https://getcomposer.org/
"""
logger = logging.getLogger(__name__)
# import sys
# logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
# logger.setLevel(logging.DEBUG)
class PHPComposerPackage(models.Package):
metafiles = ('composer.json')
filetypes = ('.json',)
mimetypes = ('application/json')
repo_types = (models.repo_phpcomposer,)
type = models.StringType(default='phpcomposer')
primary_language = models.StringType(default='PHP')
@classmethod
def recognize(cls, location):
return parse(location)
def is_phpcomposer_json(location):
return (filetype.is_file(location)
and fileutils.file_name(location).lower() == 'composer.json')
def parse(location):
"""
Return a Package object from a composer.json file or None.
"""
if not is_phpcomposer_json(location):
return
# mapping of top level composer.json items to the Package object field name
plain_fields = OrderedDict([
('name', 'name'),
('description', 'summary'),
('keywords', 'keywords'),
('version', 'version'),
('homepage', 'homepage_url'),
])
# mapping of top level composer.json items to a function accepting as arguments
# the composer.json element value and returning an iterable of key, values Package Object to update
field_mappers = OrderedDict([
('authors', author_mapper),
('license', licensing_mapper),
('require', dependencies_mapper),
('require-dev', dev_dependencies_mapper),
('repositories', repository_mapper),
('support', support_mapper),
])
with codecs.open(location, encoding='utf-8') as loc:
data = json.load(loc, object_pairs_hook=OrderedDict)
if not data.get('name') or not data.get('description'):
# a composer.json without name and description is not a usable PHP composer package
# name and description fields are required: https://getcomposer.org/doc/04-schema.md#name
return
package = PHPComposerPackage()
# a composer.json is at the root of a PHP composer package
base_dir = fileutils.parent_directory(location)
package.location = base_dir
package.metafile_locations = [location]
for source, target in plain_fields.items():
value = data.get(source)
if value:
if isinstance(value, basestring):
value = value.strip()
if value:
setattr(package, target, value)
for source, func in field_mappers.items():
logger.debug('parse: %(source)r, %(func)r' % locals())
value = data.get(source)
if value:
if isinstance(value, basestring):
value = value.strip()
if value:
func(value, package)
vendor_mapper(package) # Parse vendor from name value
return package
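# Illustrative call (the path is made up): parse('/tmp/pkg/composer.json')
# returns a PHPComposerPackage populated from the JSON, or None when the file
# is not a composer.json or lacks the required name/description fields.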
def licensing_mapper(licenses, package):
"""
Update package licensing and return package.
Licensing data structure has evolved over time and is a tad messy.
https://getcomposer.org/doc/04-schema.md#license
licenses is either:
- a string with:
- an SPDX id or expression { "license": "(LGPL-2.1 or GPL-3.0+)" }
- array:
"license": [
"LGPL-2.1",
"GPL-3.0+"
]
"""
if not licenses:
return package
if isinstance(licenses, basestring):
        package.asserted_licenses.append(models.AssertedLicense(license=licenses))
elif isinstance(licenses, list):
"""
"license": [
"LGPL-2.1",
"GPL-3.0+"
]
"""
for lic in licenses:
if isinstance(lic, basestring):
package.asserted_licenses.append(models.AssertedLicense(license=lic))
else:
# use the bare repr
if lic:
package.asserted_licenses.append(models.AssertedLicense(license=repr(lic)))
else:
# use the bare repr
package.asserted_licenses.append(models.AssertedLicense(license=repr(licenses)))
return package
def author_mapper(authors_content, package):
"""
Update package authors and return package.
https://getcomposer.org/doc/04-schema.md#authors
"""
authors = []
for name, email, url in parse_person(authors_content):
authors.append(models.Party(type=models.party_person, name=name, email=email, url=url))
package.authors = authors
return package
def support_mapper(support, package):
"""
Update support and bug tracking url.
https://getcomposer.org/doc/04-schema.md#support
"""
package.support_contacts = [support.get('email')]
package.bug_tracking_url = support.get('issues')
package.code_view_url = support.get('source')
return package
def vendor_mapper(package):
"""
    Vendor is part of the name element.
https://getcomposer.org/doc/04-schema.md#name
"""
name = package.name
if name and '/' in name:
vendors = name.split('/')
if vendors[0]:
package.vendors = [models.Party(name=vendors[0])]
return package
def repository_mapper(repos, package):
"""
https://getcomposer.org/doc/04-schema.md#repositories
"repositories": [
{
"type": "composer",
"url": "http://packages.example.com"
},
{
"type": "composer",
"url": "https://packages.example.com",
"options": {
"ssl": {
"verify_peer": "true"
}
}
},
{
"type": "vcs",
"url": "https://github.com/Seldaek/monolog"
},
{
"type": "pear",
"url": "https://pear2.php.net"
},
{
"type": "package",
"package": {
"name": "smarty/smarty",
"version": "3.1.7",
"dist": {
"url": "http://www.smarty.net/files/Smarty-3.1.7.zip",
"type": "zip"
},
"source": {
"url": "https://smarty-php.googlecode.com/svn/",
"type": "svn",
"reference": "tags/Smarty_3_1_7/distribution/"
}
}
}
]
"""
if not repos:
return package
if isinstance(repos, basestring):
p
|
317070/kaggle-heart
|
configurations/je_ss_smcrps_nrmsc_dropoutput.py
|
Python
|
mit
| 8,198 | 0.009758 |
"""Single slice vgg with normalised scale.
"""
import functools
import lasagne as nn
import numpy as np
import theano
import theano.tensor as T
import data_loader
import deep_learning_layers
import layers
import preprocess
import postprocess
import objectives
import theano_printer
import updates
# Random params
rng = np.random
take_a_dump = False # dump a lot of data in a pkl-dump file. (for debugging)
dump_network_loaded_data = False # dump the outputs from the dataloader (for debugging)
# Memory usage scheme
caching = None
# Save and validation frequency
validate_every = 10
validate_train_set = True
save_every = 10
restart_from_save = False
dump_network_loaded_data = False
# Training (schedule) parameters
# - batch sizes
batch_size = 32
sunny_batch_size = 4
batches_per_chunk = 16
AV_SLICE_PER_PAT = 11
num_epochs_train = 50 * AV_SLICE_PER_PAT
# - learning rate and method
base_lr = .0001
learning_rate_schedule = {
0: base_lr,
4*num_epochs_train/5: base_lr/10,
}
momentum = 0.9
build_updates = updates.build_adam_updates
# Preprocessing stuff
cleaning_processes = [
preprocess.set_upside_up,]
cleaning_processes_post = [
functools.partial(preprocess.normalize_contrast_zmuv, z=2)]
augmentation_params = {
"rotation": (-16, 16),
"shear": (0, 0),
"translation": (-8, 8),
"flip_vert": (0, 1),
"roll_time": (0, 0),
"flip_time": (0, 0),
}
preprocess_train = preprocess.preprocess_normscale
preprocess_validation = functools.partial(preprocess_train, augment=False)
preprocess_test = preprocess_train
sunny_preprocess_train = preprocess.sunny_preprocess_with_augmentation
sunny_preprocess_validation = preprocess.sunny_preprocess_validation
sunny_preprocess_test = preprocess.sunny_preprocess_validation
# Data generators
create_train_gen = data_loader.generate_train_batch
create_eval_valid_gen = functools.partial(data_loader.generate_validation_batch, set="validation")
create_eval_train_gen = functools.partial(data_loader.generate_validation_batch, set="train")
create_test_gen = functools.partial(data_loader.generate_test_batch, set=["validation", "test"])
# Input sizes
image_size = 128
data_sizes = {
"sliced:data:singleslice:difference:middle": (batch_size, 29, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice:difference": (batch_size, 29, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:ax": (batch_size, 30, 15, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:shape": (batch_size, 2,),
"sunny": (sunny_batch_size, 1, image_size, image_size)
# TBC with the metadata
}
# Objective
l2_weight = 0.000
l2_weight_out = 0.000
def build_objective(interface_layers):
# l2 regu on certain layers
l2_penalty = nn.regularization.regularize_layer_params_weighted(
interface_layers["regularizable"], nn.regularization.l2)
# build objective
return objectives.KaggleObjective(interface_layers["outputs"], penalty=l2_penalty)
# Testing
postprocess = postprocess.postprocess
test_time_augmentations = 100 * AV_SLICE_PER_PAT  # More augmentations since we only use single slices
# Architecture
def build_model():
#################
# Regular model #
#################
input_size = data_sizes["sliced:data:singleslice"]
l0 = nn.layers.InputLayer(input_size)
l1a = nn.layers.dnn.Conv2DDNNLayer(l0, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=64, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l1b = nn.layers.dnn.Conv2DDNNLayer(l1a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=64, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l1 = nn.layers.dnn.MaxPool2DDNNLayer(l1b, pool_size=(2,2), stride=(2,2))
l2a = nn.layers.dnn.Conv2DDNNLayer(l1, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=128, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l2b = nn.layers.dnn.Conv2DDNNLayer(l2a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=128, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l2 = nn.layers.dnn.MaxPool2DDNNLayer(l2b, pool_size=(2,2), stride=(2,2))
l3a = nn.layers.dnn.Conv2DDNNLayer(l2, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l3b = nn.layers.dnn.Conv2DDNNLayer(l3a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l3c = nn.layers.dnn.Conv2DDNNLayer(l3b, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l3 = nn.layers.dnn.MaxPool2DDNNLayer(l3c, pool_size=(2,2), stride=(2,2))
l4a = nn.layers.dnn.Conv2DDNNLayer(l3, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l4b = nn.layers.dnn.Conv2DDNNLayer(l4a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l4c = nn.layers.dnn.Conv2DDNNLayer(l4b, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l4 = nn.layers.dnn.MaxPool2DDNNLayer(l4c, pool_size=(2,2), stride=(2,2))
    l5a = nn.layers.dnn.Conv2DDNNLayer(l4, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l5b = nn.layers.dnn.Conv2DDNNLayer(l5a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l5c = nn.layers.dnn.Conv2DDNNLayer(l5b, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l5 = nn.layers.dnn.MaxPool2DDNNLayer(l5c, pool_size=(2,2), stride=(2,2))
# Systole Dense layers
ldsys1 = nn.layers.DenseLayer(l5, num_units=1024, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
ldsys1drop = nn.layers.dropout(ldsys1, p=0.5)
ldsys2 = nn.layers.DenseLayer(ldsys1drop, num_units=1024, W=nn.init.Orthogonal("relu"),b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
ldsys2drop = nn.layers.dropout(ldsys2, p=0.5)
ldsys3 = nn.layers.DenseLayer(ldsys2drop, num_units=600, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.softmax)
    ldsys3drop = nn.layers.dropout(ldsys3, p=0.5)  # dropout at the output might encourage adjacent neurons to correlate
    l_systole = layers.CumSumLayer(ldsys3drop)  # feed the dropped-out output forward, as in the diastole branch
# Diastole Dense layers
lddia1 = nn.layers.DenseLayer(l5, num_units=1024, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
lddia1drop = nn.layers.dropout(lddia1, p=0.5)
lddia2 = nn.layers.DenseLayer(lddia1drop, num_units=1024, W=nn.init.Orthogonal("relu"),b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
lddia2drop = nn.layers.dropout(lddia2, p=0.5)
lddia3 = nn.layers.DenseLayer(lddia2drop, num_units=600, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.softmax)
    lddia3drop = nn.layers.dropout(lddia3, p=0.5)  # dropout at the output might encourage adjacent neurons to correlate
l_diastole = layers.CumSumLayer(lddia3drop)
return {
"inputs":{
"sliced:data:singleslice": l0
},
"outputs": {
"systole": l_systole,
"diastole": l_diastole,
},
"regularizable": {
ldsys1: l2_weight,
ldsys2: l2_weight,
ldsys3: l2_weight_out,
lddia1: l2_weight,
lddia2: l2_weight,
lddia3: l2_weight_out,
|
Geekly/framepy
|
pump.py
|
Python
|
gpl-2.0
| 68 | 0.014706 |
__author__ = 'ENG-5 USER'
from numpy import *
import numpy as np
|
richardingham/octopus
|
octopus/blocktopus/blocks/logic.py
|
Python
|
mit
| 2,889 | 0.039114 |
from ..workspace import Block
from twisted.internet import defer
from .variables import lexical_variable
import operator
class logic_null (Block):
def eval (self):
return defer.succeed(None)
class logic_boolean (Block):
def eval (self):
return defer.succeed(self.fields['BOOL'] == 'TRUE')
class logic_negate (Block):
outputType = bool
def eval (self):
def negate (result):
if result is None:
return None
			return result == False
self._complete = self.getInputValue('BOOL').addCallback(negate)
		return self._complete
_operators_map = {
"EQ": operator.eq,
"NEQ": operator.ne,
"LT": operator.lt,
"LTE": operator.le,
"GT": operator.gt,
"GTE": operator.ge
}
def _compare (lhs, rhs, op_id):
if lhs is None or rhs is None:
return None
op = _operators_map[op_id]
return op(lhs, rhs)
# Emit a warning if bad op given
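# Illustrative semantics of _compare (example values, not from the original
# source): None propagates, everything else defers to the operator module:
#   _compare(3, 5, "LT")    -> True
#   _compare(None, 5, "GT") -> None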
class logic_compare (Block):
outputType = bool
def eval (self):
lhs = self.getInputValue('A')
rhs = self.getInputValue('B')
op_id = self.fields['OP']
def _eval (results):
lhs, rhs = results
return _compare(lhs, rhs, op_id)
self._complete = defer.gatherResults([lhs, rhs]).addCallback(_eval)
return self._complete
class lexical_variable_compare (lexical_variable):
outputType = bool
def eval (self):
variable = self._getVariable()
if variable is None:
self.emitLogMessage(
"Unknown variable: " + str(self.getFieldValue('VAR')),
"error"
)
return defer.succeed(None)
value = self.getFieldValue('VALUE')
op_id = self.getFieldValue('OP')
unit = self.getFieldValue('UNIT', None)
if isinstance(unit, (int, float)):
value *= unit
return defer.succeed(_compare(variable.value, value, op_id))
class logic_operation (Block):
outputType = bool
def eval (self):
@defer.inlineCallbacks
def _run ():
op = self.fields['OP']
lhs = yield self.getInputValue('A')
if lhs is None:
return
if op == "AND":
if bool(lhs):
rhs = yield self.getInputValue('B')
if rhs is None:
return
defer.returnValue(bool(rhs))
else:
defer.returnValue(False)
elif op == "OR":
if bool(lhs):
defer.returnValue(True)
else:
rhs = yield self.getInputValue('B')
if rhs is None:
return
defer.returnValue(bool(rhs))
# Emit a warning
return
self._complete = _run()
return self._complete
class logic_ternary (Block):
# TODO: outputType of then and else should be the same.
# this is then the outputType of the logic_ternary block.
def eval (self):
@defer.inlineCallbacks
def _run ():
test = yield self.getInputValue('IF')
if test is None:
return
if bool(test):
result = yield self.getInputValue('THEN')
defer.returnValue(result)
else:
result = yield self.getInputValue('ELSE')
defer.returnValue(result)
self._complete = _run()
return self._complete
|
kenorb-contrib/BitTorrent
|
twisted/web/_version.py
|
Python
|
gpl-3.0
| 175 | 0 |
# This is an auto-generated file. Use admin/change-versions to update.
from twisted.python import versions
version = versions.Version(__name__[:__name__.rfind('.')], 0, 6, 0)
|
ramwin1/environment_spider
|
weather/getCountryList.py
|
Python
|
gpl-2.0
| 1,285 | 0.012451 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'wangx'
import urllib2
from bs4 import BeautifulSoup
import getProvinceList
provinceList = getProvinceList.main()
global coun
coun=[]
def get(net):
result = []
try:
html = urllib2.urlopen(net,timeout=10).read()
except:
html=''
while not html:
html = urllib2.urlopen(net,timeout=10).read()
soup = BeautifulSoup(html)
temp = soup.body.find(class_='lqcontentBoxH').find(class_='contentboxTab').find(class_='contentboxTab1').find(class_='conMidtab').find_all(class_='conMidtab3')
    print len(temp)
for i in temp:
city = i.td.text
j = i.find_all('tr')
for k in j:
result.append((city,k.a.text,k.a.get('href')))
coun.append(k.a.text)
return result
def gettotal():
totalCountryList = []
for i in provinceList.keys():
net = provinceList[i]
temp = get(net)
for j in temp:
row = (i,)+j
totalCountryList.append(row)
file = open('totalCountryList','w')
text=''
text = str(totalCountryList)
file.write(text)
file.close()
def test():
test=[]
    for i in coun:
if i in test:
print i
test.append(i)
|
bslatkin/8-bits
|
appengine-mapreduce/python/test/mapreduce/output_writers_end_to_end_test.py
|
Python
|
apache-2.0
| 7,023 | 0.004129 |
#!/usr/bin/env python
#
# Copyright 2011 Google Inc. All Rights Reserved.
import unittest
from google.appengine.api import files
from google.appengine.ext import db
from mapreduce import control
from mapreduce import model
from mapreduce import output_writers
from mapreduce import test_support
from testlib import testutil
BLOBSTORE_WRITER_NAME = (output_writers.__name__ + "." +
output_writers.BlobstoreOutputWriter.__name__)
FILE_WRITER_NAME = (output_writers.__name__ + "." +
output_writers.FileOutputWriter.__name__)
class TestEntity(db.Model):
"""Test entity class."""
def test_handler_yield_key_str(entity):
"""Test handler which yields entity key."""
yield str(entity.key()) + "\n"
class FileOutputWriterEndToEndTest(testutil.HandlerTestBase):
"""End-to-end tests for FileOutputWriter using googlestore."""
def testSingleShard(self):
entity_count = 1000
for _ in range(entity_count):
TestEntity().put()
mapreduce_id = control.start_map(
"test_map",
__name__ + ".test_handler_yield_key_str",
"mapreduce.input_readers.DatastoreInputReader",
{
"entity_kind": __name__ + "." + TestEntity.__name__,
"filesystem": "gs",
"gs_bucket_name": "bucket"
},
shard_count=4,
base_path="/mapreduce_base_path",
output_writer_spec=FILE_WRITER_NAME)
test_support.execute_until_empty(self.taskqueue)
mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
filenames = output_writers.FileOutputWriter.get_filenames(mapreduce_state)
self.assertEqual(1, len(filenames))
self.assertTrue(filenames[0].startswith("/gs/bucket/"))
with files.open(filenames[0], "r") as f:
data = f.read(10000000)
self.assertEquals(1000, len(data.strip().split("\n")))
def testDedicatedParams(self):
entity_count = 1000
for _ in range(entity_count):
TestEntity().put()
mapreduce_id = control.start_map(
"test_map",
__name__ + ".test_handler_yield_key_str",
"mapreduce.input_readers.DatastoreInputReader",
{
"input_reader": {
"entity_kind": __name__ + "." + TestEntity.__name__,
},
"output_writer": {
"filesystem": "gs",
"gs_bucket_name": "bucket",
},
},
shard_count=4,
base_path="/mapreduce_base_path",
output_writer_spec=FILE_WRITER_NAME)
test_support.execute_until_empty(self.taskqueue)
mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
filenames = output_writers.FileOutputWriter.get_filenames(mapreduce_state)
self.assertEqual(1, len(filenames))
self.assertTrue(filenames[0].startswith("/gs/bucket/"))
with files.open(filenames[0], "r") as f:
data = f.read(10000000)
self.assertEquals(1000, len(data.strip().split("\n")))
def testMultipleShards(self):
entity_count = 1000
for _ in range(entity_count):
TestEntity().put()
mapreduce_id = control.start_map(
"test_map",
__name__ + ".test_handler_yield_key_str",
"mapreduce.input_readers.DatastoreInputReader",
{
"entity_kind": __name__ + "." + TestEntity.__name__,
"output_sharding": "input",
"filesystem": "gs",
},
shard_count=4,
base_path="/mapreduce_base_path",
output_writer_spec=BLOBSTORE_WRITER_NAME)
test_support.execute_until_empty(self.taskqueue)
mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
filenames = output_writers.BlobstoreOutputWriter.get_filenames(
mapreduce_state)
self.assertEqual(4, len(filenames))
file_lengths = []
for filename in filenames:
self.assertTrue(filename.startswith("/blobstore/"))
self.assertFalse(filename.startswith("/blobstore/writable:"))
with files.open(filename, "r") as f:
data = f.read(10000000)
file_lengths.append(len(data.strip().split("\n")))
# these numbers are totally random and depend on our sharding,
# which is quite deterministic.
expected_lengths = [199, 210, 275, 316]
self.assertEqual(1000, sum(expected_lengths))
self.assertEquals(expected_lengths, file_lengths)
class BlobstoreOutputWriterEndToEndTest(testutil.HandlerTestBase):
"""End-to-end tests for BlobstoreOutputWriter.
BlobstoreOutputWriter isn't complex enough yet to do extensive
unit tests. Do end-to-end tests just to check that it works.
"""
def testSingleShard(self):
entity_count = 1000
for _ in range(entity_count):
TestEntity().put()
mapreduce_id = control.start_map(
"test_map",
__name__ + ".test_handler_yield_key_str",
"mapreduce.input_readers.DatastoreInputReader",
{
"entity_kind": __name__ + "." + TestEntity.__name__,
},
shard_count=4,
base_path="/mapreduce_base_path",
output_writer_spec=BLOBSTORE_WRITER_NAME)
test_support.execute_until_empty(self.taskqueue)
mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
filenames = output_writers.BlobstoreOutputWriter.get_filenames(
mapreduce_state)
self.assertEqual(1, len(filenames))
blob_name = filenames[0]
self.assertTrue(blob_name.startswith("/blobstore/"))
self.assertFalse(blob_name.startswith("/blobstore/writable:"))
with files.open(blob_name, "r") as f:
data = f.read(10000000)
self.assertEquals(1000, len(data.strip().split("\n")))
def testMultipleShards(self):
entity_count = 1000
for _ in range(entity_count):
TestEntity().put()
mapreduce_id = control.start_map(
"test_map",
__name__ + ".test_handler_yield_key_str",
"mapreduce.input_readers.DatastoreInputReader",
{
"entity_kind": __name__ + "." + TestEntity.__name__,
"output_sharding": "input",
},
shard_count=4,
base_path="/mapreduce_base_path",
output_writer_spec=BLOBSTORE_WRITER_NAME)
    test_support.execute_until_empty(self.taskqueue)
mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
filenames = output_writers.BlobstoreOutputWriter.get_filenames(
mapreduce_state)
self.assertEqual(4, len(filenames))
file_lengths = []
for filename in filenames:
      self.assertTrue(filename.startswith("/blobstore/"))
self.assertFalse(filename.startswith("/blobstore/writable:"))
with files.open(filename, "r") as f:
data = f.read(10000000)
file_lengths.append(len(data.strip().split("\n")))
# these numbers are totally random and depend on our sharding,
# which is quite deterministic.
expected_lengths = [199, 210, 275, 316]
self.assertEqual(1000, sum(expected_lengths))
self.assertEquals(expected_lengths, file_lengths)
if __name__ == "__main__":
unittest.main()
|
buxx/synergine
|
tests/src/event/test_actions.py
|
Python
|
apache-2.0
| 337 | 0.026706 |
from synergine.synergy.event.Action import Action
class A(Action):
_depend = []
class B(Action):
_depend = [A]
class C(Action):
_depend = [B]
class D(Action):
_depend = []
class F(Action):
_depend = [C]
class E(Action):
_depend = [B, F]
class G(Action):
    _depend = []
[]
class H(Action):
_depend = [B]
|
tiregram/algo-E3FI
|
tp4/exo1.py
|
Python
|
gpl-3.0
| 1,042 | 0.028791 |
import fifo
import random
import time
def insertion_sort(elemToTry):
tab = []
for a in elemToTry:
        tab.append(a)
        place(tab, len(tab)-1)  # keep the growing prefix sorted after each append
return tab
def invertPlace(tableau,indiceOne,indiceTwo):
tableau[indiceTwo], tableau[indiceOne] = tableau[indiceOne],tableau[indiceTwo]
def place(tableau,indice):
    # bubble the element leftwards while it is smaller than its neighbour;
    # the indice > 0 guard stops at the front of the list and avoids Python's
    # negative indexing comparing against the last element
    while indice > 0 and tableau[indice] < tableau[indice-1]:
invertPlace(tableau,indice-1,indice)
indice = indice - 1
#############################################################
def bucketSort(table, index = lambda a : a>>6):
tab = [None]
for a in table:
if len(tab)-1 < index(a):
tab = tab + [None] * (index(a) - len(tab)+1)
if tab[index(a)] == None:
tab[index(a)] = fifo.Deque(a)
else:
tab[index(a)].push_last(a)
tabret = []
    for a in tab:
        if a is None:
            # index gaps leave empty buckets; skip them
            continue
        tabret = tabret + insertion_sort(a)
return tabret
|
npilon/planterbox
|
planterbox/util.py
|
Python
|
mit
| 236 | 0 |
def clean_dict_repr(mw):
"""Produce a repr()-like output o
|
f dict mw with ordered ke
|
ys"""
return '{' + \
', '.join('{k!r}: {v!r}'.format(k=k, v=v) for k, v in
sorted(mw.items())) +\
'}'
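# Illustrative behaviour (example values, not from the original source):
#   clean_dict_repr({'b': 2, 'a': 1}) == "{'a': 1, 'b': 2}"
# i.e. keys always appear in sorted order, unlike a plain repr() of a dict.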
|
kwilliams-mo/iris
|
lib/iris/fileformats/grib/grib_phenom_translation.py
|
Python
|
gpl-3.0
| 12,009 | 0.000167 |
# (C) British Crown Copyright 2013, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris.  If not, see <http://www.gnu.org/licenses/>.
'''
Provide grib 1 and 2 phenomenon translations to + from CF terms.
This is done by wrapping '_grib_cf_map.py',
which is in a format provided by the metadata translation project.
Currently supports only these ones:
* grib1 --> cf
* grib2 --> cf
* cf --> grib2
'''
import collections
import warnings
import numpy as np
from iris.fileformats.grib import _grib_cf_map as grcf
import iris.std_names
import iris.unit
class LookupTable(dict):
"""
Specialised dictionary object for making lookup tables.
Returns None for unknown keys (instead of raising exception).
Raises exception for any attempt to change an existing entry,
(but it is still possible to remove keys)
"""
def __init__(self, *args, **kwargs):
self._super = super(LookupTable, self)
self._super.__init__(*args, **kwargs)
def __getitem__(self, key):
if not key in self:
return None
return self._super.__getitem__(key)
def __setitem__(self, key, value):
if key in self and self[key] is not value:
raise KeyError('Attempted to set dict[{}] = {}, '
'but this is already set to {}.'.format(
key, value, self[key]))
self._super.__setitem__(key, value)
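# Behaviour sketch for LookupTable (illustrative values only):
#   table = LookupTable()
#   table['k']        # -> None; unknown keys do not raise
#   table['k'] = 'v'  # ok
#   table['k'] = 'w'  # -> KeyError; existing entries are write-once
#   del table['k']    # removal is still permitted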
# Define namedtuples for keys+values of the Grib1 lookup table.
_Grib1ToCfKeyClass = collections.namedtuple(
'Grib1CfKey',
('table2_version', 'centre_number', 'param_number'))
# NOTE: this form is currently used for both Grib1 *and* Grib2
_GribToCfDataClass = collections.namedtuple(
'Grib1CfData',
('standard_name', 'long_name', 'units', 'set_height'))
# Create the grib1-to-cf lookup table.
def _make_grib1_cf_table():
""" Build the Grib1 to CF phenomenon translation table. """
table = LookupTable()
def _make_grib1_cf_entry(table2_version, centre_number, param_number,
standard_name, long_name, units, set_height=None):
"""
Check data, convert types and create a new _GRIB1_CF_TABLE key/value.
Note that set_height is an optional parameter. Used to denote
phenomena that imply a height definition (agl),
e.g. "2-metre tempererature".
"""
grib1_key = _Grib1ToCfKeyClass(table2_version=int(table2_version),
centre_number=int(centre_number),
param_number=int(param_number))
if standard_name is not None:
if standard_name not in iris.std_names.STD_NAMES:
warnings.warn('{} is not a recognised CF standard name '
'(skipping).'.format(standard_name))
return None
# convert units string to iris Unit (i.e. mainly, check it is good)
iris_units = iris.unit.Unit(units)
cf_data = _GribToCfDataClass(standard_name=standard_name,
long_name=long_name,
units=iris_units,
set_height=set_height)
return (grib1_key, cf_data)
# Interpret the imported Grib1-to-CF table.
for (grib1data, cfdata) in grcf.GRIB1Local_TO_CF.iteritems():
assert grib1data.edition == 1
association_entry = _make_grib1_cf_entry(
table2_version=grib1data.t2version,
centre_number=grib1data.centre,
param_number=grib1data.iParam,
standard_name=cfdata.standard_name,
long_name=cfdata.long_name,
units=cfdata.unit)
if association_entry is not None:
key, value = association_entry
table[key] = value
# Do the same for special Grib1 codes that include an implied height level.
for (grib1data, (cfdata, extra_dimcoord)) \
in grcf.GRIB1LocalConstrained_TO_CF.iteritems():
assert grib1data.edition == 1
if extra_dimcoord.standard_name != 'height':
raise ValueError('Got implied dimension coord of "{}", '
'currently can only handle "height".'.format(
extra_dimcoord.standard_name))
if extra_dimcoord.units != 'm':
raise ValueError('Got implied dimension units of "{}", '
'currently can only handle "m".'.format(
extra_dimcoord.units))
if len(extra_dimcoord.points) != 1:
raise ValueError('Implied dimension has {} points, '
'currently can only handle 1.'.format(
len(extra_dimcoord.points)))
association_entry = _make_grib1_cf_entry(
table2_version=int(grib1data.t2version),
centre_number=int(grib1data.centre),
param_number=int(grib1data.iParam),
standard_name=cfdata.standard_name,
long_name=cfdata.long_name,
units=cfdata.unit,
set_height=extra_dimcoord.points[0])
if association_entry is not None:
key, value = association_entry
table[key] = value
return table
_GRIB1_CF_TABLE = _make_grib1_cf_table()
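# An illustrative (made-up) lookup against the table built above; real keys
# come from the imported _grib_cf_map data:
#   key = _Grib1ToCfKeyClass(table2_version=128, centre_number=98,
#                            param_number=130)
#   _GRIB1_CF_TABLE[key]  # -> a _GribToCfDataClass, or None if unmapped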
# Define a namedtuple for the keys of the Grib2 lookup table.
_Grib2ToCfKeyClass = collections.namedtuple(
'Grib2CfKey',
('param_discipline', 'param_category', 'param_number'))
# Create the grib2-to-cf lookup table.
def _make_grib2_to_cf_table():
""" Build the Grib2 to CF phenomenon translation table. """
table = LookupTable()
def _make_grib2_cf_entry(param_discipline, param_category, param_number,
standard_name, long_name, units):
"""
Check data, convert types and make a _GRIB2_CF_TABLE key/value pair.
        Unlike the Grib1 case, set_height does not apply here and is always
        stored as None.
"""
grib2_key = _Grib2ToCfKeyClass(param_discipline=int(param_discipline),
param_category=int(param_category),
param_number=int(param_number))
if standard_name is not None:
if standard_name not in iris.std_names.STD_NAMES:
warnings.warn('{} is not a recognised CF standard name '
'(skipping).'.format(standard_name))
return None
# convert units string to iris Unit (i.e. mainly, check it is good)
iris_units = iris.unit.Unit(units)
cf_data = _GribToCfDataClass(standard_name=standard_name,
long_name=long_name,
units=iris_units,
set_height=None)
return (grib2_key, cf_data)
# Interpret the grib2 info from grib_cf_map
for grib2data, cfdata in grcf.GRIB2_TO_CF.iteritems():
assert grib2data.edition == 2
association_entry = _make_grib2_cf_entry(
param_discipline=grib2data.discipline,
param_category=grib2data.category,
param_number=grib2data.number,
standard_name=cfdata.standard_name,
long_name=cfdata.long_name,
units=cfdata.unit)
if association_entry is not None:
key, value = association_entry
table[key] = value
return table
_GRIB2_CF_TABLE = _make_grib2_to_cf_table()
|
purduerov/XX-Core
|
rov/movement/controls/PID_Tuner.py
|
Python
|
mit
| 174 | 0 |
# I'll attempt to research and see the practicality of making a PID tuner
# possibly hard coding and using error
# or maybe using tensor flow
# but just an idea at the moment
|
Galithil/genologics_sql
|
tests/test_default.py
|
Python
|
mit
| 308 | 0.019481 |
import genologics_sql.utils
from genologics_sql.tables import *
def test_connection():
session=genologics_sql.utils.get_session()
assert(session is not None)
def test_project_query():
session=genologics_sql.utils.get_session()
pj=session.query(Project).limit(1)
assert(pj is not None)
|
yoshiweb/keras-mnist
|
keras-mnist/mnist_cnn/mnist_cnn_train.py
|
Python
|
mit
| 2,369 | 0 |
'''Trains a simple convnet on the MNIST dataset.
Gets to 99.25% test accuracy after 12 epochs
(there is still a lot of margin for parameter tuning).
16 seconds per epoch on a GRID K520 GPU.
'''
from __future__ import print_function
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras import backend as K
from keras.models import load_model
batch_size = 128
num_classes = 10
epochs = 12
# input image dimensions
img_rows, img_cols = 28, 28
# the data, shuffled and split between train and test sets
(x_train, y_train), (x_test, y_test) = mnist.load_data()
if K.image_data_format() == 'channels_first':
x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)
x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)
input_shape = (1, img_rows, img_cols)
else:
x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
input_shape = (img_rows, img_cols, 1)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3),
activation='relu',
input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss=keras.losses.categorical_crossentropy,
optimizer=keras.optimizers.Adadelta(),
metrics=['accuracy'])
model.fit(x_train, y_train,
batch_size=batch_size,
epochs=epochs,
verbose=1,
validation_data=(x_test, y_test))
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
# Save the trained model to an HDF5 file
model.save('mnist_cnn_model.h5')
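# load_model is imported above but never used in this script; a minimal
# (illustrative) round-trip with the file just written would be:
#   restored = load_model('mnist_cnn_model.h5')
#   probs = restored.predict(x_test[:1])  # array of shape (1, num_classes)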
|
Rhilip/PT-help-server
|
modules/infogen/__init__.py
|
Python
|
mit
| 1,231 | 0.000812 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Rhilip <rhilipruan@gmail.com>
import time
from flask import Blueprint, request, jsonify, redirect
from app import cache
from .gen import Gen
getinfo_blueprint = Blueprint('infogen', __name__, url_prefix="/movieinfo")
docs_url = "https://github.com/Rhilip/PT-help/blob/master/modules/infogen/README.md"
def get_key(key):
ret = ""
if request.method == "POST":
ret = request.form[key]
elif request.method == "GET":
ret = request.args.get(key)
return ret
@getinfo_blueprint.route("/gen", methods=["GET"
|
, "POST"])
def gen():
url = get_key("url")
if url is None:
site = get_key('site')
sid = get_key('sid')
if site is not None and sid is not None:
url = {'site': site, 'sid': sid}
if url:
t0 = time.time()
@cache.memoize(timeout=86400)
def gen_data(uri):
return Gen(url=uri).gen()
nocache = get_key("nocache")
if nocache:
cache.delete_memoized(gen_data, url)
data = gen_data(url)
data["cost"] = time.time() - t0
return jsonify(data)
else:
return redirect(docs_url, code=301)
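# Illustrative request shapes accepted by this endpoint (assuming the
# blueprint's /movieinfo prefix):
#   GET /movieinfo/gen?url=<resource page url>
#   GET /movieinfo/gen?site=<site>&sid=<id>[&nocache=1]
# Both return the generated JSON with an added "cost" timing field.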
|
alxgu/ansible
|
lib/ansible/modules/cloud/azure/azure_rm_servicebusqueue.py
|
Python
|
gpl-3.0
| 12,400 | 0.002581 |
#!/usr/bin/python
#
# Copyright (c) 2018 Yuwei Zhou, <yuwzho@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_servicebusqueue
version_added: "2.8"
short_description: Manage Azure Service Bus queue.
description:
- Create, update or delete an Azure Service Bus queue.
options:
resource_group:
description:
- name of resource group.
required: true
name:
description:
- name of the queue.
required: true
namespace:
description:
- Servicebus namespace name.
- A namespace is a scoping container for all messaging components.
- Multiple queues and topics can reside within a single namespace, and namespaces often serve as application containers.
required: true
state:
description:
- Assert the state of the queue. Use 'present' to create or update and
'absent' to delete.
default: present
choices:
- absent
- present
auto_delete_on_idle_in_seconds:
description:
- Time idle interval after which a queue is automatically deleted.
- The minimum duration is 5 minutes.
type: int
dead_lettering_on_message_expiration:
description:
- A value that indicates whether a queue has dead letter support when a message expires.
type: bool
default_message_time_to_live_seconds:
description:
- Default message timespan to live value.
- This is the duration after which the message expires, starting from when the message is sent to Service Bus.
- This is the default value used when TimeToLive is not set on a message itself.
type: int
enable_batched_operations:
description:
- Value that indicates whether server-side batched operations are enabled.
type: bool
enable_express:
description:
- Value that indicates whether Express Entities are enabled.
- An express topic or queue holds a message in memory temporarily before writing it to persistent storage.
type: bool
enable_partitioning:
description:
- A value that indicates whether the topic or queue is to be partitioned across multiple message brokers.
type: bool
forward_dead_lettered_messages_to:
description:
- Queue or topic name to forward the Dead Letter message for a queue.
forward_to:
description:
- Queue or topic name to forward the messages for a queue.
lock_duration_in_seconds:
description:
- Timespan duration of a peek-lock.
- The amount of time that the message is locked for other receivers.
- The maximum value for LockDuration is 5 minutes.
type: int
max_delivery_count:
description:
            - The maximum delivery count.
- A message is automatically deadlettered after this number of deliveries.
type: int
max_size_in_mb:
description:
- The maximum size of the queue in megabytes, which is the size of memory allocated for the queue.
type: int
requires_duplicate_detection:
description:
- A value indicating if this queue or topic requires duplicate detection.
type: bool
duplicate_detection_time_in_seconds:
description:
- TimeSpan structure that defines the duration of the duplicate detection history.
type: int
requires_session:
description:
- A value that indicates whether the queue supports the concept of sessions.
type: bool
status:
description:
- Status of the entity.
choices:
- active
- disabled
- send_disabled
- receive_disabled
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Yuwei Zhou (@yuwzho)"
'''
EXAMPLES = '''
- name: Create a queue
azure_rm_servicebusqueue:
name: subqueue
resource_group: myResourceGroup
namespace: bar
duplicate_detection_time_in_seconds: 600
'''
RETURN = '''
id:
description: Current state of the queue.
returned: success
type: str
'''
try:
from msrestazure.azure_exceptions import CloudError
except ImportError:
# This is handled in azure_rm_common
pass
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
from ansible.module_utils.common.dict_transformations import _snake_to_camel, _camel_to_snake
from ansible.module_utils._text import to_native
from datetime import datetime, timedelta
duration_spec_map = dict(
default_message_time_to_live='default_message_time_to_live_seconds',
duplicate_detection_history_time_window='duplicate_detection_time_in_seconds',
auto_delete_on_idle='auto_delete_on_idle_in_seconds',
lock_duration='lock_duration_in_seconds'
)
sas_policy_spec = dict(
state=dict(type='str', default='present', choices=['present', 'absent']),
name=dict(type='str', required=True),
regenerate_key=dict(type='bool'),
rights=dict(type='str', choices=['manage', 'listen', 'send', 'listen_send'])
)
class AzureRMServiceBusQueue(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(type='str', required=True),
name=dict(type='str', required=True),
state=dict(type='str', default='present', choices=['present', 'absent']),
namespace=dict(type='str', required=True),
auto_delete_on_idle_in_seconds=dict(type='int'),
            dead_lettering_on_message_expiration=dict(type='bool'),
default_message_time_to_live_seconds=dict(type='int'),
duplicate_detection_time_in_seconds=dict(type='int'),
            enable_batched_operations=dict(type='bool'),
enable_express=dict(type='bool'),
enable_partitioning=dict(type='bool'),
forward_dead_lettered_messages_to=dict(type='str'),
forward_to=dict(type='str'),
lock_duration_in_seconds=dict(type='int'),
max_delivery_count=dict(type='int'),
max_size_in_mb=dict(type='int'),
requires_duplicate_detection=dict(type='bool'),
requires_session=dict(type='bool'),
status=dict(type='str',
choices=['active', 'disabled', 'send_disabled', 'receive_disabled'])
)
self.resource_group = None
self.name = None
self.state = None
self.namespace = None
self.location = None
self.type = None
self.subscription_topic_name = None
self.auto_delete_on_idle_in_seconds = None
self.dead_lettering_on_message_expiration = None
self.default_message_time_to_live_seconds = None
self.enable_batched_operations = None
self.enable_express = None
self.enable_partitioning = None
self.forward_dead_lettered_messages_to = None
self.forward_to = None
self.lock_duration_in_seconds = None
self.max_delivery_count = None
self.max_size_in_mb = None
        self.requires_duplicate_detection = None
        self.duplicate_detection_time_in_seconds = None
        self.requires_session = None
self.status = None
self.results = dict(
changed=False,
id=None
)
super(AzureRMServiceBusQueue, self).__init__(self.module_arg_spec,
supports_check_mode=True)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()):
setattr(self, key, kwargs[key])
changed = False
original = self.get()
if self.state == 'present':
# Create the resource instance
params = dict(
|
partofthething/home-assistant
|
homeassistant/components/zha/core/const.py
|
Python
|
apache-2.0
| 10,279 | 0.000292 |
"""All constants related to the ZHA component."""
import enum
import logging
from typing import List
import bellows.zigbee.application
from zigpy.config import CONF_DEVICE_PATH # noqa: F401 # pylint: disable=unused-import
import zigpy_cc.zigbee.application
import zigpy_deconz.zigbee.application
import zigpy_xbee.zigbee.application
import zigpy_zigate.zigbee.application
import zigpy_znp.zigbee.application
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR
from homeassistant.components.climate import DOMAIN as CLIMATE
from homeassistant.components.cover import DOMAIN as COVER
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER
from homeassistant.components.fan import DOMAIN as FAN
from homeassistant.components.light import DOMAIN as LIGHT
from homeassistant.components.lock import DOMAIN as LOCK
from homeassistant.components.number import DOMAIN as NUMBER
from homeassistant.components.sensor import DOMAIN as SENSOR
from homeassistant.components.switch import DOMAIN as SWITCH
from .typing import CALLABLE_T
ATTR_ARGS = "args"
ATTR_ATTRIBUTE = "attribute"
ATTR_ATTRIBUTE_ID = "attribute_id"
ATTR_ATTRIBUTE_NAME = "attribute_name"
ATTR_AVAILABLE = "available"
ATTR_CLUSTER_ID = "cluster_id"
ATTR_CLUSTER_TYPE = "cluster_type"
ATTR_COMMAND_TYPE = "command_type"
ATTR_DEVICE_IEEE = "device_ieee"
ATTR_DEVICE_TYPE = "device_type"
ATTR_ENDPOINTS = "endpoints"
ATTR_ENDPOINT_NAMES = "endpoint_names"
ATTR_ENDPOINT_ID = "endpoint_id"
ATTR_IEEE = "ieee"
ATTR_IN_CLUSTERS = "in_clusters"
ATTR_LAST_SEEN = "last_seen"
ATTR_LEVEL = "level"
ATTR_LQI = "lqi"
ATTR_MANUFACTURER = "manufacturer"
ATTR_MANUFACTURER_CODE = "manufacturer_code"
ATTR_MEMBERS = "members"
ATTR_MODEL = "model"
ATTR_NEIGHBORS = "neighbors"
ATTR_NODE_DESCRIPTOR = "node_descriptor"
ATTR_NWK = "nwk"
ATTR_OUT_CLUSTERS = "out_clusters"
ATTR_POWER_SOURCE = "power_source"
ATTR_PROFILE_ID = "profile_id"
ATTR_QUIRK_APPLIED = "quirk_applied"
ATTR_QUIRK_CLASS = "quirk_class"
ATTR_RSSI = "rssi"
ATTR_SIGNATURE = "signature"
ATTR_TYPE = "type"
ATTR_UNIQUE_ID = "unique_id"
ATTR_VALUE = "value"
ATTR_WARNING_DEVICE_DURATION = "duration"
ATTR_WARNING_DEVICE_MODE = "mode"
ATTR_WARNING_DEVICE_STROBE = "strobe"
ATTR_WARNING_DEVICE_STROBE_DUTY_CYCLE = "duty_cycle"
ATTR_WARNING_DEVICE_STROBE_INTENSITY = "intensity"
BAUD_RATES = [2400, 4800, 9600, 14400, 19200, 38400, 57600, 115200, 128000, 256000]
BINDINGS = "bindings"
CHANNEL_ACCELEROMETER = "accelerometer"
CHANNEL_ANALOG_INPUT = "analog_input"
CHANNEL_ANALOG_OUTPUT = "analog_output"
CHANNEL_ATTRIBUTE = "attribute"
CHANNEL_BASIC = "basic"
CHANNEL_COLOR = "light_color"
CHANNEL_COVER = "window_covering"
CHANNEL_DOORLOCK = "door_lock"
CHANNEL_ELECTRICAL_MEASUREMENT = "electrical_measurement"
CHANNEL_EVENT_RELAY = "event_relay"
CHANNEL_FAN = "fan"
CHANNEL_HUMIDITY = "humidity"
CHANNEL_IAS_WD = "ias_wd"
CHANNEL_IDENTIFY = "identify"
CHANNEL_ILLUMINANCE = "illuminance"
CHANNEL_LEVEL = ATTR_LEVEL
CHANNEL_MULTISTATE_INPUT = "multistate_input"
CHANNEL_OCCUPANCY = "occupancy"
CHANNEL_ON_OFF = "on_off"
CHANNEL_POWER_CONFIGURATION = "power"
CHANNEL_PRESSURE = "pressure"
CHANNEL_SHADE = "shade"
CHANNEL_SMARTENERGY_METERING = "smartenergy_metering"
CHANNEL_TEMPERATURE = "temperature"
CHANNEL_THERMOSTAT = "thermostat"
CHANNEL_ZDO = "zdo"
CHANNEL_ZONE = ZONE = "ias_zone"
CLUSTER_COMMAND_SERVER = "server"
CLUSTER_COMMANDS_CLIENT = "client_commands"
CLUSTER_COMMANDS_SERVER = "server_commands"
CLUSTER_TYPE_IN = "in"
CLUSTER_TYPE_OUT = "out"
PLATFORMS = (
BINARY_SENSOR,
CLIMATE,
COVER,
DEVICE_TRACKER,
FAN,
LIGHT,
LOCK,
NUMBER,
SENSOR,
SWITCH,
)
CONF_BAUDRATE = "baudrate"
CONF_DATABASE = "database_path"
CONF_DEVICE_CONFIG = "device_config"
CONF_ENABLE_QUIRKS = "enable_quirks"
CONF_FLOWCONTROL = "flow_control"
CONF_RADIO_TYPE = "radio_type"
CONF_USB_PATH = "usb_path"
CONF_ZIGPY = "zigpy_config"
DATA_DEVICE_CONFIG = "zha_device_config"
DATA_ZHA = "zha"
DATA_ZHA_CONFIG = "config"
DATA_ZHA_BRIDGE_ID = "zha_bridge_id"
DATA_ZHA_CORE_EVENTS = "zha_core_events"
DATA_ZHA_DISPATCHERS = "zha_dispatchers"
DATA_ZHA_GATEWAY = "zha_gateway"
DATA_ZHA_PLATFORM_LOADED = "platform_loaded"
DEBUG_COMP_BELLOWS = "bellows"
DEBUG_COMP_ZHA = "homeassistant.components.zha"
DEBUG_COMP_ZIGPY = "zigpy"
DEBUG_COMP_ZIGPY_CC = "zigpy_cc"
DEBUG_COMP_ZIGPY_DECONZ = "zigpy_deconz"
DEBUG_COMP_ZIGPY_XBEE = "zigpy_xbee"
DEBUG_COMP_ZIGPY_ZIGATE = "zigpy_zigate"
DEBUG_LEVEL_CURRENT = "current"
DEBUG_LEVEL_ORIGINAL = "original"
DEBUG_LEVELS = {
DEBUG_COMP_BELLOWS: logging.DEBUG,
DEBUG_COMP_ZHA: logging.DEBUG,
DEBUG_COMP_ZIGPY: logging.DEBUG,
DEBUG_COMP_ZIGPY_CC: logging.DEBUG,
DEBUG_COMP_ZIGPY_DECONZ: logging.DEBUG,
DEBUG_COMP_ZIGPY_XBEE: logging.DEBUG,
DEBUG_COMP_ZIGPY_ZIGATE: logging.DEBUG,
}
DEBUG_RELAY_LOGGERS = [DEBUG_COMP_ZHA, DEBUG_COMP_ZIGPY]
DEFAULT_RADIO_TYPE = "ezsp"
DEFAULT_BAUDRATE = 57600
DEFAULT_DATABASE_NAME = "zigbee.db"
DEVICE_PAIRING_STATUS = "pairing_status"
DISCOVERY_KEY = "zha_discovery_info"
DOMAIN = "zha"
GROUP_ID = "group_id"
GROUP_IDS = "group_ids"
GROUP_NAME = "group_name"
MFG_CLUSTER_ID_START = 0xFC00
POWER_MAINS_POWERED = "Mains"
POWER_BATTERY_OR_UNKNOWN = "Battery or Unknown"
class RadioType(enum.Enum):
# pylint: disable=invalid-name
"""Possible options for radio type."""
znp = (
"ZNP = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2",
zigpy_znp.zigbee.application.ControllerApplication,
)
ezsp = (
"EZSP = Silicon Labs EmberZNet protocol: Elelabs, HUSBZB-1, Telegesis",
bellows.zigbee.application.ControllerApplication,
)
deconz = (
"deCONZ = dresden elektronik deCONZ protocol: ConBee I/II, RaspBee I/II",
zigpy_deconz.zigbee.application.ControllerApplication,
)
ti_cc = (
"Legacy TI_CC = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2",
zigpy_cc.zigbee.application.ControllerApplication,
)
zigate = (
"ZiGate = ZiGate Zigbee radios: PiZiGate, ZiGate USB-TTL, ZiGate WiFi",
zigpy_zigate.zigbee.application.ControllerApplication,
)
xbee = (
"XBee = Digi XBee Zigbee radios: Digi XBee Series 2, 2C, 3",
zigpy_xbee.zigbee.application.ControllerApplication,
)
@classmethod
def list(cls) -> List[str]:
"""Return a list of descriptions."""
return [e.description for e in RadioType]
@classmethod
def get_by_description(cls, description: str) -> str:
"""Get radio by description."""
for radio in cls:
if radio.description == description:
return radio.name
raise ValueError
def __init__(self, description: str, controller_cls: CALLABLE_T):
"""Init instance."""
self._desc = description
self._ctrl_cls = controller_cls
@property
def controller(self) -> CALLABLE_T:
"""Return controller class."""
return self._ctrl_cls
@property
def description(self) -> str:
"""Return radio type description."""
return self._desc
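# Usage sketch (illustrative): the enum maps between config names and the
# human-readable strings above, e.g.
#   RadioType.list()                                           # all descriptions
#   RadioType.get_by_description(RadioType.ezsp.description)   # -> "ezsp"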
REPORT_CONFIG_MAX_INT = 900
REPORT_CONFIG_MAX_INT_BATTERY_SAVE = 10800
REPORT_CONFIG_MIN_INT = 30
REPORT_CONFIG_MIN_INT_ASAP = 1
REPORT_CONFIG_MIN_INT_IMMEDIATE = 0
REPORT_CONFIG_MIN_INT_OP = 5
REPORT_CONFIG_MIN_INT_BATTERY_SAVE = 3600
REPORT_CONFIG_RPT_CHANGE = 1
REPORT_CONFIG_DEFAULT = (
REPORT_CONFIG_MIN_INT,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_ASAP = (
REPORT_CONFIG_MIN_INT_ASAP,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_BATTERY_SAVE = (
REPORT_CONFIG_MIN_INT_BATTERY_SAVE,
    REPORT_CONFIG_MAX_INT_BATTERY_SAVE,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_IMMEDIATE = (
REPORT_CONFIG_MIN_INT_IMMEDIATE,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
REPORT_CONFIG_OP = (
REPORT_CONFIG_MIN_INT_OP,
REPORT_CONFIG_MAX_INT,
REPORT_CONFIG_RPT_CHANGE,
)
SENSOR_ACCELERATION = "acceleration"
SENSOR_BATTERY = "battery"
SENSOR_ELECTRICAL_MEASUREMENT = CHANNEL_ELECTRICAL_MEASUREMENT
SENSOR_GENERIC = "generic"
SENSOR_HUMIDITY = CHANNEL_HUMIDITY
|
all-of-us/raw-data-repository
|
rdr_service/lib_fhir/fhirclient_4_0_0/models/composition.py
|
Python
|
bsd-3-clause
| 13,565 | 0.007667 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.0-a53ec6ee1b (http://hl7.org/fhir/StructureDefinition/Composition) on 2019-05-07.
# 2019, SMART Health IT.
from . import domainresource
class Composition(domainresource.DomainResource):
""" A set of resources composed into a single coherent clinical statement with
clinical attestation.
A set of healthcare-related information that is assembled together into a
single logical package that provides a single coherent statement of
meaning, establishes its own context and that has clinical attestation with
regard to who is making the statement. A Composition defines the structure
and narrative content necessary for a document. However, a Composition
alone does not constitute a document. Rather, the Composition must be the
first entry in a Bundle where Bundle.type=document, and any other resources
referenced from Composition must be included as subsequent entries in the
Bundle (for example Patient, Practitioner, Encounter, etc.).
"""
resource_type = "Composition"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.attester = None
""" Attests to accuracy of composition.
List of `CompositionAttester` items (represented as `dict` in JSON). """
self.author = None
""" Who and/or what authored the composition.
List of `FHIRReference` items (represented as `dict` in JSON). """
self.category = None
""" Categorization of Composition.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.confidentiality = None
""" As defined by affinity domain.
Type `str`. """
self.custodian = None
""" Organization which maintains the composition.
Type `FHIRReference` (represented as `dict` in JSON). """
self.date = None
""" Composition editing time.
        Type `FHIRDate` (represented as `str` in JSON). """
self.encounter = None
""" Context of the Composition.
Type `FHIRReference` (represented as `dict` in JSON). """
self.event = None
""" The clinical service(s) being documented.
List of `CompositionEvent` items (represented as `dict` in JSON). """
self.identifier = None
""" Version-independent identifier for the Composition.
Type `Identifier` (represented as `dict` in JSON). """
self.relatesTo = None
""" Relationships to other compositions/documents.
List of `CompositionRelatesTo` items (represented as `dict` in JSON). """
self.section = None
""" Composition is broken into sections.
List of `CompositionSection` items (represented as `dict` in JSON). """
self.status = None
""" preliminary | final | amended | entered-in-error.
Type `str`. """
self.subject = None
""" Who and/or what the composition is about.
Type `FHIRReference` (represented as `dict` in JSON). """
self.title = None
""" Human Readable name/title.
Type `str`. """
self.type = None
""" Kind of composition (LOINC if possible).
Type `CodeableConcept` (represented as `dict` in JSON). """
super(Composition, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(Composition, self).elementProperties()
js.extend([
("attester", "attester", CompositionAttester, True, None, False),
("author", "author", fhirreference.FHIRReference, True, None, True),
("category", "category", codeableconcept.CodeableConcept, True, None, False),
("confidentiality", "confidentiality", str, False, None, False),
("custodian", "custodian", fhirreference.FHIRReference, False, None, False),
("date", "date", fhirdate.FHIRDate, False, None, True),
("encounter", "encounter", fhirreference.FHIRReference, False, None, False),
("event", "event", CompositionEvent, True, None, False),
("identifier", "identifier", identifier.Identifier, False, None, False),
("relatesTo", "relatesTo", CompositionRelatesTo, True, None, False),
("section", "section", CompositionSection, True, None, False),
("status", "status", str, False, None, True),
("subject", "subject", fhirreference.FHIRReference, False, None, False),
("title", "title", str, False, None, True),
("type", "type", codeableconcept.CodeableConcept, False, None, True),
])
return js
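# Illustrative construction (not part of the generated file): the keys follow
# the JSON names in elementProperties above; the resource contents are invented.
#   comp = Composition({
#       "status": "final",
#       "title": "Discharge summary",
#       "date": "2019-05-07",
#       "type": {"text": "Discharge summary"},
#       "author": [{"reference": "Practitioner/example"}],
#   }, strict=False)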
from . import backboneelement
class CompositionAttester(backboneelement.BackboneElement):
""" Attests to accuracy of composition.
A participant who has attested to the accuracy of the composition/document.
"""
resource_type = "CompositionAttester"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.mode = None
""" personal | professional | legal | official.
Type `str`. """
self.party = None
""" Who attested the composition.
Type `FHIRReference` (represented as `dict` in JSON). """
self.time = None
""" When the composition was attested.
Type `FHIRDate` (represented as `str` in JSON). """
super(CompositionAttester, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(CompositionAttester, self).elementProperties()
js.extend([
("mode", "mode", str, False, None, True),
("party", "party", fhirreference.FHIRReference, False, None, False),
("time", "time", fhirdate.FHIRDate, False, None, False),
])
return js
class CompositionEvent(backboneelement.BackboneElement):
""" The clinical service(s) being documented.
The clinical service, such as a colonoscopy or an appendectomy, being
documented.
"""
resource_type = "CompositionEvent"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.code = None
""" Code(s) that apply to the event being documented.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.detail = None
""" The event(s) being documented.
List of `FHIRReference` items (represented as `dict` in JSON). """
self.period = None
""" The period covered by the documentation.
Type `Period` (represented as `dict` in JSON). """
super(CompositionEvent, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(CompositionEvent, self).elementProperties()
js.extend([
("code", "code", codeableconcept.CodeableConcept, True, None, False),
("detail", "detail", fhirreference.FHIRReference, True, None, False),
("period", "period", period.Period, False, None, False),
        ])
        return js
|
edwardbadboy/vdsm-ubuntu
|
tests/fileUtilTests.py
|
Python
|
gpl-2.0
| 8,296 | 0 |
#
# Copyright 2012 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import tempfile
import os
import storage.fileUtils as fileUtils
import testValidation
from testrunner import VdsmTestCase as TestCaseBase
class DirectFileTests(TestCaseBase):
@classmethod
def getConfigTemplate(cls):
return {}
def testRead(self):
data = """Vestibulum. Libero leo nostra, pede nunc eu. Pellentesque
platea lacus morbi nisl montes ve. Ac. A, consectetuer erat, justo eu.
Elementum et, phasellus fames et rutrum donec magnis eu bibendum. Arcu,
ante aliquam ipsum ut facilisis ad."""
srcFd, srcPath = tempfile.mkstemp()
f = os.fdopen(srcFd, "wb")
f.write(data)
f.flush()
f.close()
with fileUtils.open_ex(srcPath, "dr") as f:
self.assertEquals(f.read(), data)
os.unlink(srcPath)
def testSeekRead(self):
data = """
Habitasse ipsum at fusce litora metus, placerat dui purus aenean ante,
ve. Pede hymenaeos ut primis cum, rhoncus, lectus, nunc. Vestibulum
curabitur vitae etiam magna auctor velit, mi tempus vivamus orci eros.
Pellentesque curabitur risus fermentum eget. Elementum curae, donec
nisl egestas ve, ut odio eu nunc elit felis primis id. Ridiculus metus
morbi nulla erat, amet nisi. Amet ligula nisi, id penatibus risus in.
Purus velit duis. Aenean eget, pellentesque eu rhoncus arcu et
consectetuer laoreet, augue nisi dictum lacinia urna. Fermentum
torquent. Ut interdum vivamus duis. Felis consequat nec pede. Orci
sollicitudin parturient orci felis. Enim, diam velit sapien
condimentum fames semper nibh. Integer at, egestas pede consectetuer
ac augue pharetra dolor non placerat quisque id cursus ultricies.
Ligula mi senectus sit. Habitasse. Integer sollicitudin dapibus cum
quam.
"""
self.assertTrue(len(data) > 512)
srcFd, srcPath = tempfile.mkstemp()
f = os.fdopen(srcFd, "wb")
f.write(data)
f.flush()
f.close()
with fileUtils.open_ex(srcPath, "dr") as f:
f.seek(512)
self.assertEquals(f.read(), data[512:])
os.unlink(srcPath)
def testWrite(self):
data = """In ut non platea egestas, quisque magnis nunc nostra ac etiam
suscipit nec integer sociosqu. Fermentum. Ante orci luctus, ipsum
ullamcorper enim arcu class neque inceptos class. Ut, sagittis
torquent, commodo facilisi."""
srcFd, srcPath = tempfile.mkstemp()
os.close(srcFd)
with fileUtils.open_ex(srcPath, "dw") as f:
f.write(data)
with fileUtils.open_ex(srcPath, "r") as f:
self.assertEquals(f.read(len(data)), data)
os.unlink(srcPath)
def testSmallWrites(self):
data = """
Aliquet habitasse tellus. Fringilla faucibus tortor parturient
consectetuer sodales, venenatis platea habitant. Hendrerit nostra nunc
odio. Primis porttitor consequat enim ridiculus. Taciti nascetur,
nibh, convallis sit, cum dis mi. Nonummy justo odio cursus, ac hac
        curabitur nibh. Tellus. Montes, ut taciti orci ridiculus facilisis
nunc. Donec. Risus adipiscing habitant donec vehicula non vitae class,
porta vitae senectus. Nascetur felis laoreet integer, tortor ligula.
Pellentesque vestibulum cras nostra. Ut sollicitudin posuere, per
accumsan curabitur id, nisi fermentum vel, eget netus tristique per,
donec, curabitur senectus ut fusce. A. Mauris fringilla senectus et
eni facilisis magna inceptos eu, cursus habitant fringilla neque.
Nibh. Elit facilisis sed, elit, nostra ve torquent dictumst, aenean
sapien quam, habitasse in. Eu tempus aptent, diam, nisi risus
pharetra, ac, condimentum orci, consequat mollis. Cras lacus augue
ultrices proin fermentum nibh sed urna. Ve ipsum ultrices curae,
feugiat faucibus proin et elementum vivamus, lectus. Torquent. Tempus
facilisi. Cras suspendisse euismod consectetuer ornare nostra. Fusce
amet cum amet diam.
"""
self.assertTrue(len(data) > 512)
srcFd, srcPath = tempfile.mkstemp()
os.close(srcFd)
with fileUtils.open_ex(srcPath, "dw") as f:
f.write(data[:512])
f.write(data[512:])
with fileUtils.open_ex(srcPath, "r") as f:
self.assertEquals(f.read(len(data)), data)
os.unlink(srcPath)
def testUpdateRead(self):
data = """
Aliquet. Aliquam eni ac nullam iaculis cras ante, adipiscing. Enim
eget egestas pretium. Ultricies. Urna cubilia in, hac. Curabitur.
Nibh. Purus ridiculus natoque sed id. Feugiat lacus quam, arcu
maecenas nec egestas. Hendrerit duis nunc eget dis lacus porttitor per
sodales class diam condimentum quisque condimentum nisi ligula.
Dapibus blandit arcu nam non ac feugiat diam, dictumst. Ante eget
fames eu penatibus in, porta semper accumsan adipiscing tellus in
sagittis. Est parturient parturient mi fermentum commodo, per
fermentum. Quis duis velit at quam risus mi. Facilisi id fames.
Turpis, conubia rhoncus. Id. Elit eni tellus gravida, ut, erat morbi.
Euismod, enim a ante vestibulum nibh. Curae curae primis vulputate
adipiscing arcu ipsum suspendisse quam hymenaeos primis accumsan
vestibulum.
"""
self.assertTrue(len(data) > 512)
srcFd, srcPath = tempfile.mkstemp()
os.close(srcFd)
with fileUtils.open_ex(srcPath, "wd") as f:
f.write(data[:512])
with fileUtils.open_ex(srcPath, "r+d") as f:
f.seek(512)
f.write(data[512:])
with fileUtils.open_ex(srcPath, "r") as f:
self.assertEquals(f.read(len(data)), data)
os.unlink(srcPath)
class ChownTests(TestCaseBase):
@testValidation.ValidateRunningAsRoot
def test(self):
targetId = 666
srcFd, srcPath = tempfile.mkstemp()
os.close(srcFd)
fileUtils.chown(srcPath, targetId, targetId)
stat = os.stat(srcPath)
self.assertTrue(stat.st_uid == stat.st_gid == targetId)
os.unlink(srcPath)
@testValidation.ValidateRunningAsRoot
def testNames(self):
# I convert to some id because I have no
# idea what users are defined and what
# there IDs are apart from root
tmpId = 666
srcFd, srcPath = tempfile.mkstemp()
os.close(srcFd)
fileUtils.chown(srcPath, tmpId, tmpId)
stat = os.stat(srcPath)
self.assertTrue(stat.st_uid == stat.st_gid == tmpId)
fileUtils.chown(srcPath, "root", "root")
stat = os.stat(srcPath)
self.assertTrue(stat.st_uid == stat.st_gid == 0)
class CopyUserModeToGroupTests(TestCaseBase):
MODE_MASK = 0777
# format: initialMode, expectedMode
modesList = [
(0770, 0770), (0700, 0770), (0750, 0770), (0650, 0660),
]
def testCopyUserModeToGroup(self):
fd, path = tempfile.mkstemp()
try:
os.close(fd)
for initialMode, expectedMode in self.modesList:
os.chmod(path, initialMode)
fileUtils.copyUserModeToGroup(path)
                self.assertEquals(os.stat(path).st_mode & self.MODE_MASK,
                                  expectedMode)
        finally:
            os.unlink(path)
|
fishtown-analytics/dbt
|
core/dbt/contracts/graph/compiled.py
|
Python
|
apache-2.0
| 7,700 | 0 |
from dbt.contracts.graph.parsed import (
HasTestMetadata,
ParsedNode,
ParsedAnalysisNode,
ParsedDataTestNode,
ParsedHookNode,
ParsedModelNode,
ParsedResource,
ParsedRPCNode,
ParsedSchemaTestNode,
ParsedSeedNode,
ParsedSnapshotNode,
ParsedSourceDefinition,
SeedConfig,
TestConfig,
)
from dbt.node_types import NodeType
from dbt.contracts.util import Replaceable
from dbt.exceptions import RuntimeException
from hologram import JsonSchemaMixin
from dataclasses import dataclass, field
import sqlparse # type: ignore
from typing import Optional, List, Union, Dict, Type
@dataclass
class InjectedCTE(JsonSchemaMixin, Replaceable):
id: str
sql: str
# for some frustrating reason, we can't subclass from ParsedNode directly,
# or typing.Union will flatten CompiledNode+ParsedNode into just ParsedNode.
# TODO: understand that issue and come up with some way for these two to share
# logic
@dataclass
class CompiledNode(ParsedNode):
compiled: bool = False
compiled_sql: Optional[str] = None
extra_ctes_injected: bool = False
extra_ctes: List[InjectedCTE] = field(default_factory=list)
injected_sql: Optional[str] = None
def prepend_ctes(self, prepended_ctes: List[InjectedCTE]):
self.extra_ctes_injected = True
self.extra_ctes = prepended_ctes
if self.compiled_sql is None:
raise RuntimeException(
'Cannot prepend ctes to an unparsed node', self
)
self.injected_sql = _inject_ctes_into_sql(
self.compiled_sql,
prepended_ctes,
)
self.validate(self.to_dict())
def set_cte(self, cte_id: str, sql: str):
"""This is the equivalent of what self.extra_ctes[cte_id] = sql would
do if extra_ctes were an OrderedDict
"""
for cte in self.extra_ctes:
if cte.id == cte_id:
cte.sql = sql
break
else:
self.extra_ctes.append(InjectedCTE(id=cte_id, sql=sql))
@dataclass
class CompiledAnalysisNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Analysis]})
@dataclass
class CompiledHookNode(CompiledNode):
resource_type: NodeType = field(
metadata={'restrict': [NodeType.Operation]}
)
index: Optional[int] = None
@dataclass
class CompiledModelNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Model]})
@dataclass
class CompiledRPCNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.RPCCall]})
@dataclass
class CompiledSeedNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Seed]})
config: SeedConfig = field(default_factory=SeedConfig)
@property
def empty(self):
""" Seeds are never empty"""
return False
@dataclass
class CompiledSnapshotNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Snapshot]})
@dataclass
class CompiledDataTestNode(CompiledNode):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
config: TestConfig = field(default_factory=TestConfig)
@dataclass
class CompiledSchemaTestNode(CompiledNode, HasTestMetadata):
resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
column_name: Optional[str] = None
config: TestConfig = field(default_factory=TestConfig)
CompiledTestNode = Union[CompiledDataTestNode, CompiledSchemaTestNode]
def _inject_ctes_into_sql(sql: str, ctes: List[InjectedCTE]) -> str:
"""
`ctes` is a list of InjectedCTEs like:
[
InjectedCTE(
id="cte_id_1",
sql="__dbt__CTE__ephemeral as (select * from table)",
),
InjectedCTE(
id="cte_id_2",
sql="__dbt__CTE__events as (select id, type from events)",
),
]
Given `sql` like:
"with internal_cte as (select * from sessions)
select * from internal_cte"
This will spit out:
"with __dbt__CTE__ephemeral as (select * from table),
__dbt__CTE__events as (select id, type from events),
with internal_cte as (select * from sessions)
select * from internal_cte"
(Whitespace enhanced for readability.)
"""
if len(ctes) == 0:
return sql
parsed_stmts = sqlparse.parse(sql)
parsed = parsed_stmts[0]
with_stmt = None
for token in parsed.tokens:
if token.is_keyword and token.normalized == 'WITH':
with_stmt = token
break
if with_stmt is None:
# no with stmt, add one, and inject CTEs right at the beginning
first_token = parsed.token_first()
with_stmt = sqlparse.sql.Token(sqlparse.tokens.Keyword, 'with')
parsed.insert_before(first_token, with_stmt)
else:
# stmt exists, add a comma (which will come after injected CTEs)
trailing_comma = sqlparse.sql.Token(sqlparse.tokens.Punctuation, ',')
parsed.insert_after(with_stmt, trailing_comma)
token = sqlparse.sql.Token(
sqlparse.tokens.Keyword,
", ".join(c.sql for c in ctes)
)
parsed.insert_after(with_stmt, token)
return str(parsed)
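# A quick illustration of the behaviour documented above (ids/SQL invented):
#   _inject_ctes_into_sql(
#       "select * from internal_cte",
#       [InjectedCTE(id="cte_1", sql="__dbt__CTE__x as (select 1 as id)")],
#   )
# returns roughly:
#   "with __dbt__CTE__x as (select 1 as id) select * from internal_cte"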
PARSED_TYPES: Dict[Type[CompiledNode], Type[ParsedResource]] = {
CompiledAnalysisNode: ParsedAnalysisNode,
CompiledModelNode: ParsedModelNode,
CompiledHookNode: ParsedHookNode,
CompiledRPCNode: ParsedRPCNode,
CompiledSeedNode: ParsedSeedNode,
CompiledSnapshotNode: ParsedSnapshotNode,
CompiledDataTestNode: ParsedDataTestNode,
CompiledSchemaTestNode: ParsedSchemaTestNode,
}
COMPILED_TYPES: Dict[Type[ParsedResource], Type[CompiledNode]] = {
ParsedAnalysisNode: CompiledAnalysisNode,
ParsedModelNode: CompiledModelNode,
ParsedHookNode: CompiledHookNode,
ParsedRPCNode: CompiledRPCNode,
ParsedSeedNode: CompiledSeedNode,
ParsedSnapshotNode: CompiledSnapshotNode,
ParsedDataTestNode: CompiledDataTestNode,
ParsedSchemaTestNode: CompiledSchemaTestNode,
}
# for some types, the compiled type is the parsed type, so make this easy
CompiledType = Union[Type[CompiledNode], Type[ParsedResource]]
CompiledResource = Union[ParsedResource, CompiledNode]
def compiled_type_for(parsed: ParsedNode) -> CompiledType:
if type(parsed) in COMPILED_TYPES:
return COMPILED_TYPES[type(parsed)]
else:
return type(parsed)
def parsed_instance_for(compiled: CompiledNode) -> ParsedResource:
cls = PARSED_TYPES.get(type(compiled))
if cls is None:
# how???
raise ValueError('invalid resource_type: {}'
.format(compiled.resource_type))
# validate=False to allow extra keys from compiling
return cls.from_dict(compiled.to_dict(), validate=False)
NonSourceCompiledNode = Union[
CompiledAnalysisNode,
CompiledDataTestNode,
CompiledModelNode,
CompiledHookNode,
CompiledRPCNode,
CompiledSchemaTestNode,
CompiledSeedNode,
CompiledSnapshotNode,
]
NonSourceParsedNode = Union[
ParsedAnalysisNode,
ParsedDataTestNode,
ParsedHookNode,
ParsedModelNode,
ParsedRPCNode,
ParsedSchemaTestNode,
ParsedSeedNode,
ParsedSnapshotNode,
]
# This is anything that can be in manifest.nodes.
NonSourceNode = Union[
NonSourceCompiledNode,
NonSourceParsedNode,
]
# We allow either parsed or compiled nodes, or parsed sources, as some
# 'compile()' calls in the runner actually just return the original parsed
# node they were given.
CompileResultNode = Union[
NonSourceNode,
ParsedSourceDefinition,
]
|
nathankrueger/ncmd
|
ncmd_server.py
|
Python
|
gpl-2.0
| 4,429 | 0.031836 |
#!/usr/bin/python
import socket
import os
import time
import shutil
import sys
import re
import datetime
import argparse
# NCMD Libs
import ncmd_print as np
from ncmd_print import MessageLevel as MessageLevel
import ncmd_commands as ncmds
import ncmd_fileops as nfops
MAX_TRANSFER_BYTES=2048
QUIT_CMD = "quit now"
HOST = ""
PORT = 10123
ROOT_DIR_PATH = "/share/CACHEDEV1_DATA"
# Set up the server socket
def bindServerSocket(port):
server_sock = None
try:
server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_sock.bind((HOST, port))
np.print_msg("Successfully bound server socket to port:{0}".format(PORT), MessageLevel.INFO)
except Exception as err:
np.print_msg("Failed to bind server socket to port:{0}".format(PORT), MessageLevel.ERROR)
server_sock = None
return server_sock
# Accept incoming socket connections
def acceptConnection(server_sock):
server_sock.listen(1)
conn, addr = server_sock.accept()
return (conn, addr)
# Validate a path against the server mount
def validatePath(path, server_mnt):
result = False
# Paths beginning with the server mount are considered 'valid'
if path.find(server_mnt) == 0:
result = True
return result
# Validate source / destination paths
def validatePaths(paths, server_mnt):
result = True
for path in paths:
if not validatePath(path, server_mnt):
result = False
break
return result
# Deal with generating the appropriate response for a command
def processResponse(ncmd, success):
nresp = ''
if ncmds.getCommandBlock(ncmd):
if success:
nresp = ncmds.genCmdSuccessResp(ncmd)
else:
nresp = ncmds.genCmdFailureResp(ncmd)
else:
pass # No response for non-blocking
return nresp
# Handle the current command string -- the actual file operations occur here
def processCmd(ncmd, args):
quit = False
cmd_success = True
np.print_msg("Received command: {0}".format(ncmd), MessageLevel.INFO)
dest = ncmds.getCommandDest(ncmd)
srcs = ncmds.getCommandSrcs(ncmd)
if ncmds.isQuitSequence(ncmd):
quit = True
else:
if args.validate_server_mount:
srcs_valid = validatePaths(srcs, args.validate_server_mount)
dest_valid = validatePath(dest, args.validate_server_mount)
cmd_success = srcs_valid and dest_valid
# Only try and conduct file operations when validation is disabled,
# or if validation is enabled, and it passes.
if cmd_success:
if ncmds.isMove(ncmd):
for src in srcs:
if not nfops.move(src, dest):
cmd_success = False
elif ncmds.isCopy(ncmd):
for src in srcs:
if not nfops.copy(src, dest):
cmd_success = False
            elif ncmds.isRemove(ncmd):
# The naming here isn't ideal, but this code gets the job done!
for src in srcs:
if not nfops.remove(src):
cmd_success = False
if not nfops.remove(dest):
cmd_success = False
return quit, cmd_success
# Deal with the current connection, getting, sending, and closing
def processConnection(conn, args):
    ncmd = conn.recv(ncmds.MAX_CMD_SIZE)
quit, cmd_success = processCmd(ncmd, args)
resp = processResponse(ncmd, cmd_success)
if len(resp) > 0:
try:
conn.send(resp)
except Exception as err:
            np.print_msg(err, MessageLevel.ERROR)
conn.close()
return quit
def getArgs():
parser = argparse.ArgumentParser(description='Copy, move, remove quickly on a remotely mounted folder.')
parser.add_argument('--port', type=int, help='Specify a custom port.')
parser.add_argument('--validate_server_mount', type=str, help='Specify a mount on the server to validate incoming paths against.')
return parser.parse_args()
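# Example invocation (illustrative values; both flags are optional):
#   ./ncmd_server.py --port 10123 --validate_server_mount /share/CACHEDEV1_DATA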
def main():
# Get the port
args = getArgs()
server_port = PORT
if args.port:
server_port = args.port
# Bind the sever socket
server_sock = bindServerSocket(server_port)
if server_sock:
while True:
conn = None
try:
conn, addr = acceptConnection(server_sock)
np.print_msg("Successfully connected to client: {0}:{1}".format(addr[0], PORT), MessageLevel.INFO)
except socket.error as msg:
np.print_msg(msg, MessageLevel.ERROR)
conn = None
if conn:
quit = processConnection(conn, args)
if quit:
np.print_msg("Server shutdown requested @ {0}...".format(datetime.datetime.now()), MessageLevel.INFO)
break
# Keep this at the end for safety!
if server_sock:
server_sock.close()
if __name__ == '__main__':
main()
|
chris-statzer/knuckle-python
|
game/game_state.py
|
Python
|
mit
| 339 | 0 |
import knuckle
class GameState(knuckle.State):
def on_keyup(self, e):
pass
    def on_keydown(self, e):
if e == 'Escape':
self.window.pop_state()
def on_draw(self):
self.window.clear()
self.batch.draw()
self.window.flip()
def __str__(self):
return 'GameState()'
|
SheffieldML/GPyOpt
|
GPyOpt/core/task/cost.py
|
Python
|
bsd-3-clause
| 2,686 | 0.00484 |
# Copyright (c) 2016, the GPyOpt Authors
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from ...models import GPModel
import numpy as np
class CostModel(object):
"""
Class to handle the cost of evaluating the function.
param cost_withGradients: function that returns the cost of evaluating the function and its gradient. By default
no cost is used. Options are:
- cost_withGradients is some pre-defined cost function. Should return numpy array as outputs.
- cost_withGradients = 'evaluation_time'.
.. Note:: if cost_withGradients = 'evaluation time' the evaluation time of the function is used to model a GP whose
mean is used as cost.
"""
def __init__(self, cost_withGradients):
super(CostModel, self).__init__()
self.cost_type = cost_withGradients
# --- Set-up evaluation cost
if self.cost_type is None:
self.cost_withGradients = constant_cost_withGradients
self.cost_type = 'Constant cost'
elif self.cost_type == 'evaluation_time':
self.cost_model = GPModel()
self.cost_withGradients = self._cost_gp_withGradients
self.num_updates = 0
else:
self.cost_withGradients = cost_withGradients
self.cost_type = 'User defined cost'
def _cost_gp(self,x):
"""
Predicts the time cost of evaluating the function at x.
"""
m, _, _, _ = self.cost_model.predict_withGradients(x)
return np.exp(m)
def _cost_gp_withGradients(self,x):
"""
Predicts the time cost and its gradient of evaluating the function at x.
"""
m, _, dmdx, _= self.cost_model.predict_withGradients(x)
return np.exp(m), np.exp(m)*dmdx
def update_cost_model(self, x, cost_x):
"""
Updates the GP used to handle the cost.
param x: input of the GP for the cost model.
param x_cost: values of the time cost at the input locations.
"""
if self.cost_type == 'evaluation_time':
cost_evals = np.log(np.atleast_2d(np.asarray(cost_x)).T)
if self.num_updates == 0:
X_all = x
costs_all = cost_evals
else:
X_all = np.vstack((self.cost_model.model.X,x))
                costs_all = np.vstack((self.cost_model.model.Y, cost_evals))
self.num_updates += 1
            self.cost_model.updateModel(X_all, costs_all, None, None)
def constant_cost_withGradients(x):
"""
Constant cost function used by default: cost = 1, d_cost = 0.
"""
return np.ones(x.shape[0])[:,None], np.zeros(x.shape)
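# Usage sketch (illustrative): model evaluation time with the built-in GP.
#   cost = CostModel('evaluation_time')
#   cost.update_cost_model(X_batch, eval_times)  # fits a GP to log evaluation times
#   c, dc = cost.cost_withGradients(x_new)       # predicted cost and its gradient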
|
firstflamingo/python_utilities
|
markup.py
|
Python
|
apache-2.0
| 13,400 | 0.007687 |
# coding=utf-8
#
# Copyright (c) 2011-2015 First Flamingo Enterprise B.V.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# markup.py
# firstflamingo/python_utilities
#
# Created by Berend Schotanus on 23-Nov-2011.
#
from google.appengine.api import users
import logging
import xml.sax
# ====== Generic XML Classes ===========================================================================================
class XMLElement:
OPEN_TEMPLATE = '<%s>'
CLOSE_TEMPLATE = '</%s>'
SELFCLOSING_TEMPLATE = '<%s/>'
ATTRIBUTE_TEMPLATE = '%s="%s"'
def __init__(self, type, attributes=None, content=None):
self.type = type
if attributes == None:
self.attributes = {}
else:
self.attributes = attributes
if content == None:
self.content = []
else:
self.content = content
def set_attribute(self, key, value):
self.attributes[key] = value
def set_time(self, timeStamp):
self.set_attribute('time', rfc3339String(timeStamp))
def add(self, newContent):
self.content.append(newContent)
def write(self, depth=0, lf=False):
if depth > 10: raise Exception('exceeds max recurse depth %d' % depth)
list = [self.type]
for key, value in self.attributes.items():
list.append(XMLElement.ATTRIBUTE_TEMPLATE % (key, value))
attributed_type = ' '.join(list)
list = []
if self.content:
list.append(XMLElement.OPEN_TEMPLATE % attributed_type)
for element in self.content:
try:
theString = element.write(depth + 1, lf=lf)
list.append(theString)
except AttributeError:
list.append(unicode(element))
list.append(XMLElement.CLOSE_TEMPLATE % self.type)
if lf:
joinString = '\n' + depth * ' '
else:
joinString = ''
return joinString.join(list)
else:
return XMLElement.SELFCLOSING_TEMPLATE % attributed_type
class XMLDocument:
def __init__(self, name):
self.root = XMLElement(name)
def doctype(self):
return '<?xml version="1.0" encoding="UTF-8"?>'
def write(self, lf=False):
if lf:
joinString = '\n'
else:
joinString = ''
return joinString.join([self.doctype(), self.root.write(lf=lf)])
# ====== Functions creating XML elements ===============================================================================
def element_with_id(name, id):
return XMLElement(name, {'id': id})
def element_with_content(name, content):
return XMLElement(name, content=[content])
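# Usage sketch (illustrative): build and serialize a small document.
#   doc = XMLDocument('catalog')
#   item = element_with_id('item', 'i1')
#   item.add(element_with_content('title', 'Example'))
#   doc.root.add(item)
#   print doc.write(lf=True)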
# ====== XML Parser ====================================================================================================
class XMLImporter(xml.sax.handler.ContentHandler):
"""
xml.sax ContentHandler, intended to be subclassed
Compares an existing data set with imported xml data, creates a dictionary with changed objects
and a dictionary with objects that don't appear in the xml.
The actual reading of the data will be done in a subclass implementation of start_xml_element and end_xml_element
Fetched data must be (temporarily) stored in attributes of the Importer
Results must be saved in endDocument()
The following methods must be implemented in subclasses:
active_xml_tags()
existing_objects_dictionary()
key_for_current_object()
create_new_object(key)
start_xml_element(name, attrs)
end_xml_element(name)
update_object(existing_object)
"""
data = None
changes = False
old_objects = None
updated_objects = None
new_objects = None
def startDocument(self):
self.old_objects = self.existing_objects_dictionary()
self.updated_objects = {}
self.new_objects = {}
def endDocument(self):
self.save_objects()
def startElement(self, name, attrs):
self.data = []
self.start_xml_element(name, attrs)
def endElement(self, name):
if name in self.active_xml_tags():
key = self.key_for_current_object()
if key is not None:
current_object = self.pop_from_old_objects(key)
if not current_object:
current_object = self.create_new_object(key)
self.changes = False
self.update_object(current_object, name)
self.new_objects[key] = current_object
if self.changes:
self.updated_objects[key] = current_object
self.end_xml_element(name)
def characters(self, string):
self.data.append(string)
def pop_from_old_objects(self, key):
current_object = self.old_objects.get(key)
if current_object:
del self.old_objects[key]
else:
current_object = self.new_objects.get(key)
return current_object
def active_xml_tags(self):
"""
Provides the name of the xml element that encapsulates the objects that must be imported.
Must be overwritten in subclasses
"""
return None
def existing_objects_dictionary(self):
"""
Provides a dictionary with the objects that will be updated by the import.
Must be overwritten in subclasses
"""
return {}
def key_for_current_object(self):
"""
Provides the key to store the current object. If 'None' is returned the current object will be ignored.
Must be overwritten in subclasses
"""
return None
def create_new_object(self, key):
"""
Provides a new blank object, to be filled with the current import.
Must be overwritten in subclasses
"""
return None
def start_xml_element(self, name, attrs):
"""
Gives subclasses the opportunity to read data from the xml element
"""
pass
def end_xml_element(self, name):
"""
Gives subclasses the opportunity to read data from the xml element
"""
pass
def update_object(self, existing_object, name):
"""
Gives subclasses the opportunity to apply the imported data upon an existing (or newly created) object.
If changes are applied, self.changes must be set to True, for the changes te be saved.
Must be overwritten in subclasses
"""
pass
def save_objects(self):
"""
        Gives subclasses the opportunity to save the imported objects.
Must be overwritten in subclasses
"""
pass
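# Minimal subclass sketch (hypothetical names) showing the overrides listed in
# the XMLImporter docstring; a real importer would work with datastore models.
#   class StationImporter(XMLImporter):
#       def active_xml_tags(self):
#           return ['station']
#       def existing_objects_dictionary(self):
#           return dict((s.code, s) for s in Station.all())
#       def key_for_current_object(self):
#           return self.current_code
#       def create_new_object(self, key):
#           return Station(code=key)
#       def start_xml_element(self, name, attrs):
#           if name == 'station':
#               self.current_code = attrs.get('code')
#       def update_object(self, existing_object, name):
#           existing_object.name = ''.join(self.data).strip()
#           self.changes = True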
# ====== Generic HTML Classes ==========================================================================================
class HTMLDocument(XMLDocument):
def __init__(self, title, language='en', charset='UTF-8'):
XMLDocument.__init__(self, 'html')
        self.head = XMLElement('head')
self.head.add(title_tag(title))
self.head.add(meta('charset', charset))
self.root.add(self.head)
self.body = XMLElement('body')
self.root.add(self.body)
self.root.set_attribute('lang', language)
def doctype(self):
return '<!doctype html>'
class HTMLTable():
def __init__(self, name, columnTitles):
self.name = name
self.width = len(columnTitles)
self.titles = columnTitles
self.rows = []
def set_title(self, key, name):
self.titles[key] = name
def add_row(self):
|
zegnus/self-driving-car-machine-learning
|
p13-final-project/ros/src/twist_controller/dbw_test.py
|
Python
|
mit
| 3,850 | 0.002857 |
#!/usr/bin/env python
import os
import csv
import rospy
from std_msgs.msg import Bool
from dbw_mkz_msgs.msg import ThrottleCmd, SteeringCmd, BrakeCmd, SteeringReport
'''
You can use this file to test your DBW code against a bag recorded with a reference implementation.
The bag can be found at https://s3-us-west-1.amazonaws.com/udacity-selfdrivingcar/files/reference.bag.zip
To use the downloaded bag file, rename it to 'dbw_test.rosbag.bag' and place it in the CarND-Capstone/data folder.
Then with roscore running, you can then use roslaunch with the dbw_test.launch file found in
<project_repo>/ros/src/twist_controller/launch.
This file will produce 3 csv files which you can process to figure out how your DBW node is
performing on various commands.
`/actual/*` are commands from the recorded bag while `/vehicle/*` are the output of your node.
'''
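# Typical run (illustrative; assumes roscore is up and the reference bag has
# been renamed and placed as described above):
#   roslaunch twist_controller dbw_test.launch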
class DBWTestNode(object):
def __init__(self):
rospy.init_node('dbw_test_node')
rospy.Subscriber('/vehicle/steering_cmd', SteeringCmd, self.steer_cb)
rospy.Subscriber('/vehicle/throttle_cmd', ThrottleCmd, self.throttle_cb)
rospy.Subscriber('/vehicle/brake_cmd', BrakeCmd, self.brake_cb)
rospy.Subscriber('/actual/steering_cmd', SteeringCmd, self.actual_steer_cb)
rospy.Subscriber('/actual/throttle_cmd', ThrottleCmd, self.actual_throttle_cb)
rospy.Subscriber('/actual/brake_cmd', BrakeCmd, self.actual_brake_cb)
rospy.Subscriber('/vehicle/dbw_enabled', Bool, self.dbw_enabled_cb)
self.steer = self.throttle = self.brake = None
self.steer_data = []
self.throttle_data = []
self.brake_data = []
self.dbw_enabled = False
base_path = os.path.dirname(os.path.abspath(__file__))
self.steerfile = os.path.join(base_path, 'steers.csv')
self.throttlefile = os.path.join(base_path, 'throttles.csv')
self.brakefile = os.path.join(base_path, 'brakes.csv')
self.loop()
def loop(self):
rate = rospy.Rate(10) # 10Hz
while not rospy.is_shutdown():
rate.sleep()
fieldnames = ['actual', 'proposed']
with open(self.steerfile, 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
writer.writerows(self.steer_data)
with open(self.throttlefile, 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
writer.writerows(self.throttle_data)
with open(self.brakefile, 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
writer.writerows(self.brake_data)
def dbw_enabled_cb(self, msg):
self.dbw_enabled = msg.data
def steer_cb(self, msg):
self.steer = msg.steering_wheel_angle_cmd
def throttle_cb(self, msg):
        self.throttle = msg.pedal_cmd
def brake_cb(self, msg):
self.brake = msg.pedal_cmd
def actual_steer_cb(self, msg):
if self.dbw_enabled and self.steer is not None:
self.steer_data.append({'actual': msg.steering_wheel_angle_cmd,
'proposed': self.steer})
self.steer = None
def actual_throttle_cb(self, msg):
if self.dbw_enabled and self.throttle is not None:
self.throttle_data.append({'actual': msg.pedal_cmd,
'proposed': self.throttle})
self.throttle = None
def actual_brake_cb(self, msg):
if self.dbw_enabled and self.brake is not None:
self.brake_data.append({'actual': msg.pedal_cmd,
'proposed': self.brake})
self.brake = None
if __name__ == '__main__':
DBWTestNode()
|
praekelt/jmbo-music
|
music/tests/__init__.py
|
Python
|
bsd-3-clause
| 3,379 | 0 |
from django.test import TestCase
from django.conf import settings
from django.contrib.sites.models import Site
from django.db.models.query import QuerySet
from preferences import preferences
from music.models import TrackContributor, Credit, Track, Album, CreditOption
from music.utils import wikipedia, lastfm
class ScraperTestCase(TestCase):
@classmethod
def setUpClass(cls):
# Disable scraping
settings.JMBO_MUSIC['scrapers'] = []
# Bootstrap music preferences
prefs = preferences.MusicPreferences
prefs.save()
creditoption = CreditOption.objects.create(
music_preferences=prefs, role_type='artist', role_name='Artist',
role_priority=1
)
# Legitimate entries
artist = TrackContributor.objects.create(title="Oasis")
album = Album.objects.create(title="What's the story morning glory")
track = Track.objects.create(title="Don't look back in anger")
track.create_credit("Oasis", "artist")
track.album.add(album.id)
track.save()
cls.wikipedia_artist = artist
cls.wikipedia_album = album
cls.wikipedia_track = track
artist = TrackContributor.objects.create(title="Foo Fighters")
album = Album.objects.create(title="One By One")
track = Track.objects.create(title="All My Life")
track.create_credit("Foo Fighters", "artist")
track.album.add(album.id)
track.save()
cls.lastfm_artist = artist
cls.lastfm_album = album
cls.lastfm_track = track
# Illegitimate entries
artist = TrackContributor.objects.create(title="vgnfdnvnvfnsncfd")
album = Album.objects.create(title="tggbfbvfvf")
track = Track.objects.create(title="grfgrgeagteg")
track.create_credit("vgnfdnvnvfnsncfd", "artist")
track.album = [album]
track.save()
cls.iartist = artist
cls.ialbum = album
cls.itrack = track
def test_wikipedia(self):
        settings.JMBO_MUSIC['scrapers'] = ['wikipedia']
wikipedia(self.wikipedia_artist)
wikipedia(self.wikipedia_album)
wikipedia(self.wikipedia_track)
wikipedia(self.iartist)
wikipedia(self.ialbum)
wikipedia(self.itrack)
self.failUnless(self.wikipedia_artist.image)
self.failUnless(self.wikipedia_album.image)
self.failUnless(self.wikipedia_track.image)
self.failIf(self.iartist.image)
self.failIf(self.ialbum.image)
# Track is exempt because it always gets a default image
def test_lastfm(self):
# Abort test if no API key was set
try:
dc = settings.JMBO_MUSIC['lastfm_api_key']
dc = settings.JMBO_MUSIC['lastfm_api_secret']
except KeyError:
return
settings.JMBO_MUSIC['scrapers'] = ['lastfm']
lastfm(self.lastfm_artist)
lastfm(self.lastfm_album)
lastfm(self.lastfm_track)
lastfm(self.iartist)
lastfm(self.ialbum)
lastfm(self.itrack)
self.failUnless(self.lastfm_artist.image)
self.failUnless(self.lastfm_album.image)
self.failUnless(self.lastfm_track.image)
self.failIf(self.iartist.image)
self.failIf(self.ialbum.image)
# Track is exempt because it always gets a default image
|
Tomsod/gemrb
|
gemrb/GUIScripts/pst/MessageWindow.py
|
Python
|
gpl-2.0
| 3,926 | 0.033367 |
# -*-python-*-
# GemRB - Infinity Engine Emulator
# Copyright (C) 2003 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# MessageWindow.py - scripts and GUI for main (walk) window
###################################################
import GemRB
import GUIClasses
import GUICommon
import GUICommonWindows
import CommonWindow
import GUIWORLD
from GameCheck import MAX_PARTY_SIZE
from GUIDefines import *
MessageWindow = 0
ActionsWindow = 0
PortraitWindow = 0
OptionsWindow = 0
MessageTA = 0
def OnLoad():
global MessageWindow, ActionsWindow, PortraitWindow, OptionsWindow
GemRB.GameSetPartySize(MAX_PARTY_SIZE)
GemRB.GameSetProtagonistMode(0)
GemRB.LoadWindowPack (GUICommon.GetWindowPack())
GemRB.SetInfoTextColor(0,255,0,255)
ActionsWindow = GemRB.LoadWindow(0)
OptionsWindow = GemRB.LoadWindow(2)
MessageWindow = GemRB.LoadWindow(7)
PortraitWindow = GUICommonWindows.OpenPortraitWindow (1)
MessageTA = MessageWindow.GetControl (1)
MessageTA.SetFlags (IE_GUI_TEXTAREA_AUTOSCROLL|IE_GUI_TEXTAREA_HISTORY)
GemRB.SetVar ("MessageTextArea", MessageTA.ID)
GemRB.SetVar ("ActionsWindow", ActionsWindow.ID)
GemRB.SetVar ("OptionsWindow", OptionsWindow.ID)
GemRB.SetVar ("MessageWindow", -1)
GemRB.SetVar ("OtherWindow", -1)
GemRB.SetVar ("ActionsPosition", 1) #Bottom
GemRB.SetVar ("OptionsPosition", 1) #Bottom
GemRB.SetVar ("MessagePosition", 1) #Bottom
GemRB.SetVar ("OtherPosition", 0) #Left
GemRB.GameSetScreenFlags (0, OP_SET)
CloseButton= MessageWindow.GetControl (0)
CloseButton.SetText(28082)
CloseButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CommonWindow.OnDecreaseSize)
CloseButton.SetFlags (IE_GUI_BUTTON_DEFAULT | IE_GUI_BUTTON_MULTILINE, OP_OR)
OpenButton = OptionsWindow.GetControl (10)
OpenButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CommonWindow.OnIncreaseSize)
# Select all
Button = ActionsWindow.GetControl (1)
Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, GUICommon.SelectAllOnPress)
	# Stop current action
Button = ActionsWindow.GetControl (3)
Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, GUICommonWindows.ActionStopPressed)
FormationButton = ActionsWindow.GetControl (4)
FormationButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, GUIWORLD.OpenFormationWindow)
GUICommonWindows.SetupClockWindowControls (ActionsWindow)
GUICommonWindows.SetupMenuWindowControls (OptionsWindow)
UpdateControlStatus ()
def UpdateControlStatus ():
global MessageWindow, PortraitWindow, ActionsWindow, OptionsWindow, MessageTA
Expand = GemRB.GetMessageWindowSize() & (GS_DIALOGMASK|GS_DIALOG)
hideflags = GemRB.HideGUI ()
if Expand:
GemRB.SetVar ("MessageWindow", MessageWindow.ID)
GemRB.SetVar ("PortraitWindow", -1)
GemRB.SetVar ("ActionsWindow", -1)
GemRB.SetVar ("OptionsWindow", -1)
MessageTA = GUIClasses.GTextArea(MessageWindow.ID, GemRB.GetVar ("MessageTextArea"))
MessageTA.SetStatus (IE_GUI_CONTROL_FOCUSED)
Label = MessageWindow.GetControl (0x10000003)
Label.SetText (str (GemRB.GameGetPartyGold ()))
else:
GemRB.SetVar ("MessageWindow", -1)
GemRB.SetVar ("PortraitWindow", PortraitWindow.ID)
GemRB.SetVar ("ActionsWindow", ActionsWindow.ID)
GemRB.SetVar ("OptionsWindow", OptionsWindow.ID)
GUICommon.GameControl.SetStatus(IE_GUI_CONTROL_FOCUSED)
if hideflags:
GemRB.UnhideGUI ()
|
ckan/ckanext-deadoralive
|
ckanext/deadoralive/tests/test_config.py
|
Python
|
agpl-3.0
| 572 | 0 |
import ckanext.deadoralive.config as config
import ckanext.deadoralive.tests.helpers as custom_helpers
class TestConfig(custom_helpers.FunctionalTestBaseClass):
def test_that_it_reads_settings_from_config_file(self):
"""Test that non-default config settings in the config file work."""
        # These non-default settings are in the test.ini config file.
assert config.recheck_resources_after == 48
assert config.resend_pending_resources_after == 12
# TODO: Test falling back on defaults when there's nothing in the config
# file.
|
SpeedMe/leihuang.org
|
config.py
|
Python
|
apache-2.0
| 665 | 0.022556 |
# -*- coding: utf-8 -*-
import os
basedir=os.path.abspath(os.path.dirname(__file__))#get basedir of the project
WTF_CSRF_ENABLED = True
SECRET_KEY = 'you-will-guess'
#for database
# SQLALCHEMY_DATABASE_URI = 'mysql:///' + os.path.join(basedir, 'app.db')
SQLALCHEMY_DATABASE_URI = "mysql://username:password@server_ip:port/database_name"
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
#for upload pic
UPLOAD_FOLDER = basedir + '/uploads/'  # should use basedir
MAX_CONTENT_LENGTH=2*1024*1024
ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg'])  # TODO: make user aware
#for upload excel
UPLOAD_EXCEL = basedir+'/app/static/add_info/' #should use basedir
|
markdrago/banter
|
banter/banter.py
|
Python
|
mit
| 3,421 | 0.004969 |
from __future__ import print_function
import readline
import sys
import argparse
from . import crucible, config, config_ui, utils, patch
def main():
parser = argparse.ArgumentParser(description='Create Code Reviews')
parser.add_argument('--setup', action='store_true', help='setup banter configuration')
parser.add_argument('-t', '--title', help="set title of new review")
parser.add_argument("-r", "--reviewers", help="set reviewers of new review")
parser_results = vars(parser.parse_args())
if parser_results['setup']:
setup()
else:
return create_review(title=parser_results['title'],
reviewers=parser_results['reviewers'])
def create_review(title='', reviewers=''):
conf = load_config()
if conf is None:
return 1
crucible_url = conf.get_value('crucible', 'url')
crucible_conn = crucible.Crucible(crucible_url)
username = conf.get_value('crucible', 'username')
auth_token = conf.get_value('crucible', 'token')
project_key = conf.get_value('crucible', 'project_key')
reviewers = reviewers or conf.get_value('crucible', 'reviewers')
diff = patch.clean(sys.stdin.read())
review_id = do_create_review(crucible_conn, username, auth_token, project_key, diff, title)
if review_id == -1:
return review_id
add_reviewers(crucible_conn, auth_token, review_id, reviewers)
print(utils.combine_url_components(crucible_url, "cru", review_id))
def do_create_review(crucible_conn, username, auth_token, project_key, diff, title=''):
parameters = {
'allow_reviewers_to_join': True,
'author': username,
'description': '',
'name': title,
'project_key': project_key,
'patch': diff
}
resp = crucible_conn.create_review(auth_token, **parameters)
if resp.status_code == 200 or resp.status_code == 201:
return resp.json()['permaId']['id']
sys.stderr.write("Got " + str(resp.status_code) + " HTTP code from server!\n")
return -1
def add_reviewers(crucible_conn, auth_token, review_id, reviewers):
if reviewers is not None and reviewers != "":
        reviewer_list = [r.strip() for r in reviewers.split(',')]
r = crucible_conn.add_reviewers(auth_token, review_id, reviewer_list)
def setup():
conf = config.Config()
conf.load_from_file()
updated_conf = config_ui.get_config_from_user(conf.as_dict())
set_crucible_token(updated_conf)
conf.set_from_dict(updated_conf)
conf.save()
def set_crucible_token(conf):
# get crucible token and forget crucible password
crucible_conn = crucible.Crucible(conf['crucible']['url'])
token = crucible_conn.get_auth_token(conf['crucible']['username'], conf['crucible']['password'])
conf['crucible']['token'] = token
del conf['crucible']['password']
def load_config():
"""load config, check for required fields, print error if any are missing"""
conf = config.Config()
conf.load_from_file()
if not has_all_required_fields(conf):
print("Your configuration is incomplete, please run 'banter setup' to get that fixed up")
return None
return conf
def has_all_required_fields(conf):
for field in ('url', 'username', 'token', 'project_key'):
if conf.get_value('crucible', field) is None:
return False
return True
if __name__ == '__main__':
sys.exit(main())
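A hedged sketch of the same flow driven programmatically (the URL, token, and project key are placeholders; the import paths assume the package layout shown in the header, and crucible.Crucible's methods are inferred only from their call sites above):
from banter import crucible, patch
from banter.banter import do_create_review, add_reviewers

conn = crucible.Crucible('https://crucible.example.com')  # placeholder URL
diff = patch.clean(open('change.diff').read())            # placeholder diff file
review_id = do_create_review(conn, 'alice', 'TOKEN', 'CR', diff, title='Fix bug')
if review_id != -1:
    add_reviewers(conn, 'TOKEN', review_id, 'bob, carol')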
|
nict-isp/uds-sdk
|
uds/data/check.py
|
Python
|
gpl-2.0
| 4,491 | 0.004035 |
# -*- coding: utf-8 -*-
"""
uds.data.check
~~~~~~~~~~~~~~

:copyright: Copyright (c) 2015, National Institute of Information and Communications Technology. All rights reserved.
:license: GPL2, see LICENSE for more details.
"""
import re
import datetime
import dateutil.parser
import pytz
import uds.logging
from uds.data import M2MDataVisitor
class M2MDataChecker(M2MDataVisitor):
"""M2MDataChecker check validity of M2M Data object.
"""
def __init__(self):
pass
def visit_v101(self, m2m_data):
"""Check v1.01 M2M Data.
:param M2MDataV101 m2m_data: Check target
:return: If the target is valid, return true, else return False.
:rtype: :class:`bool`
"""
device_info = m2m_data.device_info
# Check info schema
if 'longitude' not in device_info or device_info['longitude'] is None:
uds.logging.error('[check] M2M Data schema is invalid. longitude is not in device_info.')
return False
if 'latitude' not in device_info or device_info['latitude'] is None:
uds.logging.error('[check] M2M Data schema is invalid. latitude is not in device_info.')
return False
# Check info values
if self._check_geo_point(m2m_data.device_info['longitude'], m2m_data.device_info['latitude']) is False:
return False
for datum in m2m_data.data_values:
# Check datum schema
if 'time' not in datum or datum['time'] is None:
uds.logging.error('[check] M2M Data schema is invalid. time is none.')
return False
# Check datum values
if self._check_time(datum['time'], m2m_data.dict['primary']['timezone']) is False:
return False
return True
def visit_v102(self, m2m_data):
"""Check v1.02 M2M Data.
:param M2MDataV102 m2m_data: Check target
:return: If the target is valid, return true, else return False.
:rtype: :class:`bool`
"""
# Check info schema
# => nothing to do
for datum in m2m_data.data_values:
# Check datum schema
if 'time' not in datum or datum['time'] is None:
uds.logging.error('[check] M2M Data schema is invalid. time is none.')
return False
if 'longitude' not in datum or datum['longitude'] is None:
uds.logging.error('[check] M2M Data schema is invalid. longitude is none.')
return False
if 'latitude' not in datum or datum['latitude'] is None:
uds.logging.error('[check] M2M Data schema is invalid. latitude is none.')
return False
# Check datum values
if self._check_geo_point(datum['longitude'], datum['latitude']) is False:
return False
if self._check_time(datum['time'], m2m_data.dict['primary']['timezone']) is False:
return False
return True
@staticmethod
def _check_geo_point(longitude, latitude):
# Check whether longitude and latitude is within validity range.
if longitude < -180 or 180 < longitude or latitude < -90 or 90 < latitude:
uds.logging.error('[check] Geo point range is invalid. longitude or latitude is out of range.')
return False
if longitude == 0 and latitude == 0:
uds.logging.error('[check] Geo point range is invalid. longitude=latitude=0.')
return False
return True
@staticmethod
def _check_time(time, offset):
if offset is None:
uds.logging.error('[check] timezone is none.')
return False
# Check whether sensor time is earlier than current time.
try:
sensor_time = dateutil.parser.parse(time+offset) # sensor time
now_time = pytz.utc.localize(datetime.datetime.utcnow()) # current time
except Exception as e:
uds.logging.error(
'[check] time or timezone format is invalid. time={0}, timezone={1}, parse_error={2}'.format(
str(time), str(offset), str(e)))
return False
        # Treat sensor times more than 10 minutes in the future as an error.
if (now_time - sensor_time) > datetime.timedelta(minutes=-10):
return True
else:
uds.logging.error('[check] Sensing time is out of range.')
return False
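A short sketch exercising the two static validators directly (coordinates and timestamp are arbitrary; the visitor entry points need full M2M Data objects, which are out of scope here):
# Valid Tokyo coordinates pass; (0, 0) is rejected by design.
assert M2MDataChecker._check_geo_point(139.69, 35.69) is True
assert M2MDataChecker._check_geo_point(0, 0) is False

# A past timestamp with an explicit offset satisfies the
# "at most 10 minutes in the future" rule.
assert M2MDataChecker._check_time('2015-01-01T00:00:00', '+09:00') is True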
|
chiesax/sandbox
|
sandbox/install_project/venv_inspect.py
|
Python
|
mit
| 1,009 | 0.003964 |
# -*- coding: utf-8 -*-
"""
Created by chiesa on 18.01.16
Copyright 2015 Alpes Lasers SA, Neuchatel, Switzerland
"""
import json
import subprocess
from tempfile import NamedTemporaryFile
__author__ = 'chiesa'
__copyright__ = "Copyright 2015, Alpes Lasers SA"
def get_entry_points(venv_python_path, project_name):
    f = NamedTemporaryFile(delete=False)
f.write('import pkg_resources\n')
f.write('import json\n\n')
    f.write('print json.dumps(pkg_resources.get_entry_map(\'{0}\').get(\'console_scripts\', {{}}).keys())\n'.format(project_name))
f.close()
return json.loads(subprocess.check_output([venv_python_path, f.name]))
def get_project_version(venv_python_path, project_name):
f = NamedTemporaryFile(delete=False)
f.write('import pkg_resources\n')
f.write('import json\n\n')
f.write('print json.dumps(pkg_resources.get_distribution(\'{0}\').version)\n'.format(project_name))
f.close()
return json.loads(subprocess.check_output([venv_python_path, f.name]))
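A brief usage sketch (the interpreter path and project name are placeholders; note the generated snippets use print statements, so the inspected virtualenv must run Python 2):
venv_py = '/opt/venvs/myproj/bin/python'  # placeholder path
print(get_entry_points(venv_py, 'myproj'))     # e.g. [u'myproj-cli']
print(get_project_version(venv_py, 'myproj'))  # e.g. u'1.2.3'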
|
cozy/python_cozy_management
|
cozy_management/helpers.py
|
Python
|
lgpl-3.0
| 2,238 | 0.000447 |
'''
Some helpers
'''
import os
import pwd
import time
import requests
import subprocess
def get_uid(username):
return int(pwd.getpwnam(username).pw_uid)
def file_rights(filepath, mode=None, uid=None, gid=None):
'''
Change file rights
'''
file_handle = os.open(filepath, os.O_RDONLY)
if mode:
os.fchmod(file_handle, mode)
if uid:
if not gid:
gid = 0
os.fchown(file_handle, uid, gid)
os.close(file_handle)
def cmd_exec(cmd, show_output=False):
if show_output:
p = subprocess.Popen(cmd, shell=True, close_fds=True)
stdout, stderr = p.communicate()
return p.returncode
else:
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate()
return {
'error': p.returncode,
'stdout': stdout,
'stderr': stderr
}
def array_2_str(array):
return ''.join(array)
def get_ip_addresses():
result = cmd_exec('hostname -I')
return result['stdout'].split(' ')[:-1]
def wait_http(url, ok_message, interval=10):
couchdb_status = False
while not couchdb_status:
try:
requests.get(url)
couchdb_status = True
print ok_message
except requests.exceptions.ConnectionError, e:
print e
time.sleep(interval)
def wait_couchdb(interval=10):
wait_http('http://127.0.0.1:5984/', 'CouchDB OK', interval)
def wait_cozy_controller(interval=10):
wait_http('http://127.0.0.1:9002/', 'Cozy controller OK', interval)
def wait_cozy_datasystem(interval=10):
    wait_http('http://127.0.0.1:9101/', 'Cozy data system OK', interval)
def wait_cozy_home(interval=10):
wait_http('http://127.0.0.1:9103/', 'Cozy home OK', interval)
def wait_cozy_proxy(interval=10):
    wait_http('http://127.0.0.1:9104/', 'Cozy proxy OK', interval)
def wait_cozy_stack(interval=10):
wait_couchdb(interval)
wait_cozy_controller(interval)
    wait_cozy_datasystem(interval)
wait_cozy_home(interval)
wait_cozy_proxy(interval)
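A short usage sketch of the helpers above (the token path and user name are hypothetical; Python 2 idioms match the rest of the module):
result = cmd_exec('hostname -I')
if result['error'] == 0:
    print result['stdout']

# Restrict a token file to a dedicated user (hypothetical path and user).
file_rights('/etc/cozy/controller.token', mode=0600, uid=get_uid('cozy'))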
|
compstak/selenium
|
py/selenium/webdriver/remote/command.py
|
Python
|
apache-2.0
| 5,188 | 0.000771 |
# Copyright 2010 WebDriver committers
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Command(object):
"""
Defines constants for the standard WebDriver commands.
While these constants have no meaning in and of themselves, they are
used to marshal commands through a service that implements WebDriver's
remote wire protocol:
http://code.google.com/p/selenium/wiki/JsonWireProtocol
"""
# Keep in sync with org.openqa.selenium.remote.DriverCommand
STATUS = "status"
NEW_SESSION = "newSession"
GET_ALL_SESSIONS = "getAllSessions"
DELETE_SESSION = "deleteSession"
CLOSE = "close"
QUIT = "quit"
GET = "get"
GO_BACK = "goBack"
GO_FORWARD = "goForward"
REFRESH = "refresh"
ADD_COOKIE = "addCookie"
GET_COOKIE = "getCookie"
GET_ALL_COOKIES = "getCookies"
DELETE_COOKIE = "deleteCookie"
DELETE_ALL_COOKIES = "deleteAllCookies"
FIND_ELEMENT = "findElement"
FIND_ELEMENTS = "findElements"
FIND_CHILD_ELEMENT = "findChildElement"
FIND_CHILD_ELEMENTS = "findChildElements"
CLEAR_ELEMENT = "clearElement"
CLICK_ELEMENT = "clickElement"
SEND_KEYS_TO_ELEMENT = "sendKeysToElement"
SEND_KEYS_TO_ACTIVE_ELEMENT = "sendKeysToActiveElement"
SUBMIT_ELEMENT = "submitElement"
UPLOAD_FILE = "uploadFile"
GET_CURRENT_WINDOW_HANDLE = "getCurrentWindowHandle"
GET_WINDOW_HANDLES = "getWindowHandles"
GET_WINDOW_SIZE = "getWindowSize"
GET_WINDOW_POSITION = "getWindowPosition"
SET_WINDOW_SIZE = "setWindowSize"
SET_WINDOW_POSITION = "setWindowPosition"
SWITCH_TO_WINDOW = "switchToWindow"
SWITCH_TO_FRAME = "switchToFrame"
SWITCH_TO_PARENT_FRAME = "switchToParentFrame"
GET_ACTIVE_ELEMENT = "getActiveElement"
GET_CURRENT_URL = "getCurrentUrl"
    GET_PAGE_SOURCE = "getPageSource"
GET_TITLE = "getTitle"
EXECUTE_SCRIPT = "executeScript"
SET_BROWSER_VISIBLE = "setBrowserVisible"
IS_BROWSER_VISIBLE = "isBrowserVisible"
GET_ELEMENT_TEXT = "getElementText"
GET_ELEMENT_VALUE = "getElementValue"
GET_ELEMENT_TAG_NAME = "getElementTagName"
SET_ELEMENT_SELECTED = "setElementSelected"
IS_ELEMENT_SELECTED = "isElementSelected"
IS_ELEMENT_ENABLED = "isElementEnabled"
    IS_ELEMENT_DISPLAYED = "isElementDisplayed"
GET_ELEMENT_LOCATION = "getElementLocation"
GET_ELEMENT_LOCATION_ONCE_SCROLLED_INTO_VIEW = "getElementLocationOnceScrolledIntoView"
GET_ELEMENT_SIZE = "getElementSize"
GET_ELEMENT_ATTRIBUTE = "getElementAttribute"
GET_ELEMENT_VALUE_OF_CSS_PROPERTY = "getElementValueOfCssProperty"
ELEMENT_EQUALS = "elementEquals"
SCREENSHOT = "screenshot"
IMPLICIT_WAIT = "implicitlyWait"
EXECUTE_ASYNC_SCRIPT = "executeAsyncScript"
SET_SCRIPT_TIMEOUT = "setScriptTimeout"
SET_TIMEOUTS = "setTimeouts"
MAXIMIZE_WINDOW = "windowMaximize"
GET_LOG = "getLog"
GET_AVAILABLE_LOG_TYPES = "getAvailableLogTypes"
#Alerts
DISMISS_ALERT = "dismissAlert"
ACCEPT_ALERT = "acceptAlert"
SET_ALERT_VALUE = "setAlertValue"
GET_ALERT_TEXT = "getAlertText"
# Advanced user interactions
CLICK = "mouseClick"
DOUBLE_CLICK = "mouseDoubleClick"
MOUSE_DOWN = "mouseButtonDown"
MOUSE_UP = "mouseButtonUp"
MOVE_TO = "mouseMoveTo"
# Screen Orientation
SET_SCREEN_ORIENTATION = "setScreenOrientation"
GET_SCREEN_ORIENTATION = "getScreenOrientation"
# Touch Actions
SINGLE_TAP = "touchSingleTap"
TOUCH_DOWN = "touchDown"
TOUCH_UP = "touchUp"
TOUCH_MOVE = "touchMove"
TOUCH_SCROLL = "touchScroll"
DOUBLE_TAP = "touchDoubleTap"
LONG_PRESS = "touchLongPress"
FLICK = "touchFlick"
#HTML 5
EXECUTE_SQL = "executeSql"
GET_LOCATION = "getLocation"
SET_LOCATION = "setLocation"
GET_APP_CACHE = "getAppCache"
GET_APP_CACHE_STATUS = "getAppCacheStatus"
CLEAR_APP_CACHE = "clearAppCache"
IS_BROWSER_ONLINE = "isBrowserOnline"
SET_BROWSER_ONLINE = "setBrowserOnline"
GET_LOCAL_STORAGE_ITEM = "getLocalStorageItem"
REMOVE_LOCAL_STORAGE_ITEM = "removeLocalStorageItem"
GET_LOCAL_STORAGE_KEYS = "getLocalStorageKeys"
SET_LOCAL_STORAGE_ITEM = "setLocalStorageItem"
CLEAR_LOCAL_STORAGE = "clearLocalStorage"
GET_LOCAL_STORAGE_SIZE = "getLocalStorageSize"
GET_SESSION_STORAGE_ITEM = "getSessionStorageItem"
REMOVE_SESSION_STORAGE_ITEM = "removeSessionStorageItem"
GET_SESSION_STORAGE_KEYS = "getSessionStorageKeys"
SET_SESSION_STORAGE_ITEM = "setSessionStorageItem"
CLEAR_SESSION_STORAGE = "clearSessionStorage"
GET_SESSION_STORAGE_SIZE = "getSessionStorageSize"
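A sketch of how these constants are consumed: the remote WebDriver dispatches a command name plus parameters, and the remote connection maps the name to an HTTP verb and URL template. A simplified, self-contained stand-in (the real dispatch lives in the remote WebDriver and RemoteConnection classes):
class RemoteWebDriverSketch(object):
    """Simplified stand-in for the remote WebDriver's dispatch loop."""

    def execute(self, driver_command, params):
        # The real driver hands this pair to RemoteConnection, which maps
        # the command name to an HTTP verb and URL template; here we log.
        print "->", driver_command, params

    def get(self, url):
        """Loads a web page in the current (sketched) session."""
        self.execute(Command.GET, {'url': url})

RemoteWebDriverSketch().get('http://example.com')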
|
rdkit/rdkit
|
Code/GraphMol/Descriptors/test3D_old.py
|
Python
|
bsd-3-clause
| 3,537 | 0.035341 |
from rdkit import Chem
from rdkit import rdBase
from rdkit.Chem import rdMolDescriptors as rdMD
from rdkit.Chem import AllChem
from rdkit.Chem.EState import EStateIndices
from rdkit.Chem.EState import AtomTypes
import time
print rdBase.rdkitVersion
print rdBase.boostVersion
def getEState(mol):
return EStateIndices(mol)
def localopt(mol, steps = 500):
if mol.GetNumConformers() == 0:
mol=make3D(mol)
AllChem.MMFFOptimizeMolecule(mol, maxIters = steps)
return mol
def make3D(mol, steps = 50):
mol = Chem.AddHs(mol)
success = AllChem.EmbedMolecule(mol)
if success == -1: # Failed
success = AllChem.EmbedMolecule(mol, useRandomCoords = True)
if success == -1:
        raise ValueError("Embedding failed!")  # 'Error' was undefined; use a built-in
mol = localopt(mol, steps)
return mol
def get3D(m,is3d):
if not is3d:
m = Chem.AddHs(m)
AllChem.EmbedMolecule(m)
AllChem.MMFFOptimizeMolecule(m)
r= rdMD.CalcAUTOCORR3D(m)+rdMD.CalcRDF(m)+rdMD.CalcMORSE(m)+rdMD.CalcWHIM(m)+rdMD.CalcGETAWAY(m)
return r
def generateALL():
m = Chem.MolFromSmiles('Cc1ccccc1')
thefile = open('testAC.txt', 'w')
filename="/Users/mbp/Github/rdkit_mine/Code/GraphMol/Descriptors/test_data/PBF_egfr.sdf"
suppl = Chem.SDMolSupplier(filename,removeHs=False)
mols = [x for x in suppl]
start = time.time()
for m in mols:
r= get3D(m,True)
for item in r:
thefile.write("%.3f," % item)
thefile.write("\n")
end = time.time()
print end - start
thefile = open('testSMWHIM.txt', 'w')
writer = Chem.SDWriter('3Dsmallmol.sdf')
A=['[H][H]','B','O=O','C','CC','CCC','CCCC','CCCCC','CCCCCC','CO','CCO','CCCO','CCCCO','CCCCCO','CCCCCCO','CCl','CCCl','CCCCl','CCCCCl','CCCCCCl','CCCCCCCl','CBr','CCBr','CCCBr','CCCCBr','CCCCCBr','CCCCCCBr','CI','CCI','CCCI','CCCCI','CCCCCI','CCCCCCI','CF','CCF','CCCF','CCCCF','CCCCCF','CCCCCCF','CS','CCS','CCCS','CCCCS','CCCCCS','CCCCCCS','CN','CCN','CCCN','CCCCN','CCCCCN','CCCCCCN']
for smi in A:
    m = Chem.MolFromSmiles(smi)
m=localopt(m,100)
#r=get3D(m,True)
print smi
print "---------"
r=rdMD.CalcWHIM(m)
print "Ei:"+str(r[0])+ "," + str(r[1]) + "," + str(r[2])+ "\n"
print "Gi:"+str(r[5])+ "," + str(r[6]) + "," + str(r[7])+ "\n"
print "SI:"+str(rdMD.CalcSpherocityIndex(m))
print "AS:"+str(rdMD.CalcAsphericity(m))
print "EX:"+str(rdMD.CalcEccentricity(m))
for item in r:
thefile.write("%.3f," % item)
thefile.write("\n")
#m.SetProp("smi", smi)
#writer.write(m)
thefile = open('testBPA.txt', 'w')
writer = Chem.SDWriter('3DBPAmol.sdf')
B=['CN(C)CC(Br)c1ccccc1','CN(C)CC(Br)c1ccc(F)cc1','CN(C)CC(Br)c1ccc(Cl)cc1','CN(C)CC(Br)c1ccc(Cl)cc1','CN(C)CC(Br)c1ccc(I)cc1','CN(C)CC(Br)c1ccc(C)cc1','CN(C)CC(Br)c1cccc(F)c1','CN(C)CC(Br)c1cccc(Cl)c1','CN(C)CC(Br)c1cccc(Br)c1','CN(C)CC(Br)c1cccc(I)c1','CN(C)CC(Br)c1cccc(C)c1','CN(C)CC(Br)c1ccc(F)c(Cl)c1','CN(C)CC(Br)c1ccc(F)c(Br)c1','CN(C)CC(Br)c1ccc(F)c(C)c1','CN(C)CC(Br)c1ccc(Cl)c(Cl)c1','CN(C)CC(Br)c1ccc(Cl)c(Br)c1','CN(C)CC(Br)c1ccc(Cl)c(C)c1','CN(C)CC(Br)c1ccc(Br)c(Cl)c1','CN(C)CC(Br)c1ccc(Br)c(Br)c1','CN(C)CC(Br)c1ccc(Br)c(C)c1','CN(C)CC(Br)c1ccc(C)c(C)c1','CN(C)CC(Br)c1ccc(C)c(Br)c1']
for smi in B:
m = Chem.MolFromSmiles(smi)
m=localopt(m,100)
#r=get3D(m,True)
r=rdMD.CalcWHIM(m)
for item in r:
thefile.write("%.3f," % item)
thefile.write("\n")
#m.SetProp("smi", smi)
#writer.write(m)
A="G1w,G2w,G3w,Gw"
print dir(rdMD)
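A minimal sketch exercising the helpers defined above (an arbitrary SMILES; descriptor counts depend on the RDKit build):
m = Chem.MolFromSmiles('CCO')
m = make3D(m)                  # add Hs, embed, then MMFF-optimize
descriptors = get3D(m, True)   # AUTOCORR3D + RDF + MORSE + WHIM + GETAWAY
print len(descriptors)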
|