repo_name | path | language | license | size | score | prefix | middle | suffix
---|---|---|---|---|---|---|---|---
saltstack/salt | salt/modules/mine.py | Python | apache-2.0 | 19,294 | 0.00114 |
"""
The function cache system allows for data to be stored on the master so it can be easily read by other minions
"""
import logging
import time
import traceback
import salt.channel.client
import salt.crypt
import salt.payload
import salt.transport
import salt.utils.args
import salt.utils.dictupdate
import salt.utils.event
import salt.utils.functools
import salt.utils.mine
import salt.utils.minions
import salt.utils.network
from salt.exceptions import SaltClientError
MINE_INTERNAL_KEYWORDS = frozenset(
[
"__pub_user",
"__pub_arg",
"__pub_fun",
"__pub_jid",
"__pub_tgt",
"__pub_tgt_type",
"__pub_ret",
]
)
__proxyenabled__ = ["*"]
log = logging.getLogger(__name__)
def _auth():
"""
Return the auth object
"""
if "auth" not in __context__:
try:
__context__["auth"] = salt.crypt.SAuth(__opts__)
except SaltClientError:
log.error(
"Could not authenticate with master. Mine data will not be transmitted."
)
return __context__["auth"]
def _mine_function_available(func):
if func not in __salt__:
log.error("Function %s in mine_functions not available", func)
return False
return True
def _mine_send(load, opts):
eventer = salt.utils.event.MinionEvent(opts, listen=False)
event_ret = eventer.fire_event(load, "_minion_mine")
    # We need to pause here to give the decoupled event system
    # time to propagate the mine data
time.sleep(0.5)
return event_ret
def _mine_get(load, opts):
if opts.get("transport", "") in salt.transport.TRANSPORTS:
try:
load["tok"] = _auth().gen_token(b"salt")
except AttributeError:
log.error(
"Mine could not authenticate with master. Mine could not be retrieved."
)
return False
with salt.channel.client.ReqChannel.factory(opts) as channel:
return channel.send(load)
def _mine_store(mine_data, clear=False):
"""
Helper function to store the provided mine data.
This will store either locally in the cache (for masterless setups), or in
the master's cache.
:param dict mine_data: Dictionary with function_name: function_data to store.
:param bool clear: Whether or not to clear (`True`) the mine data for the
function names present in ``mine_data``, or update it (`False`).
"""
# Store in the salt-minion's local cache
if __opts__["file_client"] == "local":
if not clear:
old = __salt__["data.get"]("mine_cache")
if isinstance(old, dict):
old.update(mine_data)
mine_data = old
return __salt__["data.update"]("mine_cache", mine_data)
# Store on the salt master
load = {
"cmd": "_mine",
"data": mine_data,
"id": __opts__["id"],
"clear": clear,
}
return _mine_send(load, __opts__)
def update(clear=False, mine_functions=None):
"""
Call the configured functions and send the data back up to the master.
The functions to be called are merged from the master config, pillar and
minion config under the option `mine_functions`:
.. code-block:: yaml
mine_functions:
network.ip_addrs:
- eth0
disk.usage: []
This function accepts the following arguments:
:param bool clear: Default: ``False``
Specifies whether updating will clear the existing values (``True``), or
whether it will update them (``False``).
:param dict mine_functions:
Update (or clear, see ``clear``) the mine data on these functions only.
This will need to have the structure as defined on
https://docs.saltproject.io/en/latest/topics/mine/index.html#mine-functions
        This feature can be used when updating the mine for functions
        that require a refresh at different intervals than the rest of
        the functions specified under `mine_functions` in the
        minion/master config or pillar.
A potential use would be together with the `scheduler`, for example:
.. code-block:: yaml
schedule:
lldp_mine_update:
function: mine.update
kwargs:
mine_functions:
net.lldp: []
hours: 12
In the example above, the mine for `net.lldp` would be refreshed
every 12 hours, while `network.ip_addrs` would continue to be updated
as specified in `mine_interval`.
The function cache will be populated with information from executing these
functions
CLI Example:
.. code-block:: bash
salt '*' mine.update
"""
if not mine_functions:
mine_functions = __salt__["config.merge"]("mine_functions", {})
# If we don't have any mine functions configured, then we should just bail out
if not mine_functions:
return
elif isinstance(mine_functions, list):
mine_functions = {fun: {} for fun in mine_functions}
elif isinstance(mine_functions, dict):
pass
else:
return
mine_data = {}
for function_alias, function_data in mine_functions.items():
(
function_name,
function_args,
function_kwargs,
minion_acl,
) = salt.utils.mine.parse_function_definition(function_data)
if not _mine_function_available(function_name or function_alias):
continue
try:
res = salt.utils.functools.call_function(
__salt__[function_name or function_alias],
*function_args,
**function_kwargs
)
except Exception: # pylint: disable=broad-except
trace = traceback.format_exc()
log.error(
"Function %s in mine.update failed to execute",
function_name or function_alias,
)
log.debug("Error: %s", trace)
continue
if minion_acl.get("allow_tgt"):
mine_data[function_alias] = salt.utils.mine.wrap_acl_structure(
res, **minion_acl
)
else:
mine_data[function_alias] = res
return _mine_store(mine_data, clear)
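# Illustrative sketch (not part of the original module): given a pillar or
# minion config of
#   mine_functions:
#     network.ip_addrs:
#       - eth0
# update() resolves the function name "network.ip_addrs" with args ("eth0",),
# executes it via __salt__, and stores {"network.ip_addrs": <result>} through
# _mine_store().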
def send(name, *args, **kwargs):
"""
Send a specific function and its result to the salt mine.
This gets stored in either the local cache, or the salt master's cache.
:param str name: Name of the function to add to the mine.
    The following parameters are extracted from kwargs if present:
:param str mine_function: The name of the execution_module.function to run
and whose value will be stored in the salt mine. Defaults to ``name``.
:param str allow_tgt: Targeting specification for ACL. Specifies which minions
are allowed to access this function. Please note both your master and
minion need to be on, at least, version 3000 for this to work properly.
:param str allow_tgt_type: Type of the targeting specification. This value will
be ignored if ``allow_tgt`` is not specified. Please note both your
master and minion need to be on, at least, version 3000 for this to work
properly.
Remaining args and kwargs will be passed on to the function to run.
:rtype: bool
:return: Whether executing the function and storing the information was successful.
.. versionchanged:: 3000
Added ``allow_tgt``- and ``allow_tgt_type``-parameters to specify which
minions are allowed to access this function.
See :ref:`targeting` for more information about targeting.
CLI Example:
.. code-block:: bash
salt '*' mine.send network.ip_addrs interface=eth0
salt '*' mine.send eth0_ip_addrs mine_function=network.ip_addrs interface=eth0
salt '*' mine.send eth0_ip_addrs mine_function=network.ip_addrs interface=eth0 allow_tgt='G@grain:value' allow_tgt_type=compound
"""
kwargs = salt.utils.args.clean_kwargs(**kwargs)
    mine_function = kwargs.pop("mine_function", None)
cloudify-cosmo/tosca-vcloud-plugin | system_tests/vcloud_handler.py | Python | apache-2.0 | 5,843 | 0.000342 |
# Copyright (c) 2015-2020 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cosmo_tester.framework.handlers import (
BaseHandler,
BaseCloudifyInputsConfigReader)
from pyvcloud.schema.vcd.v1_5.schemas.vcloud import taskType
from pyvcloud import vcloudair
import time
import requests
TEST_VDC = "systest"
class VcloudCleanupContext(BaseHandler.CleanupContext):
def __init__(self, context_name, env):
super(VcloudCleanupContext, self).__init__(context_name, env)
@classmethod
def clean_all(cls, env):
"""
Cleans *all* resources, including resources that were not
created by the test
"""
super(VcloudCleanupContext, cls).clean_all(env)
class CloudifyVcloudInputsConfigReader(BaseCloudifyInputsConfigReader):
def __init__(self, cloudify_config, manager_blueprint_path, **kwargs):
super(CloudifyVcloudInputsConfigReader, self).__init__(
cloudify_config, manager_blueprint_path=manager_blueprint_path,
**kwargs)
@property
def vcloud_username(self):
return self.config['vcloud_username']
@property
def vcloud_password(self):
return self.config['vcloud_password']
@property
def vcloud_url(self):
return self.config['vcloud_url']
@property
def vcloud_service(self):
return self.config['vcloud_service']
@property
def vcloud_org(self):
return self.config['vcloud_org']
@property
def vcloud_vdc(self):
return self.config['vcloud_vdc']
@property
def manager_server_name(self):
return self.config['server_name']
@property
def manager_server_catalog(self):
return self.config['catalog']
@property
def manager_server_template(self):
return self.config['template']
@property
def management_network_use_existing(self):
return self.config['management_network_use_existing']
@property
def management_network_name(self):
return self.config['management_network_name']
@property
def edge_gateway(self):
return self.config['edge_gateway']
@property
def floating_ip_public_ip(self):
return self.config['floating_ip_public_ip']
@property
def ssh_key_filename(self):
return self.config['ssh_key_filename']
@property
def agent_private_key_path(self):
return self.config['agent_private_key_path']
@property
def user_public_key(self):
return self.config['user_public_key']
@property
def agent_public_key(self):
return self.config['user_public_key']
@property
def management_port_ip_allocation_mode(self):
return self.config['management_port_ip_allocation_mode']
@property
def vcloud_service_type(self):
return self.config['vcloud_service_type']
@property
def vcloud_region(self):
return self.config['vcloud_region']
@property
def public_catalog(self):
return 'Public Catalog'
@property
def ubuntu_precise_template(self):
return 'Ubuntu Server 12.04 LTS (amd64 20150127)'
class VcloudHandler(BaseHandler):
CleanupContext = VcloudCleanupContext
CloudifyConfigReader = CloudifyVcloudInputsConfigReader
def before_bootstrap(self):
super(VcloudHandler, self).before_bootstrap()
vca = login(self.env.cloudify_config)
if vca.get_vdc(TEST_VDC):
            status, task = vca.delete_vdc(TEST_VDC)
if status:
wait_for_task(vca, task)
else:
raise RuntimeError("Can't delete test VDC")
if vca:
            task = vca.create_vdc(TEST_VDC)
wait_for_task(vca, task)
else:
raise RuntimeError("Can't create test VDC")
handler = VcloudHandler
def login(env):
vca = vcloudair.VCA(
host=env['vcloud_url'],
username=env['vcloud_username'],
service_type=env['vcloud_service_type'],
version="5.7",
verify=False)
logined = (vca.login(env['vcloud_password']) and
vca.login_to_instance(env['vcloud_instance'], env['vcloud_password']) and
vca.login_to_instance(env['vcloud_instance'], None,
vca.vcloud_session.token, vca.vcloud_session.org_url))
if logined:
return vca
else:
return None
def wait_for_task(vca_client, task):
TASK_RECHECK_TIMEOUT = 5
TASK_STATUS_SUCCESS = 'success'
TASK_STATUS_ERROR = 'error'
WAIT_TIME_MAX_MINUTES = 30
MAX_ATTEMPTS = WAIT_TIME_MAX_MINUTES * 60 / TASK_RECHECK_TIMEOUT
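    # 30 minutes of 5-second polls = 360 status checks before giving up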
status = task.get_status()
for attempt in xrange(MAX_ATTEMPTS):
if status == TASK_STATUS_SUCCESS:
return
if status == TASK_STATUS_ERROR:
error = task.get_Error()
raise RuntimeError(
"Error during task execution: {0}".format(error.get_message()))
time.sleep(TASK_RECHECK_TIMEOUT)
response = requests.get(
task.get_href(),
headers=vca_client.vcloud_session.get_vcloud_headers(),
verify=False)
task = taskType.parseString(response.content, True)
status = task.get_status()
raise RuntimeError("Wait for task timeout.")
solus-project/ypkg | ypkg2/sources.py | Python | gpl-3.0 | 12,925 | 0 |
#!/bin/true
# -*- coding: utf-8 -*-
#
# This file is part of ypkg2
#
# Copyright 2015-2017 Ikey Doherty <ikey@solus-project.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
from . import console_ui
import os
import hashlib
import subprocess
import fnmatch
import shutil
KnownSourceTypes = {
'tar': [
'*.tar.*',
'*.tgz',
],
'zip': [
'*.zip',
],
}
class YpkgSource:
def __init__(self):
pass
def fetch(self, context):
""" Fetch this source from it's given location """
return False
def verify(self, context):
""" Verify the locally obtained source """
return False
def extract(self, context):
""" Attempt extraction of this source type, if needed """
return False
def remove(self, context):
""" Attempt removal of this source type """
return False
def cached(self, context):
""" Report on whether this source is cached """
return False
class GitSource(YpkgSource):
""" Provides git source support to ypkg """
# Source URI
uri = None
# Tag or branch to check out
tag = None
def __init__(self, uri, tag):
YpkgSource.__init__(self)
self.uri = uri
self.tag = tag
self.filename = self.get_target_name()
def __str__(self):
return "{} ({})".format(self.uri, self.tag)
def is_dumb_transport(self):
""" Http depth cloning = no go """
if self.uri.startswith("http:") or self.uri.startswith("https:"):
return True
return False
def get_target_name(self):
""" Get the target directory base name after its fetched """
uri = str(self.uri)
if uri.endswith(".git"):
uri = uri[:-4]
return os.path.basename(uri) + ".git"
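    # Illustrative (not in the original source): a uri such as
    # "https://example.com/foo/bar.git" yields the clone directory "bar.git".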
def get_full_path(self, context):
""" Fully qualified target path """
return os.path.join(context.get_sources_directory(),
self.get_target_name())
def fetch(self, context):
""" Clone the actual git repo, favouring efficiency... """
source_dir = context.get_sources_directory()
# Ensure source dir exists
if not os.path.exists(source_dir):
try:
os.makedirs(source_dir, mode=00755)
except Exception as e:
console_ui.emit_error("Source", "Cannot create sources "
"directory: {}".format(e))
return False
cmd = "git -C \"{}\" clone \"{}\" {}".format(
source_dir, self.uri, self.get_target_name())
console_ui.emit_info("Git", "Fetching: {}".format(self.uri))
try:
r = subprocess.check_call(cmd, shell=True)
except Exception as e:
console_ui.emit_error("Git", "Failed to fetch {}".format(
self.uri))
print("Error follows: {}".format(e))
return False
console_ui.emit_info("Git", "Checking out: {}".format(self.tag))
cmd = "git -C \"{}\" checkout \"{}\"".format(
os.path.join(source_dir, self.get_target_name()),
self.tag)
try:
r = subprocess.check_call(cmd, shell=True)
except Exception as e:
console_ui.emit_error("Git", "Failed to checkout {}".format(
self.tag))
return False
ddir = os.path.join(source_dir, self.get_target_name())
if not os.path.exists(os.path.join(ddir, ".gitmodules")):
return True
cmd1 = "git -C \"{}\" submodule init".format(ddir)
cmd2 = "git -C \"{}\" submodule update".format(ddir)
try:
r = subprocess.check_call(cmd1, shell=True)
r = subprocess.check_call(cmd2, shell=True)
except Exception as e:
console_ui.emit_error("Git", "Failed to submodule init {}".format(
e))
return False
return True
def verify(self, context):
""" Verify source = good. """
bpath = self.get_full_path(context)
status_cmd = "git -C {} diff --exit-code"
try:
subprocess.check_call(status_cmd.format(bpath), shell=True)
except Exception:
console_ui.emit_error("Git", "Unexpected diff in source")
return False
return True
def extract(self, context):
""" Extract ~= copy source into build area. Nasty but original source
should not be tainted between runs.
"""
source = self.get_full_path(context)
target = os.path.join(context.get_build_dir(),
self.get_target_name())
if os.path.exists(target):
try:
shutil.rmtree(target)
except Exception as e:
console_ui.emit_error("Git", "Cannot remove stagnant tree")
print(e)
return False
if not os.path.exists(context.get_build_dir()):
try:
os.makedirs(context.get_build_dir(), mode=00755)
except Exception as e:
console_ui.emit_error("Source", "Cannot create sources "
"directory: {}".format(e))
return False
try:
cmd = "cp -Ra \"{}/\" \"{}\"".format(source, target)
subprocess.check_call(cmd, shell=True)
except Exception as e:
console_ui.emit_error("Git", "Failed to copy source to build")
print(e)
return False
return True
def cached(self, context):
bpath = self.get_full_path(context)
return os.path.exists(bpath)
class TarSource(YpkgSource):
""" Represents a simple tarball source """
uri = None
hash = None
filename = None
def __init__(self, uri, hash):
YpkgSource.__init__(self)
self.uri = uri
self.filename = os.path.basename(uri)
self.hash = hash
def __str__(self):
return "%s (%s)" % (self.uri, self.hash)
def _get_full_path(self, context):
bpath = os.path.join(context.get_sources_directory(),
self.filename)
return bpath
def fetch(self, context):
source_dir = context.get_sources_directory()
# Ensure source dir exists
if not os.path.exists(source_dir):
try:
os.makedirs(source_dir, mode=00755)
except Exception as e:
console_ui.emit_error("Source", "Cannot create sources "
"directory: {}".format(e))
return False
console_ui.emit_info("Source", "Fetching: {}".format(self.uri))
fpath = self._get_full_path(context)
cmd = "curl -o \"{}\" --url \"{}\" --location".format(
fpath, self.uri)
try:
r = subprocess.check_call(cmd, shell=True)
except Exception as e:
console_ui.emit_error("Source", "Failed to fetch {}".format(
self.uri))
print("Error follows: {}".format(e))
return False
return True
def verify(self, context):
bpath = self._get_full_path(context)
hash = None
with open(bpath, "r") as inp:
h = hashlib.sha256()
h.update(inp.read())
            hash = h.hexdigest()
if hash != self.hash:
console_ui.emit_error("Source", "Incorrect hash for {}".
format(self.filename))
print("Found hash : {}".format(hash))
print("Expected hash : {}".format(self.hash))
return False
return True
target = os.path.join(BallDir, os.path.basename(x))
ext = "unzip" if target.endswith(".zip") else "tar xf"
diropt = "-d" if targe
jigarkb/CTCI | LeetCode/036-M-ValidSudoku.py | Python | mit | 1,045 | 0.004785 |
# Determine if a Sudoku is valid, according to: Sudoku Puzzles - The Rules (http://sudoku.com.au/TheRules.aspx).
#
# The Sudoku board could be partially filled, where empty cells are filled with the character '.'.
#
# Note:
# A valid Sudoku board (partially filled) is not necessarily solvable. Only the filled cells need to be validated.
class Solution(object):
def isValidSudoku(self, board):
"""
        :type board: List[List[str]]
:rtype: bool
"""
row = {}
column = {}
box = {}
for i in range(len(board)):
for j in range(len(board[i])):
if board[i][j] != '.':
num = int(board[i][j])
if row.get((i, num)) or column.get((j, num)) or box.get((i/3, j/3, num)):
return False
                    row[i, num] = column[j, num] = box[i/3, j/3, num] = True
return True
# Note:
# We maintain 3 hash maps keyed by row (i), column (j) and box ((i/3, j/3)),
# and return False as soon as any of them already holds True for that digit.
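# Illustrative example of the box key (hypothetical cell): cell (i=4, j=7)
# maps to box (4/3, 7/3) == (1, 2) under integer division, so all nine cells
# of each 3x3 sub-grid share a single dictionary key.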
Rogentos/rogentos-anaconda | timezone.py | Python | gpl-2.0 | 2,650 | 0.004151 |
#
# timezone.py - timezone install data
#
# Copyright (C) 2001 Red Hat, Inc. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import shutil
import iutil
import os
from flags import flags
from anaconda_log import PROGRAM_LOG_FILE
import logging
log = logging.getLogger("anaconda")
class Timezone:
def writeKS(self, f):
f.write("timezone")
if self.utc:
f.write(" --utc")
f.write(" %s\n" % self.tz)
def write(self, instPath):
fromFile = "/usr/share/zoneinfo/" + self.tz
tzfile = instPath + "/etc/localtime"
if os.path.isdir(instPath+"/etc"):
try:
if os.path.lexists(tzfile):
os.remove(tzfile)
os.symlink(fromFile, tzfile)
except OSError, e:
log.error("Error copying timezone (from %s): %s" % (
fromFile, e,))
f = open(instPath + "/etc/timezone", "w")
f.write(self.tz)
f.flush()
f.close()
# all this is ugly, but it's going away
# hopefully soon.
timedatectl = "/usr/bin/timedatectl"
if os.path.lexists(timedatectl):
if self.utc:
iutil.execWithRedirect(
timedatectl, ["set-local-rtc", "0"],
stdout = PROGRAM_LOG_FILE,
stderr = PROGRAM_LOG_FILE)
else:
iutil.execWithRedirect(
timedatectl, ["set-local-rtc", "1"],
stdout = PROGRAM_LOG_FILE,
stderr = PROGRAM_LOG_FILE)
# this writes /etc/adjtime, so copy it over
adjtime = "/etc/adjtime"
if os.path.isfile(adjtime):
shutil.copy2(adjtime, instPath + adjtime)
def getTimezoneInfo(self):
return (self.tz, self.utc)
def setTimezoneInfo(self, timezone, asUtc = 0):
self.tz = timezone
self.utc = asUtc
def __init__(self):
self.tz = "America/New_York"
self.utc = 0
lowfatcomputing/amforth | core/devices/atmega8535/device.py | Python | gpl-2.0 | 4,349 | 0.07404 |
# Partname: ATmega8535
# generated automatically, do not edit
MCUREGS = {
'ADMUX': '&39',
'ADMUX_REFS': '$C0',
'ADMUX_ADLAR': '$20',
'ADMUX_MUX': '$1F',
'ADCSRA': '&38',
'ADCSRA_ADEN': '$80',
'ADCSRA_ADSC': '$40',
'ADCSRA_ADATE': '$20',
'ADCSRA_ADIF': '$10',
'ADCSRA_ADIE': '$08',
'ADCSRA_ADPS': '$07',
'ADC': '&36',
'SFIOR': '&80',
'SFIOR_ADTS': '$E0',
'ACSR': '&40',
'ACSR_ACD': '$80',
'ACSR_ACBG': '$40',
'ACSR_ACO': '$20',
'ACSR_ACI': '$10',
'ACSR_ACIE': '$08',
'ACSR_ACIC': '$04',
'ACSR_ACIS': '$03',
'TWBR': '&32',
'TWCR': '&86',
'TWCR_TWINT': '$80',
'TWCR_TWEA': '$40',
'TWCR_TWSTA': '$20',
'TWCR_TWSTO': '$10',
'TWCR_TWWC': '$08',
'TWCR_TWEN': '$04',
'TWCR_TWIE': '$01',
'TWSR': '&33',
'TWSR_TWS': '$F8',
'TWSR_TWPS': '$03',
'TWDR': '&35',
'TWAR': '&34',
'TWAR_TWA': '$FE',
'TWAR_TWGCE': '$01',
'UDR': '&44',
'UCSRA': '&43',
    'UCSRA_RXC': '$80',
'UCSRA_TXC': '$40',
'UCSRA_UDRE': '$20',
'UCSRA_FE': '$10',
'UCSRA_DOR': '$08',
'UCSRA_UPE': '$04',
'UCSRA_U2X': '$02',
    'UCSRA_MPCM': '$01',
'UCSRB': '&42',
'UCSRB_RXCIE': '$80',
'UCSRB_TXCIE': '$40',
'UCSRB_UDRIE': '$20',
'UCSRB_RXEN': '$10',
'UCSRB_TXEN': '$08',
'UCSRB_UCSZ2': '$04',
'UCSRB_RXB8': '$02',
'UCSRB_TXB8': '$01',
'UCSRC': '&64',
'UCSRC_URSEL': '$80',
'UCSRC_UMSEL': '$40',
'UCSRC_UPM': '$30',
'UCSRC_USBS': '$08',
'UCSRC_UCSZ': '$06',
'UCSRC_UCPOL': '$01',
'UBRRH': '&64',
'UBRRH_URSEL': '$80',
'UBRRH_UBRR1': '$0C',
'UBRRH_UBRR': '$03',
'UBRRL': '&41',
'PORTA': '&59',
'DDRA': '&58',
'PINA': '&57',
'PORTB': '&56',
'DDRB': '&55',
'PINB': '&54',
'PORTC': '&53',
'DDRC': '&52',
'PINC': '&51',
'PORTD': '&50',
'DDRD': '&49',
'PIND': '&48',
'SPDR': '&47',
'SPSR': '&46',
'SPSR_SPIF': '$80',
'SPSR_WCOL': '$40',
'SPSR_SPI2X': '$01',
'SPCR': '&45',
'SPCR_SPIE': '$80',
'SPCR_SPE': '$40',
'SPCR_DORD': '$20',
'SPCR_MSTR': '$10',
'SPCR_CPOL': '$08',
'SPCR_CPHA': '$04',
'SPCR_SPR': '$03',
'EEAR': '&62',
'EEDR': '&61',
'EECR': '&60',
'EECR_EERIE': '$08',
'EECR_EEMWE': '$04',
'EECR_EEWE': '$02',
'EECR_EERE': '$01',
'TCCR0': '&83',
'TCCR0_FOC0': '$80',
'TCCR0_WGM00': '$40',
'TCCR0_COM0': '$30',
'TCCR0_WGM01': '$08',
'TCCR0_CS0': '$07',
'TCNT0': '&82',
'OCR0': '&92',
'TIMSK': '&89',
'TIMSK_OCIE0': '$02',
'TIMSK_TOIE0': '$01',
'TIFR': '&88',
'TIFR_OCF0': '$02',
'TIFR_TOV0': '$01',
'TCCR1A': '&79',
'TCCR1A_COM1A': '$C0',
'TCCR1A_COM1B': '$30',
'TCCR1A_FOC1A': '$08',
'TCCR1A_FOC1B': '$04',
'TCCR1A_WGM1': '$03',
'TCCR1B': '&78',
'TCCR1B_ICNC1': '$80',
'TCCR1B_ICES1': '$40',
'TCCR1B_WGM1': '$18',
'TCCR1B_CS1': '$07',
'TCNT1': '&76',
'OCR1A': '&74',
'OCR1B': '&72',
'ICR1': '&70',
'TCCR2': '&69',
'TCCR2_FOC2': '$80',
'TCCR2_WGM20': '$40',
'TCCR2_COM2': '$30',
'TCCR2_WGM21': '$08',
'TCCR2_CS2': '$07',
'TCNT2': '&68',
'OCR2': '&67',
'ASSR': '&66',
'ASSR_AS2': '$08',
'ASSR_TCN2UB': '$04',
'ASSR_OCR2UB': '$02',
'ASSR_TCR2UB': '$01',
'GICR': '&91',
'GICR_INT': '$C0',
'GICR_INT2': '$20',
'GICR_IVSEL': '$02',
'GICR_IVCE': '$01',
'GIFR': '&90',
'GIFR_INTF': '$C0',
'GIFR_INTF2': '$20',
'MCUCR': '&85',
'MCUCR_ISC1': '$0C',
'MCUCR_ISC0': '$03',
'MCUCSR': '&84',
'MCUCSR_ISC2': '$40',
'WDTCR': '&65',
'WDTCR_WDCE': '$10',
'WDTCR_WDE': '$08',
'WDTCR_WDP': '$07',
'SREG': '&95',
'SREG_I': '$80',
'SREG_T': '$40',
'SREG_H': '$20',
'SREG_S': '$10',
'SREG_V': '$08',
'SREG_N': '$04',
'SREG_Z': '$02',
'SREG_C': '$01',
'SP': '&93',
'OSCCAL': '&81',
'SPMCR': '&87',
'SPMCR_SPMIE': '$80',
'SPMCR_RWWSB': '$40',
'SPMCR_RWWSRE': '$10',
'SPMCR_BLBSET': '$08',
'SPMCR_PGWRT': '$04',
'SPMCR_PGERS': '$02',
'SPMCR_SPMEN': '$01',
'INT0Addr': '1',
'INT1Addr': '2',
'TIMER2_COMPAddr': '3',
'TIMER2_OVFAddr': '4',
'TIMER1_CAPTAddr': '5',
'TIMER1_COMPAAddr': '6',
'TIMER1_COMPBAddr': '7',
'TIMER1_OVFAddr': '8',
'TIMER0_OVFAddr': '9',
'SPI_STCAddr': '10',
'USART_RXAddr': '11',
'USART_UDREAddr': '12',
'USART_TXAddr': '13',
'ADCAddr': '14',
'EE_RDYAddr': '15',
'ANA_COMPAddr': '16',
'TWIAddr': '17',
'INT2Addr': '18',
'TIMER0_COMPAddr': '19',
'SPM_RDYAddr': '20'
}
gpostelnicu/fin_data | fin_data/stat/correlation/random_matrix.py | Python | mit | 2,722 | 0.003306 |
import numpy as np
def empirical_rmt(input_returns):
"""
Empirical RMT computes an empirical correlation matrix and then filters using Random Matrix Theory.
THIS FUNCTION TAKES AS INPUT A CLEAN NUMPY ARRAY (please remove n/a first !)
To compute a correlation matrix, and then apply Wishart RMT, it is required that input variables be as close to a
series of iid Gaussian vectors as possible.
This is why we start by normalizing observations by dispersions (cross-section standard dev, here estimated assuming
zero average)
To check the "Gaussianization":
import matplotlib.pyplot as pp
pp.plot(rets.index ,dispersion)
    pp.hist(returns_dispersion_normalized.reshape(returns_dispersion_normalized.size), bins=1000, range=[-5, 5])
pp.hist(returns_np.reshape(returns_np.size), bins=1000, range=[-.05, .05])
Args:
input_returns (np.array): Input cleaned log-returns.
Returns:
# to compute a correlation we have better start standardizing returns by dispersion
np.array: correlation matrix
"""
# 1. NORMALIZE BY DISPERSIONS
#
# this "Gaussianization" works well as can be seen on the histograms
# (although there is still a peak for the "cleansed" data)
dispersion = np.sqrt(1. / input_returns.shape[1] * np.sum(input_returns * input_returns, axis=1))
returns_dispersion_normalized = input_returns \
/ np.tile(dispersion.reshape(len(dispersion), 1), [1, input_returns.shape[1]])
# 2. COMPUTE THE EMPIRICAL CORRELATION MATRIX
# now returns are gaussian, we can compute a proper correlation matrix
correlation_matrix_raw = np.corrcoef(returns_dispersion_normalized, rowvar=0)
n_time, n_asset = returns_dispersion_normalized.shape
quality = n_asset/n_time
# print('Quality coefficient (the smaller the better)' + str(quality))
# 3. APPLY RMT FILTERING FORMULA
# RMT filter: this formula works for Wishart matrices !!!
w, v = np.linalg.eigh(correlation_matrix_raw)
    lambda_plus = (1. + np.sqrt(quality)) ** 2  # Marchenko-Pastur upper edge (1 + sqrt(q))**2 for Wishart matrices
# Remove smaller eigen-spaces
v = v[:, w > lambda_plus]
w = w[w > lambda_plus]
# Reconstruct the correlation matrix from eigen-spaces
correlation_matrix = np.zeros_like(correlation_matrix_raw)
number_of_assets = correlation_matrix_raw.shape[0]
for index_eig in range(len(w)):
cur_vect = np.reshape(v[:, index_eig], [number_of_assets, 1])
        correlation_matrix += w[index_eig] * cur_vect * np.transpose(cur_vect)
# print(correlation_matrix)
# Fill the remaining eigen-vectors
for index_eig in range(correlation_matrix.shape[0]):
        correlation_matrix[index_eig, index_eig] = 1.
return correlation_matrix
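# Minimal usage sketch (illustrative only; the synthetic data below is an
# assumption used to show the expected (n_time, n_asset) input orientation):
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    synthetic_returns = rng.normal(scale=0.01, size=(500, 50))  # 500 dates, 50 assets
    filtered = empirical_rmt(synthetic_returns)
    print(filtered.shape)  # (50, 50) filtered correlation matrix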
arborworkflows/ProjectManager | tangelo/projmgr.py | Python | apache-2.0 | 7,996 | 0.009005 |
import tangelo
import pymongo
import bson.json_util
from ArborFileManagerAPI import ArborFileManager
api = ArborFileManager()
api.initDatabaseConnection()
@tangelo.restful
def get(*pargs, **query_args):
if len(pargs) == 0:
return tangelo.HTTPStatusCode(400, "Missing resource type")
resource_type = pargs[0]
allowed = ["project", "analysis","collection", "workflow"]
if resource_type == "project":
if len(pargs) == 1:
return api.getListOfProjectNames()
elif len(pargs) == 2:
project = pargs[1]
return api.getListOfTypesForProject(project)
elif len(pargs) == 3:
project = pargs[1]
datatype = pargs[2]
return api.getListOfDatasetsByProjectAndType(project, datatype)
elif len(pargs) == 4:
project = pargs[1]
datatype = pargs[2]
dataset = pargs[3]
coll = api.db[api.returnCollectionForObjectByName(project, datatype, dataset)]
return bson.json_util.dumps(list(coll.find()))
elif len(pargs) == 5:
project = pargs[1]
datatype = pargs[2]
dataset = pargs[3]
stringFormat = pargs[4]
string = api.getDatasetAsTextString(project, datatype, dataset, stringFormat)
return string
else:
return tangelo.HTTPStatusCode(400, "Bad request - got %d parameter(s), was expecting between 1 and 5")
elif resource_type == "analysis":
if len(pargs) == 1:
return api.getListOfAnalysisNames()
elif len(pargs) == 2:
analysis_name = pargs[1]
coll = api.db[api.returnCollectionForAnalysisByName(analysis_name)]
return bson.json_util.dumps(list(coll.find()))
elif len(pargs) == 3:
analysis_name = pargs[1]
coll = api.db[api.returnCollectionForAnalysisByName(analysis_name)]
return coll.find_one()["analysis"]["script"]
# add a collection option to return the database and collection name for an object in the
# Arbor treestore. This 'information hiding violation' of the treestore allows for low-level
# clients to connect and work directly with the mongo database, should it be needed. This level
# is used in the phylomap application.
elif resource_type == "collection":
if len(pargs) == 4:
project = pargs[1]
datatype = pargs[2]
dataset = pargs[3]
collname = api.returnCollectionForObjectByName(project, datatype, dataset)
dbname = api.getMongoDatabase()
dbhost = api.getMongoHost()
dbport = api.getMongoPort()
return bson.json_util.dumps({'host':dbhost,'port':dbport,'db': dbname,'collection': collname})
# if workflow is specified as the resource type, then list the workflows in a project or display the
# information about a particular workflow
elif resource_type == "workflow":
if len(pargs) == 2:
project = pargs[1]
return api.getListOfDatasetsByProjectAndType(project,"Workflow")
if len(pargs) == 3:
project = pargs[1]
workflowName = pargs[2]
print("REST: getting status of workflow:",workflowName)
return bson.json_util.dumps(api.getStatusOfWorkflow(workflowName,project))
else:
return tangelo.HTTPStatusCode(400, "Workflow resource requires 2 or 3 positional arguments")
else:
return tangelo.HTTPStatusCode(400, "Bad resource type '%s' - allowed types are: %s" % (resource_type, ", ".join(allowed)))
# Jan 2014 - added support for workflows as a datatype inside projects. new workflow-only named types are
# defined here to allow workflows to be created and run through the REST interface
#
@tangelo.restful
def put(resource, projname, datasetname=None, data=None, filename=None, filetype=None,
workflowName = None, stepName=None, stepType=None, inputStepName=None, outputStepName=None,
inPortName=None,outPortName=None,operation=None, parameterName=None, parameterValue=None,
parameterValueNumber=None,flowType=None,dataType=None, **kwargs):
if (resource != "project") and (resource != "workflow"):
return tangelo.HTTPStatusCode(400, "Bad
|
resource type '%s' - allowed types are: project")
if resource == "project":
if datasetname is None:
api.newProject(projname)
else:
if filename is None:
return tangelo.HTTPStatusCode(400, "Missing argument 'filename'")
if filetype is None:
return tangelo.HTTPStatusCode(400, "Mi
|
ssing argument 'filetype'")
if data is None:
return tangelo.HTTPStatusCode(400, "Missing argument 'data'")
if datasetname is None:
return tangelo.HTTPStatusCode(400, "Missing argument 'datasetname'")
# user wants to upload a tree or a character matrix
if filetype == "newick" or filetype == "phyloxml":
api.newTreeInProjectFromString(datasetname, data, projname, filename, filetype)
if (filetype == "csv" and dataType is None) or (filetype == "csv" and dataType=='CharacterMatrix'):
api.newCharacterMatrixInProjectFromString(datasetname, data, projname, filename)
if filetype == "csv" and dataType=="Occurrences":
api.newOccurrencesInProjectFromString(datasetname, data, projname)
# workflow creation
# arborapi: /workflow/projname/workflowname - creates new empty workflow
# arborapi: /workflow/projname/workflowname//
if resource == "workflow":
# the user wants to create a new, empty workflow
if operation == "newWorkflow":
api.newWorkflowInProject(workflowName, projname)
if operation == "newWorkstepInWorkflow":
api.newWorkstepInWorkflow(workflowName, stepType, stepName, projname)
# allow user to add a parameter to a workstep or update the value of the parameter. There
# is currently a limitation that all values are strings, e.g. "2.4" instead of 2.4.
if operation == "updateWorkstepParameter":
# if a float argument is sent, use this as the value for the parameter, instead of the
# string. A conversion is done to float to assure numberic values
if parameterValueNumber != None:
print "found number filter value"
parameterValue = float(parameterValueNumber)
api.updateWorkstepParameter(workflowName, stepName, parameterName, parameterValue, projname)
if operation == "connectWorksteps":
#api.connectStepsInWorkflow(workflowName,outStepName,outPortName,inStepName,inPortName,projname)
api.connectStepsInWorkflow(workflowName,outputStepName,inputStepName,projname)
if operation == "executeWorkflow":
api.executeWorkflowInProject(workflowName,projname)
if operation == "updateWorkflowFromString":
print "received request to update workflow: ",workflowName
api.updateExistingWorkflowInProject(workflowName,data,projname)
return "OK"
@tangelo.restful
def post(*pargs, **kwargs):
return "projmgr.post()"
@tangelo.restful
def delete(resource, projname, datatype=None, dataset=None):
if resource != "project":
return tangelo.HTTPStatusCode(400, "Bad resource type '%s' - allowed types are: project")
# (This is expressing xor)
if (datatype is None) != (dataset is None):
return tangelo.HTTPStatusCode(400, "Bad arguments - 'datatype' and 'dataset' must both be specified if either one is specified")
if datatype is None:
api.deleteProjectNamed(projname)
else:
api.deleteDataset(projname, datatype, dataset)
return "OK"
ypkang/keras | keras/constraints.py | Python | mit | 1,069 | 0.012161 |
from __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
class Constraint(object):
def __call__(self, p):
return p
def get_config(self):
return {"name":self.__class__.__name__}
class MaxNorm(Constraint):
def __init__(self, m=2):
self.m = m
def __call__(self, p):
norms = T.sqrt(T.sum(T.sqr(p), axis=0))
desired = T.clip(norms, 0, self.m)
p = p * (desired / (1e-7 + norms))
return p
def get_config(self):
return {"name":self.__class__.__name__,
|
"m":self.m}
class NonNeg(Constraint):
def __call__(self, p):
p *= T.ge(p, 0)
return p
class UnitNorm(Constraint):
def __call__(self, p):
return p / T.sqrt(T.sum(p**2, axis=-1, keepdims=True))
identity = Constraint
maxnorm = MaxNorm
nonneg = NonNeg
unitnorm = UnitNorm
from .utils.generic_utils import get_from_module
def get(identifier, kwargs=None):
    return get_from_module(identifier, globals(), 'constraint', instantiate=True, kwargs=kwargs)
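# Illustrative usage sketch (assumes theano, as imported above; W is a
# hypothetical shared variable):
#   W = theano.shared(np.random.randn(3, 4).astype(theano.config.floatX))
#   W_clipped = MaxNorm(m=2)(W)  # symbolic tensor; column norms clipped to 2
#   W.set_value(W_clipped.eval())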
google-research/google-research | tft/script_hyperparam_opt.py | Python | apache-2.0 | 7,859 | 0.006489 |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Main hyperparameter optimisation script.
Performs random search to optimize hyperparameters on a single machine. For new
datasets, inputs to the main(...) should be customised.
"""
import argparse
import datetime as dte
import os
import data_formatters.base
import expt_settings.configs
import libs.hyperparam_opt
import libs.tft_model
import libs.utils as utils
import numpy as np
import pandas as pd
import tensorflow.compat.v1 as tf
ExperimentConfig = expt_settings.configs.ExperimentConfig
HyperparamOptManager = libs.hyperparam_opt.HyperparamOptManager
ModelClass = libs.tft_model.TemporalFusionTransformer
def main(expt_name, use_gpu, restart_opt, model_folder, hyperparam_iterations,
data_csv_path, data_formatter):
"""Runs main hyperparameter optimization routine.
Args:
expt_name: Name of experiment
use_gpu: Whether to run tensorflow with GPU operations
restart_opt: Whether to run hyperparameter optimization from scratch
model_folder: Folder path where models are serialized
hyperparam_iterations: Number of iterations of random search
data_csv_path: Path to csv file containing data
    data_formatter: Dataset-specific data formatter (see
expt_settings.dataformatter.GenericDataFormatter)
"""
if not isinstance(data_formatter, data_formatters.base.GenericDataFormatter):
    raise ValueError(
        "Data formatters should inherit from "
        "AbstractDataFormatter! Type={}".format(type(data_formatter)))
default_keras_session = tf.keras.backend.get_session()
if use_gpu:
tf_config = utils.get_default_tensorflow_config(tf_device="gpu", gpu_id=0)
else:
tf_config = utils.get_default_tensorflow_config(tf_device="cpu")
print("### Running hyperparameter optimization for {} ###".format(expt_name))
print("Loading & splitting data...")
raw_data = pd.read_csv(data_csv_path, index_col=0)
  train, valid, test = data_formatter.split_data(raw_data)
train_samples, valid_samples = data_formatter.get_num_samples_for_calibration(
)
# Sets up default params
fixed_params = data_formatter.get_experiment_params()
param_ranges = ModelClass.get_hyperparm_choices()
fixed_params["model_folder"] = model_folder
print("*** Loading hyperparm manager ***")
opt_manager = HyperparamOptManager(param_ranges, fixed_params, model_folder)
success = opt_manager.load_results()
if success and not restart_opt:
print("Loaded results from previous training")
else:
print("Creating new hyperparameter optimisation")
opt_manager.clear()
print("*** Running calibration ***")
while len(opt_manager.results.columns) < hyperparam_iterations:
print("# Running hyperparam optimisation {} of {} for {}".format(
len(opt_manager.results.columns) + 1, hyperparam_iterations, "TFT"))
tf.reset_default_graph()
with tf.Graph().as_default(), tf.Session(config=tf_config) as sess:
tf.keras.backend.set_session(sess)
params = opt_manager.get_next_parameters()
model = ModelClass(params, use_cudnn=use_gpu)
if not model.training_data_cached():
model.cache_batched_data(train, "train", num_samples=train_samples)
model.cache_batched_data(valid, "valid", num_samples=valid_samples)
sess.run(tf.global_variables_initializer())
model.fit()
val_loss = model.evaluate()
if np.allclose(val_loss, 0.) or np.isnan(val_loss):
        # Set all invalid losses to infinity.
# N.b. val_loss only becomes 0. when the weights are nan.
print("Skipping bad configuration....")
val_loss = np.inf
opt_manager.update_score(params, val_loss, model)
tf.keras.backend.set_session(default_keras_session)
print("*** Running tests ***")
tf.reset_default_graph()
with tf.Graph().as_default(), tf.Session(config=tf_config) as sess:
tf.keras.backend.set_session(sess)
best_params = opt_manager.get_best_params()
model = ModelClass(best_params, use_cudnn=use_gpu)
model.load(opt_manager.hyperparam_folder)
print("Computing best validation loss")
val_loss = model.evaluate(valid)
print("Computing test loss")
output_map = model.predict(test, return_targets=True)
targets = data_formatter.format_predictions(output_map["targets"])
p50_forecast = data_formatter.format_predictions(output_map["p50"])
p90_forecast = data_formatter.format_predictions(output_map["p90"])
def extract_numerical_data(data):
"""Strips out forecast time and identifier columns."""
return data[[
col for col in data.columns
if col not in {"forecast_time", "identifier"}
]]
p50_loss = utils.numpy_normalised_quantile_loss(
extract_numerical_data(targets), extract_numerical_data(p50_forecast),
0.5)
p90_loss = utils.numpy_normalised_quantile_loss(
extract_numerical_data(targets), extract_numerical_data(p90_forecast),
0.9)
tf.keras.backend.set_session(default_keras_session)
print("Hyperparam optimisation completed @ {}".format(dte.datetime.now()))
print("Best validation loss = {}".format(val_loss))
print("Params:")
for k in best_params:
print(k, " = ", best_params[k])
print()
print("Normalised Quantile Loss for Test Data: P50={}, P90={}".format(
p50_loss.mean(), p90_loss.mean()))
if __name__ == "__main__":
def get_args():
"""Returns settings from command line."""
experiment_names = ExperimentConfig.default_experiments
parser = argparse.ArgumentParser(description="Data download configs")
parser.add_argument(
"expt_name",
metavar="e",
type=str,
nargs="?",
default="volatility",
choices=experiment_names,
help="Experiment Name. Default={}".format(",".join(experiment_names)))
parser.add_argument(
"output_folder",
metavar="f",
type=str,
nargs="?",
default=".",
help="Path to folder for data download")
parser.add_argument(
"use_gpu",
metavar="g",
type=str,
nargs="?",
choices=["yes", "no"],
default="no",
help="Whether to use gpu for training.")
parser.add_argument(
"restart_hyperparam_opt",
metavar="o",
type=str,
nargs="?",
choices=["yes", "no"],
default="yes",
help="Whether to re-run hyperparameter optimisation from scratch.")
args = parser.parse_known_args()[0]
root_folder = None if args.output_folder == "." else args.output_folder
return args.expt_name, root_folder, args.use_gpu == "yes", \
args.restart_hyperparam_opt
# Load settings for default experiments
name, folder, use_tensorflow_with_gpu, restart = get_args()
print("Using output folder {}".format(folder))
config = ExperimentConfig(name, folder)
formatter = config.make_data_formatter()
# Customise inputs to main() for new datasets.
main(
expt_name=name,
use_gpu=use_tensorflow_with_gpu,
restart_opt=restart,
model_folder=os.path.join(config.model_folder, "main"),
hyperparam_iterations=config.hyperparam_iterations,
data_csv_path=config.data_csv_path,
data_formatter=formatter)
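  # Illustrative invocation (positional arguments as defined in get_args();
  # the script path is assumed from this repository's layout):
  #   python tft/script_hyperparam_opt.py volatility . no yes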
sseering/ytdlWrapper | urlfind.py | Python | unlicense | 1,923 | 0.00208 |
#!/usr/bin/env python3
import re
import os
import os.path
import sys
def main():
already_found = []
    url_matcher = re.compile(r'(https?://(www\.)?)?((youtu\.be|youtube\.(com|de|ch|at))/watch\?v=[-_0-9A-Za-z]{11}|youtu\.be/[-_0-9A-Za-z]{11})')
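    # Example strings the pattern is intended to match (illustrative video id):
    #   https://www.youtube.com/watch?v=dQw4w9WgXcQ
    #   youtu.be/dQw4w9WgXcQ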
backup_matcher = re.compile(r'youtu')
argc = len(sys.argv)
if argc == 1:
whole_input = sys.stdin.read()
elif argc == 2:
with open(sys.argv[1], mode='rt', encoding='utf8') as inf:
whole_input = inf.read()
else:
raise Exception()
os.makedirs('./urls', exist_ok=True)
num_found = 0
filename_ctr = 0
for match in url_matcher.finditer(whole_input):
num_found += 1
already_found.append((match.start(), match.end()))
written = False
while (not written) and (filename_ctr < 31337):
try:
with open(os.path.join('./urls/', '{0}.txt'.format(filename_ctr)), mode='xt', encoding='utf8') as outf:
print(match.group(0), file=outf)
written = True
except OSError:
pass
filename_ctr += 1
if filename_ctr >= 31337:
print("Error: hit infinite loop while attempting to create files. Exiting.", file=sys.stderr)
sys.exit(1)
num_backup_candidates = 0
whole_len = len(whole_input)
for match in backup_matcher.finditer(whole_input):
ms = match.start()
me = match.end()
for (s, e) in already_found:
if ms >= s and me <= e:
break
else:
s = max(ms - 33, 0)
e = min(me + 33, whole_len)
num_backup_candidates += 1
print('found unmatched candidate: ' + whole_input[s:e])
    print('found {0} unmatched candidates and created {1} URL files'.format(num_backup_candidates, num_found))
print('done')
if __name__ == "__main__":
main()
jcfr/mystic | examples/cg_rosenbrock.py | Python | bsd-3-clause | 1,478 | 0.023004 |
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2015 California Institute of Technology.
# License: 3-clause BSD. The full license text is available at:
# - http://trac.mystic.cacr.caltech.edu/project/mystic/browser/mystic/LICENSE
"""
See test_rosenbrock.py.
This one uses Scipy's CG (Polak-Ribiere) plus viz via matplotlib
cg works well on this problem.
"""
import pylab
from test_rosenbrock import *
from numpy import log
from mystic._scipyoptimize import fmin_cg
import numpy
from mystic.tools import getch
def show():
    import pylab, Image
pylab.savefig('cg_rosenbrock_out',dpi=72)
im = Image.open('cg_rosenbrock_out.png')
im.show()
return
def draw_contour():
import numpy
x, y = numpy.mgrid[-1:2.1:0.02,-0.1:2.1:0.02]
c = 0*x
s,t = x.shape
for i in range(s):
for j in range(t):
xx,yy = x[i,j], y[i,j]
c[i,j] = rosen([xx,yy])
pylab.contourf(x,y,log(c*20+1)+2,60)
def run_once(x0,x1,color='w'):
sol = fmin_cg(rosen, [x0, x1], retall = True, full_output=1)
xy = numpy.asarray(sol[-1])
pylab.plot(xy[:,0],xy[:,1],color+'-',linewidth=2)
pylab.plot(xy[:,0],xy[:,1],color+'o',markersize=6)
return sol
if __name__ == '__main__':
draw_contour()
run_once(0.3,0.3,'k')
run_once(0.5,1.3,'y')
run_once(1.8,0.2,'w')
try:
show()
except ImportError:
pylab.show()
# end of file
mcgill-robotics/compsys | scripts/bag/bag/__main__.py | Python | mit | 1,110 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""McGill Robotics ROS Bagger.
This tool can:
1. Record the specified topics into 15 second bags to the specified
directory.
2. Merge previously recorded bags from the specified directory.
By default, all the topics defined in your project's 'topics' file will be
recorded/merged if no arguments are specified. Otherwise, only the topics
specified will be recorded/merged.
"""
import os
import sys
try:
import rosbag
except ImportError:
sys.stderr.write("Could not find rosbag package. Is ROS installed?\n")
sys.exit(-1)
from util import Parser, TopicList
__version__ = "1.3.1"
def main():
"""Runs the CLI."""
try:
topics_path = os.environ["TOPICS_PATH"]
except KeyError:
print("E: TOPICS_PATH environment variable not set")
sys.exit(2)
topics = TopicList(topics_path)
args = Parser(topics, __doc__, __version__)
status = args.cmd(
topics=args.enabled, name=args.name, dir=args.dir, args=args.raw).run()
sys.exit(status)
if __name__ == "__main__":
    main()
swenger/glitter | glitter/contexts/glx.py | Python | mit | 1,256 | 0.002389 |
"""GLX context creation and management.
Depends on the binary glxcontext module.
@todo: Include glxcontext in distribution as an optional module.
@author: Stephan Wenger
@date: 2012-08-28
"""
try:
from glxcontext import GLXContext as _GLXContext
from glitter.contexts.context import Context
from glitter.contexts.contextmanager import context_manager
class GLXContext(_GLXContext, Context):
"""Offscreen GLX cont
|
ext."""
def __init__(self, **kwargs):
_GLXContext.__init__(self, **kwargs)
Context.__init__(self)
# TODO the lines below should not be necessary, or should at least be performed automatically by context_manager
# XXX I would have expected it worked without these lines because of "with self._context" around GLObject functions; is this a problem with multiple Contexts?
old_binding = context_manager.current_context
if old_binding:
old_binding._bind()
else:
context_manager.current_context = self
def _bind(self):
return _GLXContext.bind(self)
def bind(self):
return Context.bind(self)
__all__ = ["GLXContext"]
except ImportError:
pass
lferr/charm | charm/schemes/ibenc/ibenc_ckrs09.py | Python | lgpl-3.0 | 4,080 | 0.017157 |
'''
Jan Camenisch, Markulf Kohlweiss, Alfredo Rial, and Caroline Sheedy (Pairing-based)
| From: "Blind and Anonymous Identity-Based Encryption and
Authorised Private Searches on Public Key Encrypted Data".
| Published in: PKC 2009
| Available from: http://www.iacr.org/archive/pkc2009/54430202/54430202.pdf
| Notes: section 4.1, first blind and anonymous IBE scheme
| Security Assumptions:
|
| type: identity-based encryption (public key)
| setting: Pairing
:Authors: J Ayo Akinyele/Mike Rushanan
:Date: 02/2012
'''
from charm.toolbox.pairinggroup import PairingGroup,ZR,G1,G2,GT,pair
from charm.toolbox.IBEnc import IBEnc
from charm.toolbox.conversion import Conversion
from charm.toolbox.bitstring import Bytes
from charm.toolbox.iterate import dotprod2
from charm.toolbox.hash_module import Waters
import hashlib
debug = False
class IBE_CKRS(IBEnc):
"""
>>> from charm.toolbox.pairinggroup import PairingGroup, GT
>>> group = PairingGroup('SS512')
>>> ibe = IBE_CKRS(group)
>>> (master_public_key, master_secret_key) = ibe.setup()
>>> ID = "bob@mail.com"
>>> secret_key = ibe.extract(master_public_key, master_secret_key, ID)
>>> msg = group.random(GT)
>>> cipher_text = ibe.encrypt(master_public_key, ID, msg)
>>> decrypted_msg = ibe.decrypt(master_public_key, secret_key, cipher_text)
>>> decrypted_msg == msg
True
"""
def __init__(self, groupObj):
global group,hashObj
group = groupObj
def setup(self, n=5, l=32):
"""n integers with each size l"""
global lam_func, waters
lam_func = lambda i,x,y: x[i] ** y[i]
waters = Waters(group, n, l)
alpha, t1, t2, t3, t4 = group.random(ZR, 5)
z = list(group.random(ZR, n))
g = group.random(G1)
h = group.random(G2)
omega = pair(g, h) ** (t1 * t2 * alpha)
g_l = [g ** i for i in z]
h_l = [h ** i for i in z]
v1, v2 = g ** t1, g ** t2
v3, v4 = g ** t3, g ** t4
msk = { 'alpha':alpha, 't1':t1, 't2':t2, 't3':t3, 't4':t4 }
mpk = { 'omega':omega, 'g':g, 'h':h, 'g_l':g_l, 'h_l':h_l,
'v1':v1, 'v2':v2, 'v3':v3, 'v4':v4, 'n':n, 'l':l }
return (mpk, msk)
def extract(self, mpk, msk, ID):
r1, r2 = group.random(ZR, 2) # should be params of extract
hID = waters.hash(ID)
hashID2 = mpk['h_l'][0] * dotprod2(range(1,mpk['n']), lam_func, mpk['h_l'], hID)
d = {}
d[0] = mpk['h'] ** ((r1 * msk['t1'] * msk['t2']) + (r2 * msk['t3'] * msk['t4']))
d[1] = (mpk['h'] ** (-msk['alpha'] * msk['t2'])) * (hashID2 ** (-r1 * msk['t2']))
d[2] = (mpk['h'] ** (-msk['alpha'] * msk['t1'])) * (hashID2 ** (-r1 * msk['t1']))
d[3] = hashID2 ** (-r2 * msk['t4'])
d[4] = hashID2 ** (-r2 * msk['t3'])
return { 'd':d }
def encrypt(self, mpk, ID, msg):
s, s1, s2 = group.random(ZR, 3)
        hID = waters.hash(ID)
hashID1 = mpk['g_l'][0] * dotprod2(range(1,mpk['n']), lam_func, mpk['g_l'], hID)
c = {}
c_pr = (mpk['omega'] ** s) * msg
c[0] = hashID1 ** s
c[1] = mpk['v1'] ** (s - s1)
c[2] = mpk['v2'] ** s1
        c[3] = mpk['v3'] ** (s - s2)
c[4] = mpk['v4'] ** s2
return {'c':c, 'c_prime':c_pr }
def decrypt(self, mpk, sk, ct):
c, d = ct['c'], sk['d']
msg = ct['c_prime'] * pair(c[0], d[0]) * pair(c[1], d[1]) * pair(c[2], d[2]) * pair(c[3], d[3]) * pair(c[4], d[4])
return msg
def main():
groupObj = PairingGroup('SS512')
ibe = IBE_CKRS(groupObj)
(mpk, msk) = ibe.setup()
# represents public identity
ID = "bob@mail.com"
sk = ibe.extract(mpk, msk, ID)
M = groupObj.random(GT)
ct = ibe.encrypt(mpk, ID, M)
m = ibe.decrypt(mpk, sk, ct)
if debug: print('m =>', m)
assert m == M, "FAILED Decryption!"
if debug: print("Successful Decryption!!! m => '%s'" % m)
if __name__ == "__main__":
debug = True
main()
OpenTherapeutics/transcode | tests/test_config.py | Python | mit | 1,153 | 0.005204 |
import pytest
import transcode.conf
import transcode.render
def my_callback(source, *args, **kws):
pass
CFG_GOOD = {
'TEXT': {'transcoder': my_callback},
}
CFG_BAD = {
'MARK': {'transcoder': 42}
}
class TestConf:
def test_default_config(self):
        for fmt, expected in (
(transcode.conf.HTML_FORMAT, transcode.render.render_html),
(transcode.conf.SIMPLE_TEXT_FORMAT, transcode.render.render_simple),
(transcode.conf.MARKDOWN_FORMAT, transcode.render.render_markdown),
(transcode.conf.RST_FORMAT, transcode.render.render_restructuredtext),
):
handler, args, kwargs = transcode.conf.get_transcoder(fmt)
assert handler is expected
def test_config_with_actual_callback(self):
handler, args, kwargs = transcode.conf.get_transcoder('TEXT', CFG_GOOD)
assert handler == my_callback
assert args == ()
assert kwargs == {}
def test_config_with_bad_callback(self):
try:
transcode.conf.load_config(CFG_BAD)
except TypeError:
assert True
else:
assert False
pgmmpk/pypatgen | patgen/tests/test_dictionary.py | Python | mit | 1,321 | 0.009084 |
'''
Created on Mar 7, 2016
@author: mike
'''
from patgen.dictionary import parse_dictionary_word, format_dictionary_word,\
Dictionary
import unittest
from patgen.margins import Margins
class TestDictionary(unittest.TestCase):
    def test_parse(self):
text, hyphens, missed, false, weights = parse_dictionary_word('hy-phe-2n-a-tion')
self.assertEqual(text, 'hyphenation')
self.assertEqual(hyphens, {2, 5, 6, 7})
self.assertEqual(missed, set())
        self.assertEqual(false, set())
self.assertEqual(weights, {0: 1, 1: 1, 2: 1, 3: 1, 4: 1, 5: 2, 6: 1, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1})
def test_format(self):
s = format_dictionary_word('hyphenation', {2, 5, 6, 7})
self.assertEqual(s, 'hy-phe-n-a-tion')
def test_format_weights(self):
s = format_dictionary_word('hyphenation', {2, 5, 6, 7}, weights={0: 3, 1: 3, 2: 3, 3: 3, 4: 3, 5: 3, 6: 3, 7: 3, 8: 3, 9: 3, 10: 3, 11: 3})
self.assertEqual(s, '3hy-phe-n-a-tion')
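    # As the two tests above illustrate, digits in a dictionary word encode
    # hyphen weights: 'hy-phe-2n-a-tion' gives position 5 a weight of 2, and
    # a leading '3' raises the weight of every position to 3.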
def test_analysis(self):
dictionary = Dictionary.from_string('''
word
''')
x = set(dictionary.generate_pattern_statistics(False, 3, 1, Margins(1,1)))
self.assertEqual(x, {('wor', 0, 1), ('ord', 0, 1), ('rd.', 0, 1)})
eltonkevani/tempest_el_env | tempest/api/orchestration/stacks/test_server_cfn_init.py | Python | apache-2.0 | 6,500 | 0 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import testtools
from tempest.api.orchestration import base
from tempest.common.utils import data_utils
from tempest.common.utils.linux.remote_client import RemoteClient
import tempest.config
from tempest.openstack.common import log as logging
from tempest.test import attr
LOG = logging.getLogger(__name__)
class ServerCfnInitTestJSON(base.BaseOrchestrationTest):
_interface = 'json'
existing_keypair = (tempest.config.TempestConfig().
orchestration.keypair_name is not None)
template = """
HeatTemplateFormatVersion: '2012-12-12'
Description: |
Template which uses a wait condition to confirm that a minimal
cfn-init and cfn-signal has worked
Parameters:
key_name:
Type: String
flavor:
Type: String
image:
Type: String
network:
Type: String
Resources:
CfnUser:
Type: AWS::IAM::User
SmokeSecurityGroup:
Type: AWS::EC2::SecurityGroup
Properties:
GroupDescription: Enable only ping and SSH access
SecurityGroupIngress:
- {CidrIp: 0.0.0.0/0, FromPort: '-1', IpProtocol: icmp, ToPort: '-1'}
- {CidrIp: 0.0.0.0/0, FromPort: '22', IpProtocol: tcp, ToPort: '22'}
SmokeKeys:
Type: AWS::IAM::AccessKey
Properties:
UserName: {Ref: CfnUser}
SmokeServer:
Type: OS::Nova::Server
Metadata:
AWS::CloudFormation::Init:
config:
files:
/tmp/smoke-status:
content: smoke test complete
/etc/cfn/cfn-credentials:
content:
Fn::Join:
- ''
- - AWSAccessKeyId=
- {Ref: SmokeKeys}
- '
'
- AWSSecretKey=
- Fn::GetAtt: [SmokeKeys, SecretAccessKey]
- '
'
mode: '000400'
owner: root
group: root
Properties:
image: {Ref: image}
flavor: {Ref: flavor}
key_name: {Ref: key_name}
security_groups:
- {Ref: SmokeSecurityGroup}
networks:
- uuid: {Ref: network}
user_data:
Fn::Base64:
Fn::Join:
- ''
- - |-
#!/bin/bash -v
/opt/aws/bin/cfn-init
- |-
|| error_exit ''Failed to run cfn-init''
/opt/aws/bin/cfn-signal -e 0 --data "`cat /tmp/smoke-status`" '
- {Ref: WaitHandle}
- '''
'
WaitHandle:
Type: AWS::CloudFormation::WaitConditionHandle
WaitCondition:
Type: AWS::CloudFormation::WaitCondition
DependsOn: SmokeServer
Properties:
Handle: {Ref: WaitHandle}
Timeout: '600'
Outputs:
WaitConditionStatus:
Description: Contents of /tmp/smoke-status on SmokeServer
Value:
Fn::GetAtt: [WaitCondition, Data]
SmokeServerIp:
Description: IP address of server
Value:
Fn::GetAtt: [SmokeServer, first_address]
"""
@classmethod
def setUpClass(cls):
super(ServerCfnInitTestJSON, cls).setUpClass()
if not cls.orchestration_cfg.image_ref:
raise cls.skipException("No image available to test")
cls.client = cls.orchestration_client
stack_name = data_utils.rand_name('heat')
if cls.orchestration_cfg.keypair_name:
keypair_name = cls.orchestration_cfg.keypair_name
else:
cls.keypair = cls._create_keypair()
keypair_name = cls.keypair['name']
# create the stack
cls.stack_identifier = cls.create_stack(
stack_name,
cls.template,
parameters={
'key_name': keypair_name,
'flavor': cls.orchestration_cfg.instance_type,
'image': cls.orchestration_cfg.image_ref,
'network': cls._get_default_network()['id']
})
@attr(type='slow')
@testtools.skipIf(existing_keypair, 'Server ssh tests are disabled.')
def test_can_log_into_created_server(self):
sid = self.stack_identifier
rid = 'SmokeServer'
# wait for server resource create to complete.
self.client.wait_for_resource_status(sid, rid, 'CREATE_COMPLETE')
resp, body = self.client.get_resource(sid, rid)
self.assertEqual('CREATE_COMPLETE', body['resource_status'])
# fetch the IP address from servers client, since we can't get it
# from the stack until stack create is complete
resp, server = self.servers_client.get_server(
body['physical_resource_id'])
# Check that the user can authenticate with the generated password
linux_client = RemoteClient(
server, 'ec2-user', pkey=self.keypair['private_key'])
self.assertTrue(linux_client.can_authenticate())
@attr(type='slow')
def test_stack_wait_condition_data(self):
sid = self.stack_identifier
# wait for create to complete.
self.client.wait_for_stack_status(sid, 'CREATE_COMPLETE')
# fetch the stack
resp, body = self.client.get_stack(sid)
self.assertEqual('CREATE_COMPLETE', body['stack_status'])
# This is an assert of great significance, as it means the following
# has happened:
# - cfn-init read the provided metadata and wrote out a file
# - a user was created and credentials written to the server
# - a cfn-signal was built which was signed with provided credentials
# - the wait condition was fulfilled and the stack has changed state
wait_status = json.loads(
self.stack_output(body, 'WaitConditionStatus'))
self.assertEqual('smoke test complete', wait_status['00000'])
|
sebdah/markdown-docs
|
markdowndocs/__init__.py
|
Python
|
apache-2.0
| 4,970 | 0.004024 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" markdown-docs markdown documentation reader
APACHE LICENSE 2.0
Copyright 2013 Sebastian Dahlgren
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
VERSION = '0.2.0'
import os
import sys
import shutil
import tempfile
import argparse
import markdowndocs.generator
import markdowndocs.web_server
from markdowndocs.markdown_file import MarkdownFile
from markdowndocs.log_handler import LOGGER as logger
def main():
""" Main function """
parser = argparse.ArgumentParser(
description='markdown-docs markdown documentation generator')
parser.add_argument('-d', '--directory',
help='Root directory to parse from (default: current dir)')
parser.add_argument('-o', '--output',
help='Output directory to store HTML files in')
parser.add_argument('--version',
action='store_true',
help='Print version information')
parser.add_argument('generate',
nargs='?',
                        default=False,
help='Generate HTML')
parser.add_argument('serve',
nargs='?',
default=True,
                        help='Start a local web server to serve the documentation')
args = parser.parse_args()
if args.version:
print_version()
sys.exit(0)
if args.directory:
source_dir = os.path.expandvars(os.path.expanduser(args.directory))
if not os.path.exists(source_dir):
logger.error('{} does not exist'.format(source_dir))
sys.exit(1)
elif not os.path.isdir(source_dir):
logger.error('{} is not a directory'.format(source_dir))
sys.exit(1)
else:
source_dir = os.path.realpath(os.path.curdir)
temp_dir_used = False
if args.output:
destination_root_dir = os.path.expandvars(
os.path.expanduser(args.output))
try:
os.makedirs(destination_root_dir)
except OSError as (errno, errmsg):
if errno == 17:
# Code 17 == File exists
pass
else:
logger.error('Error creating {}: {}'.format(
destination_root_dir, errmsg))
sys.exit(1)
else:
destination_root_dir = tempfile.mkdtemp(prefix='markdown-docs')
logger.debug('Using temporary folder: {}'.format(destination_root_dir))
if not args.generate:
temp_dir_used = True
try:
markdown_files = find_markdown_files(source_dir, destination_root_dir)
logger.info('Generating documentation for {:d} markdown files..'.format(
len(markdown_files)))
markdowndocs.generator.generate_html(markdown_files)
markdowndocs.generator.generate_index_page(markdown_files)
markdowndocs.generator.import_static_files(destination_root_dir)
logger.info('Done with documentation generation!')
if args.serve and not args.generate:
markdowndocs.web_server.run_webserver(destination_root_dir)
if args.generate:
logger.info('HTML output can be found in {}'.format(
destination_root_dir))
finally:
if temp_dir_used:
logger.debug('Removing temporary folder: {}'.format(
destination_root_dir))
shutil.rmtree(destination_root_dir)
def find_markdown_files(source_dir, destination_root_dir):
""" Returns a list of all Markdown files
:type source_dir: str
:param source_dir: Where should the markdown-docs start looking?
:type destination_root_dir: str
:param destination_root_dir: Path to the output dir
:returns: list -- List of MarkdownFile objects
"""
md_files_dict = {}
for dirpath, _, filenames in os.walk(source_dir):
for filename in filenames:
try:
_, extension = filename.rsplit('.', 1)
if extension in ['md', 'mdown', 'markdown']:
md_file = MarkdownFile(
os.path.join(dirpath, filename),
source_dir,
destination_root_dir)
md_files_dict[os.path.join(dirpath, filename)] = md_file
except ValueError:
pass
markdown_files = []
for md_file in sorted(md_files_dict):
markdown_files.append(md_files_dict[md_file])
return markdown_files
def print_version():
""" Prints the version and exits """
    print('markdown-docs: {}'.format(VERSION))
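# Illustrative CLI usage (a sketch, not from the original source; it assumes
# the installed console script is named `markdown-docs`, after the project):
#
#   markdown-docs -d ./docs                      # build into a temp dir and serve locally
#   markdown-docs -d ./docs -o ./html generate   # write HTML into ./html and keep it
#
# Passing the positional `generate` argument skips the web server and keeps
# the output directory; without it the docs are served from a temporary
# folder that is removed on exit.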
|
kvar/ansible
|
lib/ansible/modules/storage/purestorage/_purefa_facts.py
|
Python
|
gpl-3.0
| 32,021 | 0.000999 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018, Simon Dodsley (simon@purestorage.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: purefa_facts
version_added: '2.6'
deprecated:
removed_in: '2.13'
why: Deprecated in favor of C(_info) module.
alternative: Use M(purefa_info) instead.
short_description: Collect facts from Pure Storage FlashArray
description:
- Collect facts information from a Pure Storage Flasharray running the
Purity//FA operating system. By default, the module will collect basic
fact information including hosts, host groups, protection
groups and volume counts. Additional fact information can be collected
based on the configured set of arguments.
author:
- Pure Storage ansible Team (@sdodsley) <pure-ansible-team@purestorage.com>
options:
gather_subset:
description:
- When supplied, this argument will define the facts to be collected.
Possible values for this include all, minimum, config, performance,
capacity, network, subnet, interfaces, hgroups, pgroups, hosts,
admins, volumes, snapshots, pods, vgroups, offload, apps and arrays.
type: list
required: false
default: minimum
extends_documentation_fragment:
- purestorage.fa
'''
EXAMPLES = r'''
- name: collect default set of facts
purefa_facts:
    fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: collect configuration and capacity facts
purefa_facts:
gather_subset:
- config
- capacity
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: collect all facts
purefa_facts:
gather_subset:
- all
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
'''
RETURN = r'''
ansible_facts:
description: Returns the facts collected from the FlashArray
returned: always
type: complex
sample: {
"capacity": {},
"config": {
"directory_service": {
"array_admin_group": null,
"base_dn": null,
"bind_password": null,
"bind_user": null,
"check_peer": false,
"enabled": false,
"group_base": null,
"readonly_group": null,
"storage_admin_group": null,
"uri": []
},
"dns": {
"domain": "domain.com",
"nameservers": [
"8.8.8.8",
"8.8.4.4"
]
},
"ntp": [
"0.ntp.pool.org",
"1.ntp.pool.org",
"2.ntp.pool.org",
"3.ntp.pool.org"
],
"smtp": [
{
"enabled": true,
"name": "alerts@acme.com"
},
{
"enabled": true,
"name": "user@acme.com"
}
],
"snmp": [
{
"auth_passphrase": null,
"auth_protocol": null,
"community": null,
"host": "localhost",
"name": "localhost",
"privacy_passphrase": null,
"privacy_protocol": null,
"user": null,
"version": "v2c"
}
],
"ssl_certs": {
"country": null,
"email": null,
"issued_by": "",
"issued_to": "",
"key_size": 2048,
"locality": null,
"organization": "Acme Storage, Inc.",
"organizational_unit": "Acme Storage, Inc.",
"state": null,
"status": "self-signed",
"valid_from": "2017-08-11T23:09:06Z",
"valid_to": "2027-08-09T23:09:06Z"
},
"syslog": []
},
"default": {
"array_name": "flasharray1",
"connected_arrays": 1,
"hostgroups": 0,
"hosts": 10,
"pods": 3,
"protection_groups": 1,
"purity_version": "5.0.4",
"snapshots": 1,
"volume_groups": 2
},
"hgroups": {},
"hosts": {
"host1": {
"hgroup": null,
"iqn": [
"iqn.1994-05.com.redhat:2f6f5715a533"
],
"wwn": []
},
"host2": {
"hgroup": null,
"iqn": [
"iqn.1994-05.com.redhat:d17fb13fe0b"
],
"wwn": []
},
"host3": {
"hgroup": null,
"iqn": [
"iqn.1994-05.com.redhat:97b1351bfb2"
],
"wwn": []
},
"host4": {
"hgroup": null,
"iqn": [
"iqn.1994-05.com.redhat:dd84e9a7b2cb"
],
"wwn": [
"10000000C96C48D1",
"10000000C96C48D2"
]
}
},
"interfaces": {
"CT0.ETH4": "iqn.2010-06.com.purestorage:flasharray.2111b767484e4682",
"CT0.ETH5": "iqn.2010-06.com.purestorage:flasharray.2111b767484e4682",
"CT1.ETH4": "iqn.2010-06.com.purestorage:flasharray.2111b767484e4682",
"CT1.ETH5": "iqn.2010-06.com.purestorage:flasharray.2111b767484e4682"
},
"network": {
"ct0.eth0": {
"address": "10.10.10.10",
"gateway": "10.10.10.1",
"hwaddr": "ec:f4:bb:c8:8a:04",
"mtu": 1500,
"netmask": "255.255.255.0",
"services": [
"management"
],
"speed": 1000000000
},
"ct0.eth2": {
"address": "10.10.10.11",
"gateway": null,
"hwaddr": "ec:f4:bb:c8:8a:00",
"mtu": 1500,
"netmask": "255.255.255.0",
"services": [
"replication"
],
"speed": 10000000000
},
"ct0.eth3": {
"address": "10.10.10.12",
"gateway": null,
"hwaddr": "ec:f4:bb:c8:8a:02",
"mtu": 1500,
"netmask": "255.255.255.0",
"services": [
"replication"
],
"speed": 10000000000
},
"ct0.eth4": {
"address": "10.10.10.13",
"gateway": null,
"hwaddr": "90:e2:ba:83:79:0c",
"mtu": 1500,
"netmask": "255.255.255.0",
"services": [
"iscsi"
],
"speed": 10000000000
},
"ct0.eth5": {
"address": "10.10.10.14",
"gateway": null,
"hwaddr": "90:e2:ba:83:79:0d",
"mtu": 1500,
"netmask": "255.255.255.0",
"services": [
"iscsi"
],
"speed": 10000000000
},
"vir0": {
"address": "10.10.10.20",
"gateway": "10.10.10.1",
"hwaddr": "fe:ba:e9:e7:6b:0f",
"mtu": 1500,
"netmask": "255.255.255.0",
"services": [
"management"
],
"speed": 1000000000
}
},
"offload": {
"nfstarget": {
"address": "10.0.2.53",
|
silly-wacky-3-town-toon/SOURCE-COD
|
toontown/safezone/SafeZoneLoader.py
|
Python
|
apache-2.0
| 9,369 | 0.001814 |
from panda3d.core import *
from panda3d.direct import *
from toontown.toonbase.ToonBaseGlobal import *
from toontown.distributed.ToontownMsgTypes import *
from toontown.hood import ZoneUtil
from direct.directnotify import DirectNotifyGlobal
from toontown.hood import Place
from direct.showbase import DirectObject
from direct.fsm import StateData
from direct.fsm import ClassicFSM, State
from direct.fsm import State
from direct.task import Task
from toontown.launcher import DownloadForceAcknowledge
from toontown.toon import HealthForceAcknowledge
from toontown.toon.Toon import teleportDebug
from toontown.tutorial import TutorialForceAcknowledge
from toontown.toonbase.ToontownGlobals import *
from toontown.building import ToonInterior
from toontown.hood import QuietZoneState
from toontown.dna.DNAParser import *
from direct.stdpy.file import *
class SafeZoneLoader(StateData.StateData):
notify = DirectNotifyGlobal.directNotify.newCategory('SafeZoneLoader')
def __init__(self, hood, parentFSMState, doneEvent):
StateData.StateData.__init__(self, doneEvent)
self.hood = hood
self.parentFSMState = parentFSMState
        self.fsm = ClassicFSM.ClassicFSM('SafeZoneLoader', [State.State('start', self.enterStart, self.exitStart, ['quietZone', 'playground', 'toonInterior']),
State.State('playground', self.enterPlayground, self.exitPlayground, ['quietZone']),
State.State('toonInterior', self.enterToonInterior, self.exitToonInterior, ['quietZone']),
State.State('quietZone', self.enterQuietZone, self.exitQuietZone, ['playground', 'toonInterior']),
State.State('golfcourse', self.enterGolfcourse, self.exitGolfcourse, ['quietZone', 'playground']),
         State.State('final', self.enterFinal, self.exitFinal, ['start'])], 'start', 'final')
self.placeDoneEvent = 'placeDone'
self.place = None
self.playgroundClass = None
return
def load(self):
self.music = base.loadMusic(self.musicFile)
self.activityMusic = base.loadMusic(self.activityMusicFile)
self.createSafeZone(self.dnaFile)
self.parentFSMState.addChild(self.fsm)
def unload(self):
self.parentFSMState.removeChild(self.fsm)
del self.parentFSMState
self.geom.removeNode()
del self.geom
del self.fsm
del self.hood
del self.nodeList
del self.playgroundClass
del self.music
del self.activityMusic
del self.holidayPropTransforms
self.deleteAnimatedProps()
self.ignoreAll()
ModelPool.garbageCollect()
TexturePool.garbageCollect()
def enter(self, requestStatus):
self.fsm.enterInitialState()
messenger.send('enterSafeZone')
self.setState(requestStatus['where'], requestStatus)
def exit(self):
messenger.send('exitSafeZone')
def setState(self, stateName, requestStatus):
self.fsm.request(stateName, [requestStatus])
def createSafeZone(self, dnaFile):
if self.safeZoneStorageDNAFile:
dnaBulk = DNABulkLoader(self.hood.dnaStore, (self.safeZoneStorageDNAFile,))
dnaBulk.loadDNAFiles()
node = loadDNAFile(self.hood.dnaStore, dnaFile)
if node.getNumParents() == 1:
self.geom = NodePath(node.getParent(0))
self.geom.reparentTo(hidden)
else:
self.geom = hidden.attachNewNode(node)
self.makeDictionaries(self.hood.dnaStore)
self.createAnimatedProps(self.nodeList)
self.holidayPropTransforms = {}
npl = self.geom.findAllMatches('**/=DNARoot=holiday_prop')
for i in xrange(npl.getNumPaths()):
np = npl.getPath(i)
np.setTag('transformIndex', `i`)
self.holidayPropTransforms[i] = np.getNetTransform()
gsg = base.win.getGsg()
if gsg:
self.geom.prepareScene(gsg)
self.geom.flattenMedium()
def makeDictionaries(self, dnaStore):
self.nodeList = []
for i in xrange(dnaStore.getNumDNAVisGroups()):
groupFullName = dnaStore.getDNAVisGroupName(i)
groupName = base.cr.hoodMgr.extractGroupName(groupFullName)
groupNode = self.geom.find('**/' + groupFullName)
if groupNode.isEmpty():
self.notify.error('Could not find visgroup')
groupNode.flattenMedium()
self.nodeList.append(groupNode)
self.removeLandmarkBlockNodes()
self.hood.dnaStore.resetPlaceNodes()
self.hood.dnaStore.resetDNAGroups()
self.hood.dnaStore.resetDNAVisGroups()
self.hood.dnaStore.resetDNAVisGroupsAI()
def removeLandmarkBlockNodes(self):
npc = self.geom.findAllMatches('**/suit_building_origin')
for i in range(npc.getNumPaths()):
npc.getPath(i).removeNode()
def enterStart(self):
pass
def exitStart(self):
pass
def enterPlayground(self, requestStatus):
self.acceptOnce(self.placeDoneEvent, self.handlePlaygroundDone)
self.place = self.playgroundClass(self, self.fsm, self.placeDoneEvent)
self.place.load()
self.place.enter(requestStatus)
base.cr.playGame.setPlace(self.place)
def exitPlayground(self):
self.ignore(self.placeDoneEvent)
self.place.exit()
self.place.unload()
self.place = None
base.cr.playGame.setPlace(self.place)
return
def handlePlaygroundDone(self):
status = self.place.doneStatus
teleportDebug(status, 'handlePlaygroundDone, doneStatus=%s' % (status,))
if ZoneUtil.getBranchZone(status['zoneId']) == self.hood.hoodId and status['shardId'] == None:
teleportDebug(status, 'same branch')
self.fsm.request('quietZone', [status])
else:
self.doneStatus = status
teleportDebug(status, 'different hood')
messenger.send(self.doneEvent)
return
def enterToonInterior(self, requestStatus):
self.acceptOnce(self.placeDoneEvent, self.handleToonInteriorDone)
self.place = ToonInterior.ToonInterior(self, self.fsm.getStateNamed('toonInterior'), self.placeDoneEvent)
base.cr.playGame.setPlace(self.place)
self.place.load()
self.place.enter(requestStatus)
def exitToonInterior(self):
self.ignore(self.placeDoneEvent)
self.place.exit()
self.place.unload()
self.place = None
base.cr.playGame.setPlace(self.place)
return
def handleToonInteriorDone(self):
status = self.place.doneStatus
if ZoneUtil.getBranchZone(status['zoneId']) == self.hood.hoodId and status['shardId'] == None:
self.fsm.request('quietZone', [status])
else:
self.doneStatus = status
messenger.send(self.doneEvent)
return
def enterQuietZone(self, requestStatus):
self.quietZoneDoneEvent = uniqueName('quietZoneDone')
self.acceptOnce(self.quietZoneDoneEvent, self.handleQuietZoneDone)
self.quietZoneStateData = QuietZoneState.QuietZoneState(self.quietZoneDoneEvent)
self.quietZoneStateData.load()
self.quietZoneStateData.enter(requestStatus)
def exitQuietZone(self):
self.ignore(self.quietZoneDoneEvent)
del self.quietZoneDoneEvent
self.quietZoneStateData.exit()
self.quietZoneStateData.unload()
self.quietZoneStateData = None
return
def handleQuietZoneDone(self):
status = self.quietZoneStateData.getRequestStatus()
if status['where'] == 'estate':
self.doneStatus = status
messenger.send(self.doneEvent)
else:
self.fsm.request(status['where'], [status])
def enterFinal(self):
pass
def exitFinal(self):
pass
def createAnimatedProps(self, nodeList):
self.animPropDict = {}
for i in nodeList:
animPropNodes = i.findAllMatches('**/animated_prop_*')
numAnimPropNodes = animPropNodes.getNumPaths()
for j in range(numAnimPropNodes):
animPropNode
|
cloud9ers/gurumate
|
environment/share/doc/ipython/examples/parallel/interengine/bintree.py
|
Python
|
lgpl-3.0
| 7,132 | 0.008833 |
"""
BinaryTree inter-engine communication class
use from bintree_script.py
Provides parallel [all]reduce functionality
"""
import cPickle as pickle
import re
import socket
import uuid
import zmq
from IPython.parallel.util import disambiguate_url
#----------------------------------------------------------------------------
# bintree-related construction/printing helpers
#----------------------------------------------------------------------------
def bintree(ids, parent=None):
"""construct {child:parent} dict representation of a binary tree
keys are the nodes in the tree, and values are the parent of each node.
    The root node has parent `parent`, default: None.
>>> tree = bintree(range(7))
>>> tree
    {0: None, 1: 0, 2: 1, 3: 1, 4: 0, 5: 4, 6: 4}
>>> print_bintree(tree)
0
1
2
3
4
5
6
"""
parents = {}
n = len(ids)
if n == 0:
return parents
root = ids[0]
parents[root] = parent
if len(ids) == 1:
return parents
else:
ids = ids[1:]
n = len(ids)
left = bintree(ids[:n/2], parent=root)
right = bintree(ids[n/2:], parent=root)
parents.update(left)
parents.update(right)
return parents
def reverse_bintree(parents):
"""construct {parent:[children]} dict from {child:parent}
keys are the nodes in the tree, and values are the lists of children
of that node in the tree.
reverse_tree[None] is the root node
>>> tree = bintree(range(7))
>>> reverse_bintree(tree)
{None: 0, 0: [1, 4], 4: [5, 6], 1: [2, 3]}
"""
children = {}
for child,parent in parents.iteritems():
if parent is None:
children[None] = child
continue
elif parent not in children:
children[parent] = []
children[parent].append(child)
return children
def depth(n, tree):
"""get depth of an element in the tree"""
d = 0
parent = tree[n]
while parent is not None:
d += 1
parent = tree[parent]
return d
def print_bintree(tree, indent=' '):
"""print a binary tree"""
for n in sorted(tree.keys()):
print "%s%s" % (indent * depth(n,tree), n)
#----------------------------------------------------------------------------
# Communicator class for a binary-tree map
#----------------------------------------------------------------------------
ip_pat = re.compile(r'^\d+\.\d+\.\d+\.\d+$')
def disambiguate_dns_url(url, location):
"""accept either IP address or dns name, and return IP"""
if not ip_pat.match(location):
location = socket.gethostbyname(location)
return disambiguate_url(url, location)
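# Rough illustration of the intent (hostname is hypothetical):
#   disambiguate_dns_url('tcp://*:5555', 'node1.cluster.local')
# first resolves the hostname to an IP, then lets disambiguate_url substitute
# that IP for the wildcard bind address in the URL.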
class BinaryTreeCommunicator(object):
id = None
pub = None
sub = None
downstream = None
upstream = None
pub_url = None
tree_url = None
def __init__(self, id, interface='tcp://*', root=False):
self.id = id
self.root = root
# create context and sockets
self._ctx = zmq.Context()
if root:
self.pub = self._ctx.socket(zmq.PUB)
else:
self.sub = self._ctx.socket(zmq.SUB)
self.sub.setsockopt(zmq.SUBSCRIBE, b'')
self.downstream = self._ctx.socket(zmq.PULL)
self.upstream = self._ctx.socket(zmq.PUSH)
# bind to ports
interface_f = interface + ":%i"
if self.root:
pub_port = self.pub.bind_to_random_port(interface)
self.pub_url = interface_f % pub_port
tree_port = self.downstream.bind_to_random_port(interface)
self.tree_url = interface_f % tree_port
self.downstream_poller = zmq.Poller()
self.downstream_poller.register(self.downstream, zmq.POLLIN)
# guess first public IP from socket
self.location = socket.gethostbyname_ex(socket.gethostname())[-1][0]
def __del__(self):
self.downstream.close()
self.upstream.close()
if self.root:
self.pub.close()
else:
self.sub.close()
self._ctx.term()
@property
def info(self):
"""return the connection info for this object's sockets."""
return (self.tree_url, self.location)
def connect(self, peers, btree, pub_url, root_id=0):
"""connect to peers. `peers` will be a dict of 4-tuples, keyed by name.
{peer : (ident, addr, pub_addr, location)}
where peer is the name, ident is the XREP identity, addr,pub_addr are the
"""
# count the number of children we have
self.nchildren = btree.values().count(self.id)
if self.root:
return # root only binds
root_location = peers[root_id][-1]
self.sub.connect(disambiguate_dns_url(pub_url, root_location))
parent = btree[self.id]
tree_url, location = peers[parent]
self.upstream.connect(disambiguate_dns_url(tree_url, location))
def serialize(self, obj):
"""serialize objects.
Must return list of sendable buffers.
Can be extended for more efficient/noncopying serialization of numpy arrays, etc.
"""
return [pickle.dumps(obj)]
def unserialize(self, msg):
"""inverse of serialize"""
return pickle.loads(msg[0])
def publish(self, value):
assert self.root
self.pub.send_multipart(self.serialize(value))
def consume(self):
assert not self.root
return self.unserialize(self.sub.recv_multipart())
def send_upstream(self, value, flags=0):
assert not self.root
self.upstream.send_multipart(self.serialize(value), flags=flags|zmq.NOBLOCK)
def recv_downstream(self, flags=0, timeout=2000.):
# wait for a message, so we won't block if there was a bug
self.downstream_poller.poll(timeout)
msg = self.downstream.recv_multipart(zmq.NOBLOCK|flags)
return self.unserialize(msg)
def reduce(self, f, value, flat=True, all=False):
"""parallel reduce on binary tree
if flat:
value is an entry in the sequence
else:
value is a list of entries in the sequence
if all:
broadcast final result to all nodes
else:
only root gets final result
"""
if not flat:
value = reduce(f, value)
for i in range(self.nchildren):
value = f(value, self.recv_downstream())
if not self.root:
self.send_upstream(value)
if all:
if self.root:
self.publish(value)
else:
value = self.consume()
return value
def allreduce(self, f, value, flat=True):
"""parallel reduce followed by broadcast of the result"""
return self.reduce(f, value, flat=flat, all=True)
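# --- Illustrative wiring sketch (not part of the original module) ---
# One communicator runs per engine; the ids, the peer-exchange step and the
# variable names below are assumptions. With engine ids 0..n-1, engine 0 root:
#
#   tree = bintree(range(n))                            # {child: parent}
#   com = BinaryTreeCommunicator(my_id, root=(my_id == 0))
#   # exchange com.info across engines to build peers = {id: (tree_url, location)}
#   # and distribute the root's pub_url, then:
#   com.connect(peers, tree, pub_url, root_id=0)
#   total = com.allreduce(lambda a, b: a + b, my_value)  # every engine gets the sum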
|
progdupeupl/pdp_website
|
doc/conf.py
|
Python
|
agpl-3.0
| 7,983 | 0.00714 |
# -*- coding: utf-8 -*-
#
# Progdupeupl documentation build configuration file, created by
# sphinx-quickstart on Sat Dec 07 17:25:18 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.pngmath',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Progdupeupl'
copyright = u'2013, Romain Porte'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.1'
# The full version, including alpha/beta/rc tags.
release = '1.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Progdupeupldoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Progdupeupl.tex', u'Progdupeupl Documentation',
u'Romain Porte', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'progdupeupl', u'Progdupeupl Documentation',
[u'Romain Porte'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Progdupeupl', u'Progdupeupl Documentation',
u'Romain Porte', 'Progdupeupl', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
autodoc_default_flags = ['members']
|
amdeb/amdeb-integrator
|
amdeb_integrator/shared/model_names.py
|
Python
|
gpl-3.0
| 549 | 0 |
# -*- coding: utf-8 -*-
"""
contains definition of models names
"""
PRODUCT_TEMPLATE_TABLE = 'product.template'
PRODUCT_PRODUCT_TABLE = 'product.product'
PRODUCT_TEMPLATE_ID_FIELD = 'product_tmpl_id'
PRODUCT_VIRTUAL_AVAILABLE_FIELD = 'virtual_available'
PRODUCT_OPERATION_TABLE = 'amdeb.product.operation'
MODEL_NAME_FIELD = 'model_name'
RECORD_ID_FIELD = 'record_id'
TEMPLATE_ID_FIELD = 'template_id'
OPERATION_TYPE_FIELD = 'operation_type'
WRITE_FIELD_NAMES_FIELD = 'write_field_names'
FIELD_NAME_DELIMITER = ', '
TIMESTAMP_FIELD = 'timestamp'
|
bedekelly/pysh
|
setup.py
|
Python
|
gpl-2.0
| 340 | 0 |
#!/usr/bin/python3
from distutils.core import setup
setup(name='PySh',
version='0.0.1',
py_modules=['pysh'],
description="A tiny interface to intu
|
itively access shell commands.",
author="Bede Kelly",
author_email="bedekelly97@gmail.com",
url="https://github.com/bedekelly/pysh",
provides=['pysh'])
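# Illustrative install step (standard distutils workflow, not from the source):
#   python3 setup.py install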
|
UWCS/uwcs-website
|
uwcs_website/search/__init__.py
|
Python
|
agpl-3.0
| 1,310 | 0.003817 |
registry = {}
def register(model, fields, order='pk', filter=False, results=5):
registry[str(model)] = (model, fields, results, order, filter)
class LoopBreak(Exception): pass
def search_for_string(search_string):
search_string = search_string.lower()
matches = []
for key in registry:
model, fields, results, order, filter_by = registry[key]
# partial application didn't seem sane in python ... so:
if filter_by:
if callable(filter_by):
filter_by = filter_by()
objects = model.objects.filter(filter_by)
else:
objects = model.objects.all()
counter = 0
try:
for object in objects.order_by(order):
for field in fields:
try:
searchee = getattr(object, field)
                except AttributeError:
                    continue  # this object lacks the field; skip it
if callable(searchee):
searchee = searchee()
if search_string in searchee.lower():
matches.append(object)
                    counter += 1
if counter >= results:
raise LoopBreak()
except LoopBreak:
pass
return matches
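# Minimal self-contained demo (not part of the original module): a stand-in
# for a Django model with an `objects` manager, just enough to drive the
# registry. Real callers would register actual Django models.
if __name__ == '__main__':
    class _FakeManager(object):
        def __init__(self, items):
            self._items = items
        def all(self):
            return self
        def order_by(self, order):
            return self._items
    class FakeEvent(object):
        def __init__(self, title):
            self.title = title
    FakeEvent.objects = _FakeManager(
        [FakeEvent('Robotics night'), FakeEvent('LAN party')])
    register(FakeEvent, fields=['title'])
    print([e.title for e in search_for_string('robotics')])  # ['Robotics night']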
|
code4futuredotorg/reeborg_tw
|
src/python/editor.py
|
Python
|
agpl-3.0
| 145 | 0 |
from browser import window
from preprocess import transform
from reeborg_en import * # NOQA
src = transform(window.editor.getValue())
exec(src)
|
coderbone/SickRage-alt
|
sickchill/views/api/webapi.py
|
Python
|
gpl-3.0
| 118,111 | 0.002879 |
# coding=utf-8
# Author: Dennis Lutter <lad1337@gmail.com>
# Author: Jonathon Saine <thezoggy@gmail.com>
# URL: https://sickchill.github.io
#
# This file is part of SickChill.
#
# SickChill is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickChill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickChill. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import, print_function, unicode_literals
# Stdlib Imports
import abc
import datetime
import io
import os
import re
import time
import traceback
# Third Party Imports
import six
# noinspection PyUnresolvedReferences
from six.moves import urllib
from tornado.web import RequestHandler
# First Party Imports
import sickbeard
import sickchill
from sickbeard import classes, db, helpers, image_cache, logger, network_timezones, sbdatetime, search_queue, ui
from sickbeard.common import (ARCHIVED, DOWNLOADED, FAILED, IGNORED, Overview, Quality, SKIPPED, SNATCHED, SNATCHED_PROPER, statusStrings, UNAIRED, UNKNOWN,
WANTED)
from sickbeard.postProcessor import PROCESS_METHODS
from sickbeard.versionChecker import CheckVersion
from sickchill.helper.common import dateFormat, dateTimeFormat, pretty_file_size, sanitize_filename, timeFormat, try_int
from sickchill.helper.encoding import ek
from sickchill.helper.exceptions import CantUpdateShowException, ex, ShowDirectoryNotFoundException
from sickchill.helper.quality import get_quality_string
from sickchill.media.ShowBanner import ShowBanner
from sickchill.media.ShowFanArt import ShowFanArt
from sickchill.media.ShowNetworkLogo import ShowNetworkLogo
from sickchill.media.ShowPoster import ShowPoster
from sickchill.show.ComingEpisodes import ComingEpisodes
from sickchill.show.History import History
from sickchill.show.Show import Show
from sickchill.system.Restart import Restart
from sickchill.system.Shutdown import Shutdown
try:
import json
except ImportError:
# noinspection PyPackageRequirements,PyUnresolvedReferences
import simplejson as json
indexer_ids = ["indexerid", "tvdbid"]
RESULT_SUCCESS = 10 # only use inside the run methods
RESULT_FAILURE = 20 # only use inside the run methods
RESULT_TIMEOUT = 30 # not used yet :(
RESULT_ERROR = 40 # only use outside of the run methods !
RESULT_FATAL = 50 # only use in Api.default() ! this is the "we encountered an internal error" error
RESULT_DENIED = 60 # only use in Api.default() ! this is the access denied error
result_type_map = {
RESULT_SUCCESS: "success",
RESULT_FAILURE: "failure",
RESULT_TIMEOUT: "timeout",
RESULT_ERROR: "error",
RESULT_FATAL: "fatal",
RESULT_DENIED: "denied",
}
# basically everything except RESULT_SUCCESS / success is bad
# noinspection PyAbstractClass
class ApiHandler(RequestHandler):
""" api class that returns json results """
version = 5 # use an int since float-point is unpredictable
def __init__(self, *args, **kwargs):
super(ApiHandler, self).__init__(*args, **kwargs)
# def set_default_headers(self):
# self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
def get(self, *args, **kwargs):
kwargs = self.request.arguments
# noinspection PyCompatibility
for arg, value in six.iteritems(kwargs):
if len(value) == 1:
kwargs[arg] = value[0]
args = args[1:]
# set the output callback
# default json
output_callback_dict = {
'default': self._out_as_json,
'image': self._out_as_image,
}
access_msg = "API :: " + self.request.remote_ip + " - gave correct API KEY. ACCESS GRANTED"
logger.log(access_msg, logger.DEBUG)
# set the original call_dispatcher as the local _call_dispatcher
_call_dispatcher = self.call_dispatcher
# if profile was set wrap "_call_dispatcher" in the profile function
if 'profile' in kwargs:
from profilehooks import profile
_call_dispatcher = profile(_call_dispatcher, immediate=True)
del kwargs["profile"]
try:
out_dict = _call_dispatcher(args, kwargs)
except Exception as e: # real internal error oohhh nooo :(
logger.log("API :: " + ex(e), logger.ERROR)
error_data = {
"error_msg": ex(e),
"args": args,
"kwargs": kwargs
}
out_dict = _responds(RESULT_FATAL, error_data,
"SickChill encountered an internal error! Please report to the Devs")
if 'outputType' in out_dict:
output_callback = output_callback_dict[out_dict['outputType']]
else:
output_callback = output_callback_dict['default']
# noinspection PyBroadException
try:
self.finish(output_callback(out_dict))
except Exception:
pass
def _out_as_image(self, _dict):
self.set_header('Content-Type'.encode('utf-8'), _dict['image'].get_media_type())
return _dict['image'].get_media()
def _out_as_json(self, _dict):
self.set_header("Content-Type".encode('utf-8'), "application/json;charset=UTF-8")
try:
out = json.dumps(_dict, ensure_ascii=False, sort_keys=True)
callback = self.get_query_argument('callback', None) or self.get_query_argument('jsonp', None)
if callback:
out = callback + '(' + out + ');' # wrap with JSONP call if requested
except Exception as e: # if we fail to generate the output fake an error
logger.log("API :: " + traceback.format_exc(), logger.DEBUG)
out = '{{"result": "{0}", "message": "error while composing output: {1}"}}'.format(result_type_map[RESULT_ERROR], ex(e))
return out
def call_dispatcher(self, args, kwargs): # pylint:disable=too-many-branches
""" calls the appropriate CMD class
looks for a cmd in args and kwargs
or calls the TVDBShorthandWrapper when the first args element is a number
or returns an error that there is no such cmd
"""
logger.log("API :: all args: '" + str(args) + "'", logger.DEBUG)
logger.log("API :: all kwargs: '" + str(kwargs) + "'", logger.DEBUG)
commands = None
if args:
commands, args = args[0], args[1:]
commands = kwargs.pop("cmd", commands)
out_dict = {}
if commands:
commands = commands.split("|")
multi_commands = len(commands) > 1
for cmd in commands:
cur_args, cur_kwargs = self.filter_params(cmd, args, kwargs)
if len(cmd.split("_")) > 1:
cmd, cmd_index = cmd.split("_")
else:
cmd_index = None
logger.log("API :: " + cmd + ": cur_kwargs " + str(cur_kwargs), logger.DEBUG)
if not (cmd in ('show.getbanner', 'show.getfanart', 'show.getnetworklogo', 'show.getposter') and
multi_commands): # skip these cmd while chaining
try:
if cmd in function_mapper:
func = function_mapper.get(cmd) # map function
to_call = func(cur_args, cur_kwargs)
to_call.rh = self
cur_out_dict = to_call.run() # call function and get response
elif _is_int(cmd):
to_call = TVDBShorthandWrapper(cur_args, cur_kwargs, cmd)
to_call.rh = self
cur_out_dict = to_call.run()
|
o-kei/design-computing-aij
|
ch4_1/nearest_liner.py
|
Python
|
mit
| 491 | 0.002037 |
from math import sqrt
def sq_dist(p, q):
return((p[0] - q[0])**2 + (p[1] - q[1])**2)
def linear_search(points, query):
sqd = float("inf")
for point in points:
d = sq_dist(point, query)
if d < sqd:
nearest = point
sqd = d
return(nearest, sqd)
point_list = [(2, 3), (5, 4), (9, 6), (4, 7), (8, 1), (7, 2)]
n = linear_search(point_list, (9, 2))
print('nearest:', n[0], 'dist:', sqrt(n[1]))
# nearest: (8, 1) dist: 1.4142135623730951
|
Learningtribes/edx-platform
|
openedx/core/lib/block_structure/tests/test_manager.py
|
Python
|
agpl-3.0
| 6,900 | 0.002464 |
"""
Tests for manager.py
"""
from nose.plugins.attrib import attr
from unittest import TestCase
from ..block_structure import BlockStructureBlockData
from ..exceptions import UsageKeyNotInBlockStructure
from ..manager import BlockStructureManager
from ..transformers import BlockStructureTransformers
from .helpers import (
MockModulestoreFactory, MockCache, MockTransformer, ChildrenMapTestMixin, mock_registered_transformers
)
class TestTransformer1(MockTransformer):
"""
Test Transformer class with basic functionality to verify collected and
transformed data.
"""
collect_data_key = 't1.collect'
transform_data_key = 't1.transform'
collect_call_count = 0
@classmethod
def collect(cls, block_structure):
"""
Collects block data for the block structure.
"""
cls._set_block_values(block_structure, cls.collect_data_key)
cls.collect_call_count += 1
def transform(self, usage_info, block_structure):
"""
Transforms the block structure.
"""
self._set_block_values(block_structure, self.transform_data_key)
@classmethod
def assert_collected(cls, block_structure):
"""
Asserts data was collected for the block structure.
"""
cls._assert_block_values(block_structure, cls.collect_data_key)
@classmethod
def assert_transformed(cls, block_structure):
"""
        Asserts the block structure was transformed.
"""
cls._assert_block_values(block_structure, cls.transform_data_key)
@classmethod
def _set_block_values(cls, block_structure, data_key):
"""
Sets a value for each block in the given structure, using the given
data key.
"""
for block_key in block_structure.topological_traversal():
block_structure.set_transformer_block_field(
block_key, cls, data_key, cls._create_block_value(block_key, data_key)
)
@classmethod
def _assert_block_values(cls, block_structure, data_key):
"""
Verifies the value for each block in the given structure, for the given
data key.
"""
for block_key in block_structure.topological_traversal():
assert (
block_structure.get_transformer_block_field(
block_key,
cls,
data_key,
) == cls._create_block_value(block_key, data_key)
)
@classmethod
def _create_block_value(cls, block_key, data_key):
"""
Returns a unique deterministic value for the given block key
and data key.
"""
return data_key + 't1.val1.' + unicode(block_key)
@attr('shard_2')
class TestBlockStructureManager(TestCase, ChildrenMapTestMixin):
"""
Test class for BlockStructureManager.
"""
def setUp(self):
super(TestBlockStructureManager, self).setUp()
TestTransformer1.collect_call_count = 0
self.registered_transformers = [TestTransformer1()]
with mock_registered_transformers(self.registered_transformers):
self.transformers = BlockStructureTransformers(self.registered_transformers)
self.children_map = self.SIMPLE_CHILDREN_MAP
self.modulestore = MockModulestoreFactory.create(self.children_map)
self.cache = MockCache()
self.bs_manager = BlockStructureManager(
root_block_usage_key=0,
modulestore=self.modulestore,
cache=self.cache,
)
def collect_and_verify(self, expect_modulestore_called, expect_cache_updated):
"""
Calls the manager's get_collected method and verifies its result
and behavior.
"""
self.modulestore.get_items_call_count = 0
self.cache.set_call_count = 0
with mock_registered_transformers(self.registered_transformers):
block_structure = self.bs_manager.get_collected()
self.assert_block_structure(block_structure, self.children_map)
TestTransformer1.assert_collected(block_structure)
if expect_modulestore_called:
self.assertGreater(self.modulestore.get_items_call_count, 0)
else:
self.assertEquals(self.modulestore.get_items_call_count, 0)
self.assertEquals(self.cache.set_call_count, 1 if expect_cache_updated else 0)
def test_get_transformed(self):
with mock_registered_transformers(self.registered_transformers):
block_structure = self.bs_manager.get_transformed(self.transformers)
self.assert_block_structure(block_structure, self.children_map)
TestTransformer1.assert_collected(block_structure)
TestTransformer1.assert_transformed(block_structure)
def test_get_transformed_with_starting_block(self):
with mock_registered_transformers(self.registered_transformers):
block_structure = self.bs_manager.get_transformed(self.transformers, starting_block_usage_key=1)
substructure_of_children_map = [[], [3, 4], [], [], []]
self.assert_block_structure(block_structure, substructure_of_children_map, missing_blocks=[0, 2])
TestTransformer1.assert_collected(block_structure)
TestTransformer1.assert_transformed(block_structure)
def test_get_transformed_with_nonexistent_starting_block(self):
with mock_registered_transformers(self.registered_transformers):
with self.assertRaises(UsageKeyNotInBlockStructure):
self.bs_manager.get_transformed(self.transformers, starting_block_usage_key=100)
def test_get_collected_cached(self):
self.collect_and_verify(expect_modulestore_called=True, expect_cache_updated=True)
self.collect_and_verify(expect_modulestore_called=False, expect_cache_updated=False)
self.assertEquals(TestTransformer1.collect_call_count, 1)
def test_get_collected_outdated_data(self):
self.collect_and_verify(expect_modulestore_called=True, expect_cache_updated=True)
TestTransformer1.VERSION += 1
self.collect_and_verify(expect_modulestore_called=True, expect_cache_updated=True)
self.assertEquals(TestTransformer1.collect_call_count, 2)
def test_get_collected_version_update(self):
self.collect_and_verify(expect_modulestore_called=True, expect_cache_updated=True)
BlockStructureBlockData.VERSION += 1
self.collect_and_verify(expect_modulestore_called=True, expect_cache_updated=True)
self.assertEquals(TestTransformer1.collect_call_count, 2)
def test_clear(self):
self.collect_and_verify(expect_modulestore_called=True, expect_cache_updated=True)
self.bs_manager.clear()
self.collect_and_verify(expect_modulestore_called=True, expect_cache_updated=True)
self.assertEquals(TestTransformer1.collect_call_count, 2)
|
cutoffthetop/zeit.content.image
|
src/zeit/content/image/variant.py
|
Python
|
bsd-3-clause
| 6,899 | 0.00029 |
import UserDict
import copy
import grokcore.component as grok
import sys
import zeit.cms.content.sources
import zeit.content.image.interfaces
import zeit.edit.body
import zope.schema
class Variants(grok.Adapter, UserDict.DictMixin):
grok.context(zeit.content.image.interfaces.IImageGroup)
grok.implements(zeit.content.image.interfaces.IVariants)
def __init__(self, context):
super(Variants, self).__init__(context)
self.__parent__ = context
def __getitem__(self, key):
"""Retrieve Variant for JSON Requests"""
if key in self.context.variants:
variant = Variant(id=key, **self.context.variants[key])
config = VARIANT_SOURCE.factory.find(self.context, key)
self._copy_missing_fields(config, variant)
else:
variant = VARIANT_SOURCE.factory.find(self.context, key)
if not variant.is_default:
self._copy_missing_fields(self.default_variant, variant)
variant.__parent__ = self
return variant
def _copy_missing_fields(self, source, target):
for key in zope.schema.getFieldNames(
zeit.content.image.interfaces.IVariant):
if hasattr(target, key) and getattr(target, key) is not None:
continue
if hasattr(source, key):
setattr(target, key, getattr(source, key))
def keys(self):
keys = [x.id for x in VARIANT_SOURCE(self.context)]
for key in self.context.variants.keys():
if key not in keys:
keys.append(key)
return keys
@property
def default_variant(self):
if Variant.DEFAULT_NAME in self.context.variants:
default = self[Variant.DEFAULT_NAME]
else:
default = VARIANT_SOURCE.factory.find(
self.context, Variant.DEFAULT_NAME)
return default
class Variant(object):
DEFAULT_NAME = 'default'
interface = zeit.content.image.interfaces.IVariant
grok.implements(interface)
max_size = None
legacy_name = None
aspect_ratio = None
def __init__(self, **kw):
"""Set attributes that are part of the Schema and convert their type"""
fields = zope.schema.getFields(self.interface)
for key, value in kw.items():
if key not in fields:
continue # ignore attributes that aren't part of the schema
value = fields[key].fromUnicode(unicode(value))
setattr(self, key, value)
@property
def ratio(self):
if self.is_default:
image = zeit.content.image.interfaces.IMasterImage(
zeit.content.image.interfaces.IImageGroup(self))
xratio, yratio = image.getImageSize()
return float(xratio) / float(yratio)
if self.aspect_ratio is None:
return None
xratio, yratio = self.aspect_ratio.split(':')
        return float(xratio) / float(yratio)
@property
def max_width(self):
if self.max_size is None:
return sys.maxint
        width, height = self.max_size.split('x')
return int(width)
@property
def max_height(self):
if self.max_size is None:
return sys.maxint
width, height = self.max_size.split('x')
return int(height)
@property
def is_default(self):
return self.id == self.DEFAULT_NAME
@property
def relative_image_path(self):
if self.is_default:
thumbnails = zeit.content.image.interfaces.IThumbnails(
zeit.content.image.interfaces.IImageGroup(self))
return thumbnails.source_image.__name__
if self.max_size is None:
return '%s/%s' % (
zeit.content.image.imagegroup.Thumbnails.NAME, self.name)
return '{}/{}__{}'.format(
zeit.content.image.imagegroup.Thumbnails.NAME,
self.name, self.max_size)
class VariantSource(zeit.cms.content.sources.XMLSource):
product_configuration = 'zeit.content.image'
config_url = 'variant-source'
def getTitle(self, context, value):
return value.id
def getToken(self, context, value):
return value.id
def getValues(self, context):
tree = self._get_tree()
result = []
for node in tree.iterchildren('*'):
if not self.isAvailable(node, context):
continue
sizes = list(node.iterchildren('size'))
if not sizes:
# If there are no children, create a Variant from parent node
attributes = dict(node.attrib)
attributes['id'] = attributes['name']
result.append(Variant(**attributes))
for size in sizes:
# Create Variant for each given size
result.append(Variant(**self._merge_attributes(
node.attrib, size.attrib)))
return result
def find(self, context, id):
for value in self.getValues(context):
if value.id == id:
return value
raise KeyError(id)
def _merge_attributes(self, parent_attr, child_attr):
"""Merge attributes from parent with those from child.
Attributes from child are more specific and therefore may overwrite
attributes from parent. Create the child `id` via concatenation, since
it should be unique among variants and respects the parent / child
hierarchy.
"""
result = copy.copy(parent_attr)
result.update(child_attr)
if 'name' in parent_attr and 'id' in child_attr:
result['id'] = '{}-{}'.format(
parent_attr['name'], child_attr['id'])
return result
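# Illustrative example (not from the original source) of the merge rule above:
# given a parent node <variant name="cinema"> and a child <size id="small">,
#   _merge_attributes({'name': 'cinema'}, {'id': 'small'})
#   == {'name': 'cinema', 'id': 'cinema-small'}
# i.e. the child id is namespaced under the parent's name.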
VARIANT_SOURCE = VariantSource()
class VariantsTraverser(zeit.edit.body.Traverser):
grok.context(zeit.content.image.interfaces.IRepositoryImageGroup)
body_name = 'variants'
body_interface = zeit.content.image.interfaces.IVariants
@grok.adapter(zeit.content.image.interfaces.IVariants)
@grok.implementer(zeit.content.image.interfaces.IImageGroup)
def imagegroup_for_variants(context):
return zeit.content.image.interfaces.IImageGroup(context.__parent__)
@grok.adapter(zeit.content.image.interfaces.IVariant)
@grok.implementer(zeit.content.image.interfaces.IImageGroup)
def imagegroup_for_variant(context):
return zeit.content.image.interfaces.IImageGroup(context.__parent__)
class LegacyVariantSource(zeit.cms.content.sources.XMLSource):
product_configuration = 'zeit.content.image'
config_url = 'legacy-variant-source'
def getValues(self, context):
tree = self._get_tree()
result = []
for node in tree.iterchildren('*'):
result.append({'old': node.get('old'), 'new': node.get('new')})
return result
LEGACY_VARIANT_SOURCE = LegacyVariantSource()
|
epitron/youtube-dl
|
youtube_dl/extractor/naver.py
|
Python
|
unlicense
| 5,293 | 0.001171 |
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
ExtractorError,
int_or_none,
update_url_query,
)
class NaverIE(InfoExtractor):
_VALID_URL = r'https?://(?:m\.)?tv(?:cast)?\.naver\.com/v/(?P<id>\d+)'
_TESTS = [{
'url': 'http://tv.naver.com/v/81652',
'info_dict': {
'id': '81652',
'ext': 'mp4',
'title': '[9월 모의고사 해설강의][수학_김상희] 수학 A형 16~20번',
'description': '합격불변의 법칙 메가스터디 | 메가스터디 수학 김상희 선생님이 9월 모의고사 수학A형 16번에서 20번까지 해설강의를 공개합니다.',
'upload_date': '20130903',
},
}, {
'url': 'http://tv.naver.com/v/395837',
'md5': '638ed4c12012c458fefcddfd01f173cd',
'info_dict': {
'id': '395837',
'ext': 'mp4',
'title': '9년이 지나도 아픈 기억, 전효성의 아버지',
'description': 'md5:5bf200dcbf4b66eb1b350d1eb9c753f7',
'upload_date': '20150519',
},
'skip': 'Georestricted',
}, {
'url': 'http://tvcast.naver.com/v/81652',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
vid = self._search_regex(
r'videoId["\']\s*:\s*(["\'])(?P<value>(?:(?!\1).)+)\1', webpage,
'video id', fatal=None, group='value')
in_key = self._search_regex(
r'inKey["\']\s*:\s*(["\'])(?P<value>(?:(?!\1).)+)\1', webpage,
'key', default=None, group='value')
if not vid or not in_key:
error = self._html_search_regex(
r'(?s)<div class="(?:nation_error|nation_box|error_box)">\s*(?:<!--.*?-->)?\s*<p class="[^"]+">(?P<msg>.+?)</p>\s*</div>',
webpage, 'error', default=None)
if error:
raise ExtractorError(error, expected=True)
raise ExtractorError('couldn\'t extract vid and key')
video_data = self._download_json(
'http://play.rmcnmv.naver.com/vod/play/v2.0/' + vid,
video_id, query={
'key': in_key,
})
meta = video_data['meta']
title = meta['subject']
formats = []
def extract_formats(streams, stream_type, query={}):
for stream in streams:
stream_url = stream.get('source')
if not stream_url:
continue
stream_url = update_url_query(stream_url, query)
encoding_option = stream.get('encodingOption', {})
bitrate = stream.get('bitrate', {})
formats.append({
'format_id': '%s_%s' % (stream.get('type') or stream_type, encoding_option.get('id') or encoding_option.get('name')),
'url': stream_url,
'width': int_or_none(encoding_option.get('width')),
'height': int_or_none(encoding_option.get('height')),
'vbr': int_or_none(bitrate.get('video')),
'abr': int_or_none(bitrate.get('audio')),
'filesize': int_or_none(stream.get('size')),
'protocol': 'm3u8_native' if stream_type == 'HLS' else None,
})
extract_formats(video_data.get('videos', {}).get('list', []), 'H264')
for stream_set in video_data.get('streams', []):
query = {}
for param in stream_set.get('keys', []):
query[param['name']] = param['value']
stream_type = stream_set.get('type')
videos = stream_set.get('videos')
if videos:
extract_formats(videos, stream_type, query)
elif stream_type == 'HLS':
stream_url = stream_set.get('source')
if not stream_url:
continue
formats.extend(self._extract_m3u8_formats(
update_url_query(stream_url, query), video_id,
'mp4', 'm3u8_native', m3u8_id=stream_type, fatal=False))
self._sort_formats(formats)
subtitles = {}
for caption in video_data.get('captions', {}).get('list', []):
caption_url = caption.get('source')
if not caption_url:
continue
subtitles.setdefault(caption.get('language') or caption.get('locale'), []).append({
'url': caption_url,
})
upload_date = self._search_regex(
r'<span[^>]+class="date".*?(\d{4}\.\d{2}\.\d{2})',
webpage, 'upload date', fatal=False)
if upload_date:
upload_date = upload_date.replace('.', '')
return {
'id': video_id,
'title': title,
'formats': formats,
'subtitles': subtitles,
'description': self._og_search_description(webpage),
'thumbnail': meta.get('cover', {}).get('source') or self._og_search_thumbnail(webpage),
'view_count': int_or_none(meta.get('count')),
'upload_date': upload_date,
}
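# A minimal, hedged illustration of the update_url_query helper the extractor
# relies on above: it merges query parameters into a URL, which is how each
# stream set's 'keys' end up on its source URLs. The URL below is made up.
if __name__ == '__main__':
    print(update_url_query('http://example.com/v.m3u8', {'vid': '81652'}))
    # -> http://example.com/v.m3u8?vid=81652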
|
ExsonumChain/ProtoServer
|
voks/settings.py
|
Python
|
mit
| 3,423 | 0.001753 |
"""
Django settings for voks project.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
#SECRET_KEY = ''
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1']
# Application definition
INSTALLED_APPS = [
'blockchain.apps.BlockchainConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
    'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'graphene_django'
]
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': [
# 'rest_framework.permissions.IsAdminUser',
],
# 'PAGE_SIZE': 2
}
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'voks.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'voks.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = 'static/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'uploads/')
MEDIA_URL = "/uploads/"
GRAPHENE = {
'SCHEMA': 'blockchain.schema.schema'
}
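# SECRET_KEY is commented out above, so Django will refuse to start as-is.
# A minimal sketch for supplying one (get_random_secret_key is available in
# Django >= 1.10); in production, prefer an environment variable over a
# per-process random value, since a new key on every restart invalidates sessions.
from django.core.management.utils import get_random_secret_key
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY') or get_random_secret_key()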
|
Kiganshee/Flip-Sign
|
TransitionFunctions.py
|
Python
|
cc0-1.0
| 14,898 | 0.006511 |
from PIL import Image
from PIL import ImageDraw
from PIL import ImageChops
import random
def message_transition(func):
func.is_message_transition = True
func.is_display_transition = False
def display_transition(func):
func.is_message_transition = False
func.is_display_transition = True
def FlashStarsTransition(current_state,desired_state):
"""
This transition function flashes all asterisks, then blanks, then asterisks, then the desired message.
:param current_state: the current state of the display - again ignored by this function
:param desired_state: the desired display state
:return: a list containing the display states to be passed through
"""
assert type(current_state) == list
num_lines = len(current_state)
num_chars = len(current_state[0])
return [['*'*num_chars]*num_lines, [' '*num_chars]*num_lines, ['*'*num_chars]*num_lines, desired_state]
message_transition(FlashStarsTransition)
def SimpleTransition(current_state,desired_state):
"""
The simplest possible transition -- go to the desired state directly with no fancy stuff.
:param current_state: the current display state -- ignored by this function but included for consistency with other
transition functions.
:param desired_state: the desired display state
:return: in this case, just a single-element list containing the desired state
"""
return [desired_state]
display_transition(SimpleTransition)
def center_wipe(currentstate, desiredstate):
"""
Transition function that wipes from currentstate to desiredstate out from the center in both directions.
:param currentstate: a PIL image object representing the current display state
:param desiredstate: a PIL image object representing the eventual desired display state
:return: a list of PIL image objects representing a transition of display states to get from current to desired
"""
assert isinstance(currentstate, Image.Image)
assert isinstance(desiredstate, Image.Image)
assert currentstate.size == desiredstate.size
# initialize list for transition
output = []
# set initial columns for wipe (possibly same if odd number of pixels)
if desiredstate.size[0] % 2 == 0: # if the number of columns of pixels is even
# set the right and left columns as the middle ones - assuming the indices start at 0
left_column = desiredstate.size[0] / 2 - 1
right_column = desiredstate.size[0] / 2
else: # if the number of columns of pixels is odd
left_column = desiredstate.size[0] / 2 - 0.5
right_column = left_column
# iterate until the wipe has passed the edge
while left_column >= -1:
# create a mask with the right amount of interior area transparent
# note - Image.composite(image1, image2, mask) yields image1 where mask is 1 and image2 where mask is 0
image_mask = Image.new('1',desiredstate.size,1)
ImageDraw.Draw(image_mask).rectangle([left_column, 0, right_column, desiredstate.size[1]-1], fill=0)
# composite the initial image with the desired state using the layer mask
composite = Image.composite(currentstate, desiredstate, image_mask)
# draw vertical lines of all white to create the line doing the wiping
draw = ImageDraw.Draw(composite)
draw.line(xy=[left_column, 0, left_column, desiredstate.size[1]-1], fill=1, width=1)
draw.line(xy=[right_column, 0, right_column, desiredstate.size[1]-1], fill=1, width=1)
# append this new image to the list of images
output.append(composite)
left_column -= 1
right_column += 1
# return the list of images for transition
return output
display_transition(center_wipe)
def dissolve_changes_only(currentstate, desiredstate):
"""
A transition function that changes pixels one by one at random between currentstate and desiredstate. Pixels that
are the same in both images are skipped (no time taken)
:param currentstate: a PIL image object representing the current display state
:param desiredstate: a PIL image object representing the eventual desired display state
:return: a list of PIL image objects representing a transition of display states to get from current to desired
"""
assert isinstance(currentstate, Image.Image)
assert isinstance(desiredstate, Image.Image)
assert currentstate.size == desiredstate.size
# generate a list of all pixel addresses in the image and shuffle it
pixel_addresses = []
for column in range(currentstate.size[0]):
for row in range(currentstate.size[1]):
pixel_addresses.append((column, row))
random.shuffle(pixel_addresses)
output = []
next_image = currentstate.copy()
# for each pixel in the image
for pixel in pixel_addresses:
# if the pixel is different between the input image and the desired one
if currentstate.getpixel(pixel) != desiredstate.getpixel(pixel):
# take the previous image in the output list and change that pixel (currentstate if list is empty)
ImageDraw.Draw(next_image).point(pixel, fill=desiredstate.getpixel(pixel))
# append that image to the output list
output.append(next_image.copy())
return output
display_transition(dissolve_changes_only)
def push_up(current_state, desired_state):
"""
A transition function that raises the desired state up from the bottom of the screen, "pushing" the current state
off the top. One blank line is inserted between.
    :param current_state: a PIL image object representing the current display state
:param desired_state: a PIL image object representing the eventual desired display state
:return: a list of PIL image objects representing a transition of display states to get from current to desired
"""
assert isinstance(current_state, Image.Image)
assert isinstance(desired_state, Image.Image)
assert current_state.size == desired_state.size
output = []
    current_state_y_val = -1
desired_state_y_val = current_state.size[1]
# while the desired image has not reached the top
while desired_state_y_val >= 0:
# initialize next image
next = Image.new("1", current_state.size, color=0)
        # paste current state at its y value
next.paste(current_state, (0, current_state_y_val))
# paste desired state at its y value
next.paste(desired_state, (0, desired_state_y_val))
        # increment y values
current_state_y_val -= 1
desired_state_y_val -= 1
# append output
output.append(next)
output.append(desired_state)
# return the output
return output
display_transition(push_up)
def push_down(current_state, desired_state):
"""
    A transition function that lowers the desired state down from the top of the screen, "pushing" the current state
off the bottom. One blank line is inserted between.
:param current_state: a PIL image object representing the current display state
:param desired_state: a PIL image object representing the eventual desired display state
:return: a list of PIL image objects representing a transition of display states to get from current to desired
"""
assert isinstance(current_state, Image.Image)
assert isinstance(desired_state, Image.Image)
assert current_state.size == desired_state.size
output = []
current_state_y_val = 1
desired_state_y_val = 0 - current_state.size[1]
# while the desired image has not reached the top
while desired_state_y_val <= 0:
# initialize next image
next = Image.new("1", current_state.size, color=0)
        # paste current state at its y value
next.paste(current_state, (0, current_state_y_val))
# paste desired state at its y value
next.paste(desired_state, (0, desired_state_y_val))
        # increment y values
current_state_y_val += 1
desired_state_y_val += 1
# append output
output.append(next)
# return the output
return output
display_transition(push_down)
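# A minimal usage sketch of the transitions above, assuming Pillow is
# installed; the 16x8 size and mode '1' are arbitrary illustration values.
if __name__ == '__main__':
    before = Image.new('1', (16, 8), color=0)
    after = Image.new('1', (16, 8), color=0)
    ImageDraw.Draw(after).rectangle([4, 2, 11, 5], fill=1)
    frames = center_wipe(before, after)
    print('%d frames' % len(frames))  # one frame per column pair swept outward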
|
miooim/project_hours
|
src/config.py
|
Python
|
mit
| 1,076 | 0.009294 |
from tornado.options import define, options
define('mongodb', default='localhost:27017', help='mongodb host name or ip address +port', type=str)
define('mongod_name', default='project_hours', help='Project hours database name', type=str)
define('auth_db', default='project_hours', help='authentication database', type=str)
define('auth_driver', default='auth.ldap_auth.LdapAuth', help='authentication driver', type=str)
define('app_port', default=8181, help='application port', type=int)
# LDAP authentication
define("active_directory_server", default='server_name', help="Active directory server", type=str)
define("active_directory_username", default='user_name', help="Active directory username", type=str)
define("active_directory_password", default='password', help="Act
|
ive directory password", type=str)
define("active_directory_search_def", default='ou=Crow,dc=Crow,dc=local', help="active directory search
|
definition",
type=str)
#user cookie
define("auth_cookie_name", default='project_hours', help="name of the cookie to use for authentication", type=str)
|
googleapis/python-appengine-admin
|
samples/generated_samples/appengine_v1_generated_instances_delete_instance_sync.py
|
Python
|
apache-2.0
| 1,530 | 0.000654 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for DeleteInstance
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-appengine-admin
# [START appengine_v1_generated_Instances_DeleteInstance_sync]
from google.cloud import appengine_admin_v1
def sample_delete_instance():
# Create a client
client = appengine_admin_v1.InstancesClient()
# Initialize request argument(s)
request = appengine_admin_v1.DeleteInstanceRequest(
)
# Make the request
operation = client.delete_instance(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
# [END appengine_v1_generated_Instances_DeleteInstance_sync]
|
evgenyz/meson
|
modules/gnome.py
|
Python
|
apache-2.0
| 10,923 | 0.003662 |
# Copyright 2015 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''This module provides helper functions for Gnome/GLib related
functionality such as gobject-introspection and gresources.'''
import build
import os, sys
import subprocess
from coredata import MesonException
import mlog
class GnomeModule:
def compile_resources(self, state, args, kwargs):
cmd = ['glib-compile-resources', '@INPUT@', '--generate']
if 'source_dir' in kwargs:
d = os.path.join(state.build_to_src, state.subdir, kwargs.pop('source_dir'))
cmd += ['--sourcedir', d]
if 'c_name' in kwargs:
cmd += ['--c-name', kwargs.pop('c_name')]
cmd += ['--target', '@OUTPUT@']
kwargs['command'] = cmd
output_c = args[0] + '.c'
output_h = args[0] + '.h'
kwargs['input'] = args[1]
kwargs['output'] = output_c
target_c = build.CustomTarget(args[0]+'_c', state.subdir, kwargs)
kwargs['output'] = output_h
target_h = build.CustomTarget(args[0] + '_h', state.subdir, kwargs)
return [target_c, target_h]
def generate_gir(self, state, args, kwargs):
if len(args) != 1:
raise MesonException('Gir takes one argument')
girtarget = args[0]
while hasattr(girtarget, 'held_object'):
girtarget = girtarget.held_object
if not isinstance(girtarget, (build.Executable, build.SharedLibrary)):
raise MesonException('Gir target must be an executable or shared library')
pkgstr = subprocess.check_output(['pkg-config', '--cflags', 'gobject-introspection-1.0'])
pkgargs = pkgstr.decode().strip().split()
ns = kwargs.pop('namespace')
nsversion = kwargs.pop('nsversion')
libsources = kwargs.pop('sources')
girfile = '%s-%s.gir' % (ns, nsversion)
depends = [girtarget]
scan_command = ['g-ir-scanner', '@INPUT@']
scan_command += pkgargs
scan_command += ['--namespace='+ns, '--nsversion=' + nsversion, '--warn-all',
'--output', '@OUTPUT@']
for incdirs in girtarget.include_dirs:
for incdir in incdirs.get_incdirs():
scan_command += ['-I%s' % os.path.join(state.environment.get_source_dir(), incdir)]
if 'link_with' in kwargs:
link_with = kwargs.pop('link_with')
for link in link_with:
lib = link.held_object
scan_command += ['-l%s' % lib.name]
if isinstance(lib, build.SharedLibrary):
scan_command += ['-L%s' %
os.path.join(state.environment.get_build_dir(),
lib.subdir)]
depends.append(lib)
if 'includes' in kwargs:
includes = kwargs.pop('includes')
if isinstance(includes, str):
scan_command += ['--include=%s' % includes]
elif isinstance(includes, list):
scan_command += ['--include=%s' % inc for inc in includes]
else:
raise MesonException('Gir includes must be str or list')
if state.global_args.get('c'):
scan_command += ['--cflags-begin']
scan_command += state.global_args['c']
scan_command += ['--cflags-end']
if kwargs.get('symbol_prefix'):
sym_prefix = kwargs.pop('symbol_prefix')
if not isinstance(sym_prefix, str):
raise MesonException('Gir symbol prefix must be str')
scan_command += ['--symbol-prefix=%s' % sym_prefix]
if kwargs.get('identifier_prefix'):
identifier_prefix = kwargs.pop('identifier_prefix')
if not isinstance(identifier_prefix, str):
raise MesonException('Gir identifier prefix must be str')
scan_command += ['--identifier-prefix=%s' % identifier_prefix]
if kwargs.get('export_packages'):
pkgs = kwargs.pop('export_packages')
if isinstance(pkgs, str):
scan_command += ['--pkg-export=%s' % pkgs]
elif isinstance(pkgs, list):
scan_command += ['--pkg-export=%s' % pkg for pkg in pkgs]
else:
raise MesonException('Gir export packages must be str or list')
deps = None
if 'dependencies' in kwargs:
deps = kwargs.pop('dependencies')
if not isinstance (deps, list):
deps = [deps]
for dep in deps:
girdir = dep.held_object.get_variable ("girdir")
if girdir:
scan_command += ["--add-include-path=%s" % girdir]
inc_dirs = None
if kwargs.get('include_directories'):
inc_dirs = kwargs.pop('include_directories')
if isinstance(inc_dirs.held_object, build.IncludeDirs):
scan_command += ['--add-include-path=%s' % inc for inc in inc_dirs.held_object.get_incdirs()]
else:
raise MesonException('Gir include dirs should be include_directories()')
if isinstance(girtarget, build.Executable):
scan_command += ['--program', girtarget]
elif isinstance(girtarget, build.SharedLibrary):
scan_command += ["-L", os.path.join (state.environment.get_build_dir(), girtarget.subdir)]
libname = girtarget.get_basename()
scan_command += ['--library', libname]
scankwargs = {'output' : girfile,
'input' : libsources,
'command' : scan_command,
'depends' : depends,
}
if kwargs.get('install'):
scankwargs['install'] = kwargs['install']
scankwargs['install_dir'] = os.path.join(state.environment.get_datadir(), 'gir-1.0')
scan_target = GirTarget(girfile, state.subdir, scankwargs)
typelib_output = '%s-%s.typelib' % (ns, nsversion)
typelib_cmd = ['g-ir-compiler', scan_target, '--output', '@OUTPUT@']
if inc_dirs:
typelib_cmd += ['--includedir=%s' % inc for inc in
inc_dirs.held_object.get_incdirs()]
if deps:
for dep in deps:
girdir = dep.held_object.get_variable ("girdir")
if girdir:
typelib_cmd += ["--includedir=%s" % girdir]
kwargs['output'] = typelib_output
kwargs['command'] = typelib_cmd
# Note that this can't be libdir, because e.g. on Debian it points to
# lib/x86_64-linux-gnu but the girepo dir is always under lib.
kwargs['install_dir'] = 'lib/girepository-1.0'
typelib_target = TypelibTarget(typelib_output, state.subdir, kwargs)
return [scan_target, typelib_target]
def compile_schemas(self, state, args, kwargs):
if len(args) != 0:
raise MesonException('Compile_schemas does not take positional arguments.')
srcdir = os.path.join(state.build_to_src, state.subdir)
outdir = state.subdir
cmd = ['glib-compile-schemas', '--targetdir', outdir, srcdir]
kwargs['command'] = cmd
kwargs['input'] = []
kwargs['output'] = 'gschemas.compiled'
if state.subdir == '':
targetname = 'gsettings-compile'
else:
targetname = 'gsettings-compile-' + state.subdir
target_g = build.CustomTarget(targetname, state.subdir, kwargs)
return target_g
def gtkdoc(self, state, args, kwargs):
if len(args) != 1:
            raise MesonException('Gtkdoc must have one positional argument.')
|
smurfix/HomEvenT
|
modules/bool.py
|
Python
|
gpl-3.0
| 2,922 | 0.044781 |
# -*- coding: utf-8 -*-
##
## Copyright © 2007, Matthias Urlichs <matthias@urlichs.de>
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License (included; see the file LICENSE)
## for more details.
##
"""\
This code implements primitive "if true" and "if false" checks.
"""
from homevent.check import Check,register_condition,unregister_condition
from homevent.module import Module
class TrueCheck(Check):
name="true"
doc="always true."
def check(self,*args):
assert not args,"Truth doesn't have arguments"
return True
class FalseCheck(Check):
name="false"
doc="always false."
def check(self,*args):
assert not args,"Falsehood doesn't have arguments"
return False
class NoneCheck(Check):
name="null"
doc="check if the argument has a value."
def check(self,*args):
assert len(args)==1,u"The ‹null› check requires one argument"
return args[0] is None
class EqualCheck(Check):
name="equal"
doc="check if the arguments are the same."
def check(self,*args):
assert len(args)==2,u"The ‹equal› check requires two arguments"
a,b = args
if a is None: return b is None
try:
return float(a) == float(b)
except (ValueError,TypeError):
return str(a) == str(b)
class LessCheck(Check):
name="less"
doc="check if the first argument is smaller."
def check(self,*args):
assert len(args)==2,u"The ‹less› check requires two arguments"
a,b = args
if a is None or b is None: return False
try:
return float(a) < float(b)
except (ValueError,TypeError):
			return str(a) < str(b)
class GreaterCheck(Check):
name="greater"
doc="check if the first argument is larger."
	def check(self,*args):
assert len(args)==2,u"The ‹greater› check requires two arguments"
a,b = args
if a is None or b is None: return False
try:
return float(a) > float(b)
except (ValueError,TypeError):
return str(a) > str(b)
class BoolModule(Module):
"""\
This module implements basic boolean conditions
"""
info = u"Boolean conditions. There can be only … two."
def load(self):
register_condition(TrueCheck)
register_condition(FalseCheck)
register_condition(NoneCheck)
register_condition(EqualCheck)
register_condition(LessCheck)
register_condition(GreaterCheck)
def unload(self):
unregister_condition(TrueCheck)
unregister_condition(FalseCheck)
unregister_condition(NoneCheck)
unregister_condition(EqualCheck)
unregister_condition(LessCheck)
unregister_condition(GreaterCheck)
init = BoolModule
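# A small illustrative run of the coercion logic in EqualCheck (assuming the
# Check base class can be instantiated with no arguments here): the numeric
# comparison is tried first, so "1.0" equals "1"; float() failures fall back
# to string comparison.
if __name__ == '__main__':
	print(EqualCheck().check("1.0", "1"))  # True, via float comparison
	print(EqualCheck().check("a", "ab"))   # False, via string comparison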
|
gioGats/GeneticText
|
generation_functions.py
|
Python
|
gpl-3.0
| 6,538 | 0.002906 |
from ga_utils import *
from random import randint, gauss, random, seed
import unittest
def generate_xover(population, midpoint_dist='uniform'):
"""
Generates a new value using crossover mutation
:param population: sorted iterable of current population
:param midpoint_dist: 'uniform' or 'normal' distributions for selecting midpoint
:return: new value
"""
    if midpoint_dist == 'uniform':
midpoint = randint(0, len(population[0]))
    elif midpoint_dist == 'normal':
midpoint = max(min(int(gauss(0.5 * len(population[0]), 0.5 * len(population[0]))), len(population[0])), 0)
else:
raise ValueError('Midpoint distribution must be uniform or normal')
mom, dad = ranked_selection(population), ranked_selection(population)
return midpoint_xover(mom, dad, midpoint)
def generate_lmutate(population, locate_dist='uniform'):
"""
Generates a new value using location mutation
:param population: sorted iterable of current population
:param locate_dist: 'uniform' or 'normal' distributions for selecting locations
:return: new value
"""
    if locate_dist == 'uniform':
a = randint(0, len(population[0]))
b = randint(0, len(population[0]))
    elif locate_dist == 'normal':
a = max(min(int(gauss(0.5 * len(population[0]), 0.5 * len(population[0]))), len(population[0])), 0)
b = max(min(int(gauss(0.5 * len(population[0]), 0.5 * len(population[0]))), len(population[0])), 0)
else:
raise ValueError('Location distribution must be uniform or normal')
if a == b:
if randint(1, 2) == 1:
return location_mutation(ranked_selection(population), a, None)
else:
return location_mutation(ranked_selection(population), 0, a)
else:
return location_mutation(ranked_selection(population), min(a, b), max(a, b))
def generate_pmutate(population, locate_dist='uniform', pflip_dist='uniform'):
"""
Generates a new value using location mutation
:param population: sorted iterable of current population
:param locate_dist: 'uniform' or 'normal' distributions for selecting locations
:param pflip_dist: 'uniform' or 'normal' distributions for selecting pflip
:return: new value
"""
    if locate_dist == 'uniform':
a = randint(0, len(population[0]))
b = randint(0, len(population[0]))
    elif locate_dist == 'normal':
a = max(min(int(gauss(0.5 * len(population[0]), 0.5 * len(population[0]))), len(population[0])), 0)
b = max(min(int(gauss(0.5 * len(population[0]), 0.5 * len(population[0]))), len(population[0])), 0)
else:
raise ValueError('Location distribution must be uniform or normal')
    if pflip_dist == 'uniform':
p = random()
    elif pflip_dist == 'normal':
p = max(min(gauss(0.5, 0.5), 1.0), 0.0)
else:
raise ValueError('Pflip distribution must be uniform or normal')
if a == b:
if randint(1, 2) == 1:
return probability_mutation(ranked_selection(population), p, a, None)
else:
return probability_mutation(ranked_selection(population), p, 0, a)
else:
return probability_mutation(ranked_selection(population), p, min(a, b), max(a, b))
class TestGenFns(unittest.TestCase):
def setUp(self):
pass
def test_generate_xover(self):
population = [BitArray(bin='00000001'), BitArray(bin='00000011'),
BitArray(bin='00000111'), BitArray(bin='00001111'),
BitArray(bin='00011111'), BitArray(bin='00111111'),
BitArray(bin='01111111'), BitArray(bin='11111111')]
self.assertRaises(ValueError, lambda: generate_xover(population, midpoint_dist='error'))
seed(1)
uniform_xover = generate_xover(population, midpoint_dist='uniform')
normal_xover = generate_xover(population, midpoint_dist='normal')
self.assertEqual(type(uniform_xover), BitArray)
self.assertEqual(uniform_xover.bin, '00000011')
self.assertEqual(type(normal_xover), BitArray)
self.assertEqual(normal_xover.bin, '00000011')
def test_generate_lmutate(self):
population = [BitArray(bin='00000001'), BitArray(bin='00000011'),
BitArray(bin='00000111'), BitArray(bin='00001111'),
BitArray(bin='00011111'), BitArray(bin='00111111'),
BitArray(bin='01111111'), BitArray(bin='11111111')]
self.assertRaises(ValueError, lambda: generate_lmutate(population, locate_dist='error'))
seed(1)
uniform_lmutate = generate_lmutate(population, locate_dist='uniform')
normal_lmutate = generate_lmutate(population, locate_dist='normal')
self.assertEqual(type(uniform_lmutate), BitArray)
self.assertEqual(uniform_lmutate.bin, '01000011')
self.assertEqual(type(normal_lmutate), BitArray)
self.assertEqual(normal_lmutate.bin, '11111111')
def test_generate_pmutate(self):
population = [BitArray(bin='00000001'), BitArray(bin='00000011'),
BitArray(bin='00000111'), BitArray(bin='00001111'),
BitArray(bin='00011111'), BitArray(bin='00111111'),
BitArray(bin='01111111'), BitArray(bin='11111111')]
self.assertRaises(ValueError,
lambda: generate_pmutate(population, locate_dist='error', pflip_dist='uniform'))
self.assertRaises(ValueError,
lambda: generate_pmutate(population, locate_dist='uniform', pflip_dist='error'))
seed(2)
uu_pmutate = generate_pmutate(population, locate_dist='uniform', pflip_dist='uniform')
un_pmutate = generate_pmutate(population, locate_dist='uniform', pflip_dist='normal')
nu_pmutate = generate_pmutate(population, locate_dist='normal', pflip_dist='uniform')
nn_pmutate = generate_pmutate(population, locate_dist='normal', pflip_dist='normal')
self.assertEqual(type(uu_pmutate), BitArray)
self.assertEqual(uu_pmutate.bin, '10000111')
self.assertEqual(type(un_pmutate), BitArray)
self.assertEqual(un_pmutate.bin, '10011111')
self.assertEqual(type(nu_pmutate), BitArray)
self.assertEqual(nu_pmutate.bin, '01000001')
self.assertEqual(type(nn_pmutate), BitArray)
self.assertEqual(nn_pmutate.bin, '00000110')
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main(verbosity=2)
|
ytsapras/robonet_site
|
scripts/rome_fields_dict.py
|
Python
|
gpl-2.0
| 1,964 | 0.071792 |
field_dict={'ROME-FIELD-01':[ 267.835895375 , -30.0608178195 , '17:51:20.6149','-30:03:38.9442' ],
'ROME-FIELD-02':[ 269.636745458 , -27.9782661111 , '17:58:32.8189','-27:58:41.758' ],
'ROME-FIELD-03':[ 268.000049542 , -28.8195573333 , '17:52:00.0119','-28:49:10.4064' ],
'ROME-FIELD-04':[ 268.180171708 , -29.27851275 , '17:52:43.2412','-29:16:42.6459' ],
'ROME-FIELD-05':[ 268.35435 , -30.2578356389 , '17:53:25.044','-30:15:28.2083' ],
'ROME-FIELD-06':[ 268.356124833 , -29.7729819283 , '17:53:25.47','-29:46:22.7349' ],
'ROME-FIELD-07':[ 268.529571333 , -28.6937071111 , '17:54:07.0971','-28:41:37.3456' ],
'ROME-FIELD-08':[ 268.709737083 , -29.1867251944 , '17:54:50.3369','-29:11:12.2107' ],
'ROME-FIELD-09':[ 268.881108542 , -29.7704673333 , '17:55:31.4661','-29:46:13.6824' ],
'ROME-FIELD-10':[ 269.048498333 , -28.6440675 , '17:56:11.6396','-28:38:38.643' ],
'ROME-FIELD-11':[ 269.23883225 , -29.2716684211 , '17:56:57.3197','-29:16:18.0063' ],
'ROME-FIELD-12':[ 269.39478875 , -30.0992361667 , '17:57:34.7493','-30:05:57.2502' ],
'ROME-FIELD-13':[ 269.563719375 , -28.4422328996 , '17:58:15.2927','-28:26:32.0384' ],
'ROME-FIELD-14':[ 269.758843 , -29.1796030365 , '17:59:02.1223','-29:10:46.5709' ],
            'ROME-FIELD-15':[ 269.78359875 , -29.63940425 , '17:59:08.0637','-29:38:21.8553' ],
'ROME-FIELD-16':[ 270.074981708 , -28.5375585833 , '18:00:17.9956','-28:32:15.2109' ],
'ROME-FIELD-17':[ 270.81 , -28.0978333333 , '18:03:14.4','-28:05:52.2' ],
            'ROME-FIELD-18':[ 270.290886667 , -27.9986032778 , '18:01:09.8128','-27:59:54.9718' ],
'ROME-FIELD-19':[ 270.312763708 , -29.0084241944 , '18:01:15.0633','-29:00:30.3271' ],
'ROME-FIELD-20':[ 270.83674125 , -28.8431573889 , '18:03:20.8179','-28:50:35.3666' ]}
|
synteny/AuroraBot
|
sessioncontroller/settings.py
|
Python
|
mit
| 256 | 0 |
import os
TELEGRAM_TOKEN = os.environ['TELEGRAM_TOKEN']
DATABASE = {
'HOST': os.getenv('DB_PORT_3306_TCP_ADDR', 'localhost'),
'USER': os.getenv('DB_MYSQL_USER', 'root'),
'PASSWORD': os.getenv('DB_MYSQL_PASSWORD', ''),
    'NAME': 'aurora',
}
|
JavierGarciaD/banking
|
definitions.py
|
Python
|
mit
| 279 | 0 |
import os
def root_dir():
"""
:return: root folder path
"""
return os.path.dirname(os.path.abspath(__file__))
def db_path(db_name):
    if db_name == 'forecast':
return os.path.join(root_dir(), "data", "forecast.sqlite")
else:
return None
|
f-andrey/sprutio
|
app/modules/webdav/actions/files/read.py
|
Python
|
gpl-3.0
| 4,120 | 0.002205 |
from core import FM
import traceback
class ReadFile(FM.BaseAction):
def __init__(self, request, path, session, **kwargs):
super(ReadFile, self).__init__(request=request, **kwargs)
self.path = path
self.session = session
def run(self):
request = self.get_rpc_request()
result = request.request_bytes('webdav/read_file', login=self.request.get_current_user(),
password=self.request.get_current_password(), path=self.path,
session=self.session)
answer = self.process_result(result)
if 'data' in answer.keys():
data = answer['data']
if 'content' in data.keys() and 'encoding' in data.keys():
data['encoding'] = data['encoding'].decode('utf-8').lower()
data['content'] = str(data['content'], data['encoding'], 'replace')
data['item'] = self.byte_to_unicode_dict(data['item'])
answer['data'] = data
if 'error' in answer.keys():
# FIXME
answer["error"] = answer['error'] if answer['error'] is not None else True
if 'message' in answer.keys():
try:
message = answer['message'].decode('utf-8') if answer['message'] is not None else ''
answer['message'] = message
except Exception as e:
self.application.logger.error(
"Handled exception in action ReadFile: " + str(e) + "Traceback:" + traceback.format_exc())
if 'traceback' in answer.keys():
try:
trace = answer['traceback'].decode('utf-8') if answer['traceback'] is not None else ''
answer['traceback'] = trace
except Exception as e:
self.application.logger.error(
"Handled exception in action ReadFile: " + str(e) + "Traceback:" + traceback.format_exc())
return answer
def byte_to_unicode_dict(self, answer):
decoded = {}
for key in answer:
if isinstance(key, bytes):
unicode_key = key.decode("utf-8")
else:
unicode_key = key
if isinstance(answer[key], dict):
decoded[unicode_key] = self.byte_to_unicode_dict(answer[key])
elif isinstance(answer[key], list):
                decoded[unicode_key] = self.byte_to_unicode_list(answer[key])
elif isinstance(answer[key], int):
decoded[unicode_key] = answer[key]
elif isinstance(answer[key], float):
decoded[unicode_key] = answer[key]
elif isinstance(answer[key], str):
decoded[unicode_key] = answer[key]
elif answer[key] is None:
decoded[unicode_key] = answer[key]
else:
try:
                    decoded[unicode_key] = answer[key].decode("utf-8")
                except UnicodeDecodeError:
                    # Workaround for mojibake (garbled characters)
                    decoded[unicode_key] = answer[key].decode("ISO-8859-1")
return decoded
def byte_to_unicode_list(self, answer):
decoded = []
for item in answer:
if isinstance(item, dict):
decoded_item = self.byte_to_unicode_dict(item)
decoded.append(decoded_item)
elif isinstance(item, list):
decoded_item = self.byte_to_unicode_list(item)
decoded.append(decoded_item)
elif isinstance(item, int):
decoded.append(item)
elif isinstance(item, float):
decoded.append(item)
elif item is None:
decoded.append(item)
else:
try:
decoded_item = item.decode("utf-8")
except UnicodeDecodeError:
                    # Workaround for mojibake (garbled characters)
decoded_item = item.decode("ISO-8859-1")
decoded.append(decoded_item)
return decoded
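# A condensed, standalone sketch of the same recursive decoding idea as
# byte_to_unicode_dict/byte_to_unicode_list above (illustration only; the
# class methods remain the implementation actually used by ReadFile):
def _decode_any(value):
    if isinstance(value, dict):
        return {_decode_any(k): _decode_any(v) for k, v in value.items()}
    if isinstance(value, list):
        return [_decode_any(item) for item in value]
    if isinstance(value, bytes):
        try:
            return value.decode("utf-8")
        except UnicodeDecodeError:
            return value.decode("ISO-8859-1")  # fallback for mojibake
    return value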
|
metabolite-atlas/metatlas
|
metatlas/tools/formula_generator.py
|
Python
|
bsd-3-clause
| 4,342 | 0.066099 |
from __future__ import absolute_import
from __future__ import print_function
from copy import deepcopy
from six.moves import range
def get_formulae(mass,tol=5,charge=0,tol_type='ppm',max_tests=1e7,
min_h=0,max_h=200,
min_c=0,max_c=200,
min_n=0,max_n=6,
min_o=0,max_o=20,
min_p=0,max_p=4,
min_s=0,max_s=4,
min_na=0,max_na=0,
min_k=0,max_k=0,
min_cl=0,max_cl=0):
"""
performs brute force chemical formula generation. enumerate all possible matches within bounds.
Considers only [C,H,N,O,P,S,Na,K,Cl]
returns list of tuples (error,formula).
use it like this:
from metatlas.helpers import formula_generator as formulae
out = formulae.get_formulae(634.13226,1,min_c=20,min_h=15)
(4.457399995771993e-06, 'H26C32O14')
(9.535900062473956e-06, 'H29C34N5P1S3')
(3.076309997140925e-05, 'H151C24N6O3P2')
(5.4717500006518094e-05, 'H35C33N1O2P3S2')
....
"""
elements = [
{'num':0,'symbol':"H",'mass': 1.0078250321,'min':min_h,'max':max_h,'guess':0},
{'num':1,'symbol':"C",'mass': 12.000000000,'min':min_c,'max':max_c,'guess':0},
{'num':2,'symbol':"N",'mass': 14.003074005,'min':min_n,'max':max_n,'guess':0},
{'num':3,'symbol':"O",'mass': 15.994914622,'min':min_o,'max':max_o,'guess':0},
{'num':4,'symbol':"P",'mass': 30.97376151,'min':min_p,'max':max_p,'guess':0},
{'num':5,'symbol':"S",'mass': 31.97207069,'min':min_s,'max':max_s,'guess':0},
{'num':6,'symbol':"Na",'mass':22.989770,'min':min_na,'max':max_na,'guess':0},
{'num':7,'symbol':"K",'mass': 38.963708,'min':min_k,'max':max_k,'guess':0},
{'num':8,'symbol':"Cl",'mass':34.968852682,'min':min_cl,'max':max_cl,'guess':0}
]
electron=0.0005485799094
mass=mass+charge*electron #neutralize the molecule if charge is provided
if tol_type=='ppm':
tol=tol*mass/1e6
hits = do_calculations(mass,tol,elements,max_tests);
hits = sorted(hits,key=lambda x:x[0])
formulae = [] #store all formulae
for hit in hits:
formula = [] #store list of elements and stoichiometry
for element in hit[1]:
if element['guess'] != 0:
                formula.append('%s%d'%(element['symbol'],element['guess']))
        formulae.append((hit[0],''.join(formula)))
return formulae
def calc_mass(elements):
"""
    Sum each element's mass multiplied by its current 'guess' count.
"""
sum = 0.0
for el in elements:
sum += el['mass'] * el['guess']
return sum
def do_calculations(mass,tol,elements,max_tests):
"""
    Brute-force nested enumeration over allowed element counts; returns (error, elements) hits within mass +/- tol.
"""
limit_low = mass - tol
limit_high = mass + tol
test_counter = 0
hits = []
for n8 in range(elements[8]['min'],elements[8]['max']+1):
elements[8]['guess'] = n8
for n7 in range(elements[7]['min'],elements[7]['max']+1):
elements[7]['guess'] = n7
for n6 in range(elements[6]['min'],elements[6]['max']+1):
elements[6]['guess'] = n6
for n5 in range(elements[5]['min'],elements[5]['max']+1):
elements[5]['guess'] = n5
for n4 in range(elements[4]['min'],elements[4]['max']+1):
elements[4]['guess'] = n4
for n3 in range(elements[3]['min'],elements[3]['max']+1):
elements[3]['guess'] = n3
for n2 in range(elements[2]['min'],elements[2]['max']+1):
elements[2]['guess'] = n2
for n1 in range(elements[1]['min'],elements[1]['max']+1):
elements[1]['guess'] = n1
for n0 in range(elements[0]['min'],elements[0]['max']+1):
elements[0]['guess'] = n0
test_counter += 1
if test_counter > max_tests:
print('ERROR test limit exceeded')
return
theoretical_mass = calc_mass(elements)
if ((theoretical_mass >= limit_low) & (theoretical_mass <= limit_high)):
hits.append((abs(theoretical_mass-mass),deepcopy(elements)))
if theoretical_mass > limit_high: # n0
break
if (theoretical_mass > limit_high) & (n0==elements[0]['min']):
break
if (theoretical_mass > limit_high) & (n1==elements[1]['min']):
break
if (theoretical_mass > limit_high) & (n2==elements[2]['min']):
break
if (theoretical_mass > limit_high) & (n3==elements[3]['min']):
break
if (theoretical_mass > limit_high) & (n4==elements[4]['min']):
break
if (theoretical_mass > limit_high) & (n5==elements[5]['min']):
break
if (theoretical_mass > limit_high) & (n6==elements[6]['min']):
break
if (theoretical_mass > limit_high) & (n7==elements[7]['min']):
break
return hits
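# A small worked example: water is 2*1.0078250321 + 15.994914622 =
# 18.0105646862 Da, so a neutral mass near that with a 5 ppm tolerance should
# return H2O1 as the best (and, with these tight bounds, only) hit. The bounds
# below are narrowed purely to keep the brute-force search fast.
if __name__ == '__main__':
    hits = get_formulae(18.010565, tol=5, max_h=4, max_c=2, max_n=2, max_o=2,
                        max_p=0, max_s=0)
    print(hits[0])  # expected: (~3e-07, 'H2O1')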
|
ninjaotoko/djblog
|
djblog/forms.py
|
Python
|
bsd-3-clause
| 1,755 | 0.011966 |
# *-* coding=utf-8 *-*
from django.conf import settings
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
from django.forms.widgets import flatatt
from django.utils.encoding import smart_unicode, force_unicode
from django.template.loader import render_to_string
from django.utils.html import escape, conditional_escape
from django.utils.safestring import mark_safe
from djblog.models import Post, Category
class PreviewContentWidget(forms.Textarea):
def render(self, name, value, attrs=None):
if value is None:
value = ''
value = smart_unicode(value)
final_attrs = self.build_attrs(attrs, name=name)
context = {
'id':final_attrs['id'],
'id_content':final_attrs['id_content'],
'attrs':flatatt(final_attrs),
'content':value,
'name': name,
'STATIC_URL':settings.STATIC_URL
}
return mark_safe(render_to_string('djeditor/djeditor_widget.html', context))
class PostAdminForm(forms.ModelForm):
#slug = forms.SlugField(required=False)
content_rendered = forms.CharField(widget=PreviewContentWidget(attrs={'id_content':'id_content'}), required=False)
category = forms.ModelMultipleChoiceField(widget=forms.CheckboxSelectMultiple(), queryset=Category.objects.all(), required=False)
def __init__(self, *args, **kwargs):
super(PostAdminForm, self).__init__(*args, **kwargs)
if 'instance' in kwargs:
obj = kwargs['instance']
#self.fields['category'].queryset = Category.objects.filter(blog_category = not obj.is_page)
class Meta:
model = Post
exclude = []
|
sketchturnerr/WaifuSim-backend
|
resources/middlewares/body_checker.py
|
Python
|
cc0-1.0
| 1,251 | 0.003457 |
import json
import sys
import falcon
def body_checker(required_params=(), documentation_link=None):
def hook(req, resp, resource, params):
if req.content_length in (None, 0, ):
raise falcon.HTTPBadRequest('Bad request',
                                        'The request must include parameters, buddy.',
href=documentation_link)
#todo: https://github.com/falconry/falcon/pull/748
try:
body = json.loads(req.stream.read(sys.maxsize).decode('utf-8'))
except (ValueError, UnicodeDecodeError):
raise falcon.HTTPBadRequest('Bad request',
                                        'You sent bad JSON, sweetie; try sending another.',
href=documentation_link)
params = {}
description = "Ты забыл параметр '%s', няша."
        for key in required_params:
if key not in body:
                raise falcon.HTTPBadRequest('Bad request', description % key, href=documentation_link)
params[key] = body[key]
req.context['parsed_body'] = params
return hook
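# A minimal usage sketch with a hypothetical resource class: falcon.before()
# runs the hook returned by body_checker before the responder, so the parsed
# parameters arrive in req.context['parsed_body'].
class ExampleResource(object):
    @falcon.before(body_checker(required_params=('name',)))
    def on_post(self, req, resp):
        resp.body = json.dumps(req.context['parsed_body'])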
|
ping/instagram_private_api
|
instagram_web_api/__init__.py
|
Python
|
mit
| 337 | 0.002967 |
# flake8: noqa
from .client import Client
from .compatpatch import ClientCompatPatch
from .errors import (
ClientError, ClientLoginError, ClientCookieExpiredError,
ClientConnectionError, ClientForbiddenError,
    ClientThrottledError, ClientBadRequestError,
)
from .common import ClientDeprecationWarning
__version__ = '1.6.0'
|
wuan/bo-server
|
tests/test_influxdb.py
|
Python
|
apache-2.0
| 989 | 0 |
import datetime
import unittest
from assertpy import assert_that
from blitzortung_server.influxdb import DataPoint
class TestAS(unittest.TestCase):
def testEmptyDataPoint(self):
timestamp = datetime.datetime.utcnow()
data_point = DataPoint("meas", time=timestamp)
json_repr = data_point.get()
assert_that(json_repr).is_equal_to({
"measurement": "meas",
"time": timestamp,
"tags": {},
"fields": {}
})
def testDataPoint(self):
timestamp = datetime.datetime.utcnow()
data_point = DataPoint("meas", time=timestamp)
data_point.fields['foo'] = 1.5
data_point.tags['bar'] = "qux"
        data_point.tags['baz'] = 1234
json_repr = data_point.get()
assert_that(json_repr).is_equal_to({
"measurement": "meas",
"time": timestamp,
"tags": {'bar': 'qux', 'baz': 1234},
"fields": {'foo': 1.5}
})
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/surface/colorbar/_thicknessmode.py
|
Python
|
mit
| 506 | 0.001976 |
import _plotly_utils.basevalidators
class ThicknessmodeValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self, plotly_name="thicknessmode", parent_name="surface.colorbar", **kwargs
):
super(ThicknessmodeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
values=kwargs.pop("values", ["fractio
|
n", "pixels"]),
**kwargs
)
|
Hydrosys4/Master
|
interruptmod.py
|
Python
|
gpl-3.0
| 29,482 | 0.054949 |
from __future__ import print_function
from __future__ import division
from builtins import str
from past.utils import old_div
import logging
from datetime import datetime , time ,timedelta
import _strptime
import hardwaremod
import os
import subprocess
import emailmod
import interruptdbmod
import sensordbmod
import actuatordbmod
import autofertilizermod
import statusdataDBmod
import threading
import time as t
import ActuatorControllermod
ACTIONPRIORITYLEVEL=5
NONBLOCKINGPRIORITY=0
SAVEBLOCKINGBUSY=False
SAVEBLOCKINGDIFFBUSY=False
NOWTIMELIST=[]
#In hardware, an internal 10K resistor between the input channel and 3.3V (pull-up) or 0V (pull-down) is commonly used.
#https://sourceforge.net/p/raspberry-gpio-python/wiki/Inputs/
logger = logging.getLogger("hydrosys4."+__name__)
# status array, required to check the ongoing actions
elementlist= interruptdbmod.getelementlist()
waitingtime=1200
# ///////////////// -- STATUS VARIABLES -- ///////////////////////////////
AUTO_data={} # dictionary of dictionary
AUTO_data["default"]={"lasteventtime":datetime.utcnow()- timedelta(minutes=waitingtime),"lastinterrupttime":datetime.utcnow(),"validinterruptcount":0,"eventactivated":False,"lastactiontime":datetime.utcnow()- timedelta(minutes=waitingtime),"actionvalue":0, "alertcounter":0, "infocounter":0, "status":"ok" , "threadID":None , "blockingstate":False}
SENSOR_data={} # used for the associated sensor in a separate hardwareSetting Row
SENSOR_data["default"]={"Startcounttime":datetime.utcnow(),"InterruptCount":0}
PIN_attributes={} # to speed up the operation during interrupt
PIN_attributes["default"]={"logic":"pos","refsensor":"","bouncetimeSec":0.001}
BLOCKING_data={} # to speed up the operation during interrupt
BLOCKING_data["default"]={"BlockingNumbers":0,"BlockingNumbersThreadID":None}
def readstatus(element,item):
return statusdataDBmod.read_status_data(AUTO_data,element,item)
def savedata(sensorname,sensorvalue):
sensorvalue_str=str(sensorvalue)
sensorvalue_norm=hardwaremod.normalizesensordata(sensorvalue_str,sensorname)
sensordbmod.insertdataintable(sensorname,sensorvalue_norm)
return
def eventcallback(PIN):
bouncetimeSec=statusdataDBmod.read_status_data(PIN_attributes,PIN,"bouncetimeSec")
t.sleep(bouncetimeSec)
reading=hardwaremod.readinputpin(PIN)
refsensor=statusdataDBmod.read_status_data(PIN_attributes,PIN,"refsensor")
logic=statusdataDBmod.read_status_data(PIN_attributes,PIN,"logic")
#print "reference sensor:" , refsensor, "logic ", logic
#print PIN_attributes
# first Edge detection, can have two impleemntations depend on the "logic" setting
# in case logic=pos we have pull-down resistor, so the normal state is LOW, the first edge will be from LOW to HIGH
# in case logic<>pos we have pull-up resistor, so the normal state is High, the first edge will be from HIGH to LOW
if refsensor!="":
#["First Edge" , "First Edge + Level", "Second Edge" , "Second Edge + Level (inv)", "both Edges"]
#print "Logic " , logic , " reading ", reading , " bouncetimeSec " , bouncetimeSec
#detecting first edge
if logic=="pos":
if reading=="1":
#print "************************* First edge detected on PIN:", PIN
mode="First Edge"
elif reading=="0":
#print "************************* Second edge detected on PIN:", PIN
mode="Second Edge"
else:
if reading=="0":
#print "************************* First edge detected on PIN:", PIN
mode="First Edge"
elif reading=="1":
#print "************************* Second edge detected on PIN:", PIN
mode="Second Edge"
#print "interrupt --------------------> ", mode
interruptcheck(refsensor,mode,PIN)
# update status variables for the frequency sensor ----
sensorinterruptcount=statusdataDBmod.read_status_data(SENSOR_data,PIN,"InterruptCount")
sensorinterruptcount=sensorinterruptcount+1
	statusdataDBmod.write_status_data(SENSOR_data,PIN,"InterruptCount",sensorinterruptcount)
#if refsensor!="":
# x = threading.Thread(target=savedata, args=(refsensor,reading))
# x.start()
def setinterruptevents():
hardwaremod.removeallinterruptevents()
print("load interrupt list ")
interruptlist=interruptdbmod.sensorlist()
print("len interrupt list " , len(interruptlist))
for item in interruptlist:
print("got into the loop ")
# get PIN number
recordkey=hardwaremod.HW_INFO_NAME
		recordvalue=item
keytosearch=hardwaremod.HW_CTRL_PIN
PINstr=hardwaremod.searchdata(recordkey,recordvalue,keytosearch)
print("set event for the PIN ", PINstr)
if not PINstr=="":
keytosearch=hardwaremod.HW_CTRL_LOGIC
logic=hardwaremod.searchdata(recordkey,recordvalue,keytosearch)
# set Sw pull up / down mode
if logic=="pos":
hardwaremod.GPIO_setup(PINstr, "in", "pull_down")
evenslopetype="both"
else:
hardwaremod.GPIO_setup(PINstr, "in" , "pull_up")
evenslopetype="both"
#GPIO.RISING, GPIO.FALLING or GPIO.BOTH.
# link to the callback function
# the bouncetime is set by the frequency parameter, if this parameter is empty, the default bouncetime would be 200
keytosearch=hardwaremod.HW_CTRL_FREQ
frequency=hardwaremod.searchdata(recordkey,recordvalue,keytosearch)
if frequency=="":
bouncetimeINT=200
else:
frequencyINT=hardwaremod.toint(frequency,5)
bouncetimeINT=old_div(1000,frequencyINT) # in ms. this is ok to be trunk of the int. For frequencies higher than 1000 the bouncetime is exactly zero
# RPI.GPIO library does not accept bouncetime=0, it gives runtime error
if bouncetimeINT<=0:
bouncetimeINT=1 #ms
hardwaremod.GPIO_add_event_detect(PINstr, evenslopetype, eventcallback, bouncetimeINT)
# set fast reference call indexed with the PIN number which is the variable used when interrupt is called:
# search now to avoid searching later
global PIN_attributes
PIN=hardwaremod.toint(PINstr,0)
statusdataDBmod.write_status_data(PIN_attributes,PIN,"logic",logic)
recordkey=hardwaremod.HW_CTRL_PIN
recordvalue=PINstr
keytosearch=hardwaremod.HW_INFO_NAME
refsensor=hardwaremod.searchdata(recordkey,recordvalue,keytosearch) # return first occurence
statusdataDBmod.write_status_data(PIN_attributes,PIN,"refsensor",refsensor)
statusdataDBmod.write_status_data(PIN_attributes,PIN,"bouncetimeSec",0.4*float(bouncetimeINT)/1000)
# code below to enable blocking for N sec, it is necessary to trigger the bloccking status in case of levels already present when starting.
elementlist= interruptdbmod.getelementlist()
#print elementlist
for element in elementlist:
workmode=checkworkmode(element)
if (workmode!="None")and(workmode!=""):
sensor=interruptdbmod.searchdata("element",element,"sensor")
#saveblockingdiff(sensor)
print(" what a sensor ", sensor)
if sensor!="":
startblockingstate(element,10,False)
t.sleep(0.02)
return ""
def cyclereset(element):
#AUTO_data["default"]={"lasteventtime":datetime.utcnow()- timedelta(minutes=waitingtime),"lastinterrupttime":datetime.utcnow(),
#"validinterruptcount":0,"eventactivated":False,"lastactiontime":datetime.utcnow()- timedelta(minutes=waitingtime),
#"actionvalue":0, "alertcounter":0, "infocounter":0, "status":"ok" , "threadID":None , "blockingstate":False}
#SENSOR_data["default"]={"Startcounttime":datetime.utcnow(),"InterruptCount":0} # this is for the actual frequency sensor
#PIN_attributes["default"]={"logic":"pos","refsensor":"","bouncetimeSec":0.001} # this is relebant to the PINs
#BLOCKING_data["default"]={"BlockingNumbers":0,"BlockingNumbersThreadID":None} # tihs is relenat to the Interrupt trigger
global AUTO_data
waitingtime=hardwaremod.toint(interruptdbmod.searchdata("element",element,"preemptive_period"),0)
statusdataDBmod.write_status_data(AUTO_data,element,"lastactiontime",datetime.utcnow() - timedelta(minutes=waitingtime))
statusdataDBmod.write_status_data(AUTO_data,element,"lasteventtime",datetime.utcnow() - timedelta(minutes=waitingtime))
statusdataDBmod.write_status_data(AUTO_data,element,"status","ok")
statusdataDBmod.write_status_data(AUTO_data,element,"actionvalu
|
pulse-project/mss
|
mss/www/wizard/transaction.py
|
Python
|
gpl-3.0
| 7,120 | 0.000702 |
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _, ungettext
from mss.lib.xmlrpc import XmlRpc
xmlrpc = XmlRpc()
logger = logging.getLogger(__name__)
class Steps:
PREINST = "preinst"
DOWNLOAD = "download"
MEDIAS_AUTH = "medias_auth"
MEDIAS_ADD = "medias_add"
INSTALL = "install"
CONFIG = "config"
END = "end"
class State:
DISABLED = -1
TODO = 0
DONE = 1
class Transaction(object):
def __init__(self, request, modules_list=None):
if modules_list is not None:
self.modules_list = modules_list
self.setup()
else:
self.modules_list = request.session['modules_list']
self.steps = request.session['steps']
self.update()
self.save(request)
def setup(self):
result = xmlrpc.call('preinstall_modules', self.modules_list)
self.modules_info = result
self.modules_list = [m['slug'] for m in self.modules_info]
self.steps = [
{
'id': Steps.PREINST,
'state': State.TODO,
'title': _("Installation summary"),
'info': ungettext(
"The following addon will be installed.",
"The following addons will be installed.",
len(self.modules_list)
),
'show_modules': True,
'current': False
},
{
'id': Steps.DOWNLOAD,
'state': State.DISABLED,
'title': _("Addon download"),
'info': _("Download addons from the ServicePlace"),
'current': False,
},
{
'id': Steps.MEDIAS_AUTH,
'state': State.DISABLED,
'title': _("Medias authentication"),
'info': _("One or more medias need authentication"),
'current': False,
},
{
'id': Steps.MEDIAS_ADD,
'state': State.DISABLED,
'title': _("Medias add"),
'info': "",
'current': False,
},
{
'id': Steps.INSTALL,
'state': State.DISABLED,
'title': _("Installation"),
'info': "",
'current': False,
},
{
'id': Steps.CONFIG,
'state': State.DISABLED,
'title': _("Initial configuration"),
'info': "",
'current': False
},
{
'id': Steps.END,
'state': State.TODO,
'title': _("End of installation"),
'info': _("The installation is finished."),
'reboot': False,
'current': False
}
]
for module in self.modules_info:
            if not module['installed'] or not module["downloaded"]:
self.todo_step(Steps.INSTALL)
if not module["downloaded"]:
self.todo_step(Steps.DOWNLOAD)
if module["has_reposito
|
ries"]:
self.todo_step(Steps.MEDIAS_ADD)
if module["has_restricted_repositories"]:
self.todo_step(Steps.MEDIAS_AUTH)
if module["has_configuration"] or module["has_configuration_script"] or not module["downloaded"]:
self.todo_step(Steps.CONFIG)
if module['reboot']:
self.update_step({'id': Steps.END,
'title': _("Reboot"),
'reboot': True,
'info': _("The installation is finished. The server must be rebooted. The reboot can take a few minutes.")})
def update(self):
self.modules_info = xmlrpc.call('get_modules_details', self.modules_list)
downloaded = True
has_repositories = False
has_restricted_repositories = False
installed = True
configured = True
for module in self.modules_info:
if not module['downloaded']:
downloaded = False
if module['has_repositories']:
has_repositories = True
if module['has_restricted_repositories']:
has_restricted_repositories = True
if not module['installed']:
installed = False
if module["has_configuration_script"] and module['can_configure']:
configured = False
if not module['downloaded']:
configured = False
if self.get_state_step(Steps.DOWNLOAD) == State.TODO and downloaded:
self.done_step(Steps.DOWNLOAD)
if self.get_state_step(Steps.INSTALL) == State.TODO and installed:
self.done_step(Steps.INSTALL)
if self.get_state_step(Steps.MEDIAS_AUTH) == State.TODO and not has_restricted_repositories:
self.done_step(Steps.MEDIAS_AUTH)
if self.get_state_step(Steps.MEDIAS_ADD) == State.TODO and not has_repositories:
self.done_step(Steps.MEDIAS_ADD)
if self.get_state_step(Steps.CONFIG) == State.TODO and configured:
self.done_step(Steps.CONFIG)
def save(self, request):
request.session['modules_list'] = self.modules_list
request.session['steps'] = self.steps
def find_step(self, step):
for s in self.steps:
if s['id'] == step:
return s
raise Exception("Step does not exist ?!")
def get_state_step(self, step):
return self.find_step(step)['state']
def todo_step(self, step):
self.find_step(step)['state'] = State.TODO
def done_step(self, step):
self.find_step(step)['state'] = State.DONE
def update_step(self, step):
for s in self.steps:
if s['id'] == step['id']:
for key, value in step.items():
s[key] = value
def current_step(self):
for s in self.steps:
if s['current']:
return s
def set_current_step(self, step):
for s in self.steps:
if s['id'] == step:
s['current'] = True
else:
s['current'] = False
def first_step(self):
for step in self.steps:
if not step['state'] in (State.DONE, State.DISABLED):
return step
def next_step(self):
next = False
for step in self.steps:
if next and not step['state'] in (State.DONE, State.DISABLED):
return step
if step['current']:
next = True
# no next step, return home
return {'id': 'sections'}
def next_step_url(self):
return reverse(self.next_step()['id'])
def first_step_url(self):
return reverse(self.first_step()['id'])
def current_step_url(self):
return reverse(self.current_step()['id'])
|
pakozm/DNNJ
|
HyperoptScripts/hyperopt_mlp.py
|
Python
|
mit
| 1,224 | 0.007353 |
import math
import os
import sys
sys.path.append(os.path.dirname(sys.argv[0])+"/../TensorFlowScripts")
import train_GED
from hyperopt import hp, fmin, rand, tpe, STATUS_OK, Trials
space = {
'lr': hp.loguniform('lr', math.log(1e-5), math.log(30)),
'wd' : hp.choice('wd', [1e-1, 1e-2, 1e-3, 1e-4, 1e-5, 1e-6, 1e-7, 1e-8, 1e-9, 1e-10, 0.0]),
'hsize': hp.choice('hsize', [2048]),
'nlayers': hp.choice('nlayers', [3]),
'Lambda': hp.choice('Lambda', [0.0]),
'gamma': hp.choice('gamma', [0.999])
}
def objective(params):
train_loss,val_loss = train_GED.main(params)
print "LOSSES: ",train_loss,val_loss
return {
'loss': train_loss,
'loss_variance': train_loss * (1.0 - train_loss),
'true_loss': val_loss,
'true_loss_variance': val_loss * (1.0 - val_loss),
'status': STATUS_OK,
# # -- store other results like this
# 'eval_time': time.time(),
# 'other_stuff': {'type': None, 'value': [0, 1, 2]},
# # -- attachments are handled differently
# 'attachments':
# {'time_module': pickle.dumps(time.time)}
}
trials = Trials()
best = fmin(objective, space=space, algo=tpe.suggest, max_evals=100,
            trials=trials)  # pass the Trials object so evaluations are recorded
print(best)
|
supercheetah/diceroller
|
pyinstaller/buildtests/import/error_during_import2.py
|
Python
|
artistic-2.0
| 48 | 0 |
import os
os.environ["qwiejioqwjeioqwjeioqwje"]
|
ezrosent/iterators-etc
|
hash/runTests.py
|
Python
|
mit
| 722 | 0.018006 |
from subprocess import Popen, PIPE
import itertools
ITERATORS_NUM = [0, 1, 4]
UPDATERS_NUM = [1, 2, 4, 8]
DURATION = [2]
PERCENTAGES = [(25, 25)]
KEY_RANGE = [65536]
INIT_SIZE = [1024]
PARAMETER_COMBINATIONS = [ITERATORS_NUM, UPDATERS_NUM, DURATION, PERCENTAGES, KEY_RANGE, INIT_SIZE]
for param in itertools.product(*PARAMETER_COMBINATIONS):
args = ["java", "-cp", ".:lib/java-getopt-1.0.13.jar" , "IteratorTest"]
args += ["-i", str(param[0])]
args += ["-u", str(param[1])]
args += ["-d", str(param[2])]
args += ["
|
-I", str(param[3][0])]
args += ["-R", str(param[3][1])]
args += ["-M", str(param[4])]
args += ["-s", str(param[5])]
pTest = Popen(args, stdout=PIPE)
result = pTest.communicate()[0]
    print(result)
|
tiffanyj41/hermes
|
src/data_prep/jester_vectorize.py
|
Python
|
apache-2.0
| 3,684 | 0.001629 |
from src.utils import glove
import numpy as np
import string
class jester_vectorize():
def __init__(self, user_interactions, content, user_vector_type, content_vector_type, **support_files):
"""Set up the Jester Vectorizer.
Args:
user_interactions (rdd): The raw data of users interactions with
the system. For Jester, each "row" is as follows:
Row(joke_id, rating, user_id)
content (rdd): The raw data about the items in the dataset. For
Jester, each row is as follows: Row(joke_id, joke_text)
user_vector_type (str): The type of user vector desired. One of
'ratings', 'pos_ratings', 'ratings_to_interact', or None.
content_vector_type: The type of content vector desired. One of
'glove' or None.
support_files: Only one support file is used for this class:
glove_model: An instantiated glove model.
"""
self.user_vector_type = user_vector_type
self.content_vector_type = content_vector_type
self.user_interactions = user_interactions
self.content = content
# If no support files were passed in, initialize an empty support file
if support_files:
self.support_files = support_files
else:
self.support_files = {}
def get_user_vector(self):
"""Produce an RDD containing tuples of the form (user, item, rating).
There are three options when producing these user vectors:
ratings: The ratings the users assigned
pos_ratings: Only ratings > 0, all others are discarded
ratings_to_interact: Positive ratings are mapped to 1, negative to -1.
"""
uir = self.user_interactions.map(lambda row: (row.user_id, row.joke_id, row.rating))
if self.user_vector_type == 'ratings':
return uir
elif self.user_vector_type == 'pos_ratings':
            return uir.filter(lambda row: row[2] > 0)  # row = (user, item, rating)
elif self.user_vector_type == 'ratings_to_interact':
            return uir.map(lambda row: (row[0], row[1], 1 if row[2] > 0 else -1))
elif self.user_vector_type == 'none' or self.user_vector_type is None:
return None
else:
print "Please choose a user_vector_type between 'ratings', 'pos_ratings', 'ratings_to_interact', and 'none'"
return None
def get_content_vector(self):
"""Produce an RDD containing tuples of the form (item, content_vector).
There is one method of producing content vectors:
glove: Use the Stanford GloVe model to sum vector ratings of all
the words in the joke.
"""
if self.content_vector_type == 'glove':
# The model is initialized by the user and passed in via the
# support_file object
            glove_model = self.support_files["glove_model"]
# Transformation function
def joke_to_glove(row, glove):
vector = np.zeros(glove.vector_size)
for chunk in row.joke_text.split():
word = chunk.lower().strip(string.punctuation)
vector += glove[word]
return (row.joke_id, vector)
# Run the transformation function over the data
return self.content.map(lambda row: joke_to_glove(row, glove_model))
elif self.content_vector_type == 'none' or self.content_vector_type is None:
return None
else:
print "Please choose a content_vector_type between 'glove' or None"
return None
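# A hedged usage sketch (the Spark RDDs and the instantiated glove model are
# assumed to be prepared elsewhere; the variable names are illustrative):
#
#     vectorizer = jester_vectorize(ratings_rdd, jokes_rdd,
#                                   user_vector_type='ratings',
#                                   content_vector_type='glove',
#                                   glove_model=my_glove_model)
#     user_vectors = vectorizer.get_user_vector()
#     content_vectors = vectorizer.get_content_vector()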
|
boothead/karl
|
karl/views/__init__.py
|
Python
|
gpl-2.0
| 864 | 0.001157 |
# Copyright (C) 2008-2009 Open Society Institute
# Thomas Moroz: tmoroz@sorosny.org
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License Version 2 as published
# by the Free Software Foundation. You may not use, modify or distribute
# this program under any other version of the GNU General Public License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# a package
|
egetzel/wecrow
|
truehand2014/temboo/Library/Twilio/AvailablePhoneNumbers/TollFreeList.py
|
Python
|
apache-2.0
| 4,256 | 0.007519 |
# -*- coding: utf-8 -*-
###############################################################################
#
# TollFreeList
# Returns a list of toll-free available phone numbers that match the specified filters.
#
# Python version 2.6
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class TollFreeList(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the TollFreeList Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
Choreography.__init__(self, temboo_session, '/Library/Twilio/AvailablePhoneNumbers/TollFreeList')
def new_input_set(self):
return TollFreeListInputSet()
def _make_result_set(self, result, path):
return TollFreeListResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return TollFreeListChoreographyExecution(session, exec_id, path)
class TollFreeListInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the TollFreeList
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccountSID(self, value):
"""
Set the value of the AccountSID input for this Choreo. ((required, string) The AccountSID provided when you signed up for a Twilio account.)
"""
InputSet._set_input(self, 'AccountSID', value)
def set_AreaCode(self, value):
"""
Set the value of the AreaCode input for this Choreo. ((optional, integer) Find phone numbers in the specified area code. (US and Canada only).)
"""
InputSet._set_input(self, 'AreaCode', value)
def set_AuthToken(self, value):
"""
Set the value of the AuthToken input for this Choreo. ((required, string) The authorization token provided when you signed up for a Twilio account.)
"""
InputSet._set_input(self, 'AuthToken', value)
def set_Contains(self, value):
"""
        Set the value of the Contains input for this Choreo. ((optional, string) A pattern to match phone numbers on. Valid characters are '*' and [0-9a-zA-Z]. The '*' character will match any single digit.)
        """
InputSet._set_input(self, 'Contains', value)
def set_IsoCountryCode(self, value):
"""
Set the value of the IsoCountryCode input for this Choreo. ((optional, string) The country code to search within. Defaults to US.)
"""
InputSet._set_input(self, 'IsoCountryCode', value)
def set_PageSize(self, value):
"""
Set the value of the PageSize input for this Choreo. ((optional, integer) The number of results per page.)
"""
InputSet._set_input(self, 'PageSize', value)
def set_Page(self, value):
"""
Set the value of the Page input for this Choreo. ((optional, integer) The page of results to retrieve. Defaults to 0.)
"""
InputSet._set_input(self, 'Page', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are: json (the default) and xml.)
"""
InputSet._set_input(self, 'ResponseFormat', value)
class TollFreeListResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the TollFreeList Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from Twilio.)
"""
return self._output.get('Response', None)
class TollFreeListChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return TollFreeListResultSet(response, path)
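# A hedged usage sketch; the TembooSession import path, constructor arguments
# and execute_with_results() call follow the generated-SDK conventions but are
# assumptions, not verified against this SDK version:
#
#     from temboo.core.session import TembooSession
#     session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
#     choreo = TollFreeList(session)
#     inputs = choreo.new_input_set()
#     inputs.set_AccountSID('AC...')
#     inputs.set_AuthToken('...')
#     results = choreo.execute_with_results(inputs)
#     print(results.get_Response())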
|
mmolero/pyqode.python
|
runtests.py
|
Python
|
mit
| 236,912 | 0.000038 |
#! /usr/bin/env python
# Hi There!
# You may be wondering what this giant blob of binary data here is, you might
# even be worried that we're up to something nefarious (good for you for being
# paranoid!). This is a base64 encoding of a zip file, this zip file contains
# a fully functional basic pytest script.
#
# Pytest is a thing that tests packages, pytest itself is a package that some-
# one might want to install, especially if they're looking to run tests inside
# some package they want to install. Pytest has a lot of code to collect and
# execute tests, and other such sort of "tribal knowledge" that has been en-
# coded in its code base. Because of this we basically include a basic copy
# of pytest inside this blob. We do this because it lets you as a maintainer
# or application developer who wants people who don't deal with python much to
# easily run tests without installing the complete pytest package.
#
# If you're wondering how this is created: you can create it yourself if you
# have a complete pytest installation by using this command on the command-
# line: ``py.test --genscript=runtests.py``.
sources = """
eNrMvdt2HFmWGFaSZY8n52aNRpIlW3JUsjkRWZUZBMi6TU4lq1kkWAU1C6RBcAotFBQMZAaAaGZm
JCMiCaB7ei37B/wL/gWv5e/Qk/zsT/CDP8DL+3auEZFIsrtHqpkmIiPO2ee2zz5777Mv/8s//u3b
j6Ljf/bRRx8lq5s6q+p4USzfZDertJ5evv1Hx//vv/3oo36/H1hv8+VFkC5n8Gr6Bp/P18tpnRfL
dJ7XN3EQQPFeL1+sirIOimoYVDdV77wsFsHqJj5b5/M6XwbyOTlLK2izBDC93iw7D7gPCYJMy4sk
sZqNyuztGj4Oxr0goFaOLrOgzOp1ucxmwevXVtnXrwMBEazK4l0+y6qgvsyqjKpeZvNVVgaLrL4s
ZvChgKHM8vOboDj7VTatocuznEdU5lCxKGEYcbZ8l5fFcjzuEQz8z2owrrI6resyAhDDYJkusmHw
Lp2v4U+Z5hWMb3JUrrNBa91ZNvfr3l4JGszrbBEt0tUKSjqNdjZDNd63GRh55IxoVWarbDmbPE3n
VXdbjWpbNHZTwcNlIg1E+KO95PRylpfynQo8ms95FfNpiktXBVc5vDrLgvVyViyzID2vYc1rQhnC
I8Q5rIkIp3E4uEyr4Dxf5tVlNosDxLDXr6Xjr19T+VVawqgQ2Az/XeRLwJH8PEiDX2Q3e2WJ6FIG
j2BB87N1ndEbqqg6hOAAYaEK9gYm+B7OFrVeAF6muh/LIqgBhbM6VhhPfxc0BcHEnpCIp0mGFqez
GQwCNuSvszLi8jHOgyqFe0bg9Hq882ZZmb/LEt6YOLERPyY0ybznZNfyJuU359DLOsD5qep0Oc3s
WkN7fw9wVvpxn8srCkDlxnqJaWqCo5sVT1vUX6xhcWDO0rOqmMNs6i5AtYABDwni3bIf3NVw/P+c
Xg3ULADoSXByylhAEw2/rZK8apf5PJPPpqN1eTN2WoNNBZUTmb8kibjGMDgA5FP/9pNkVkyTpG+Q
OrueZqs62KdqNGgXLkywNWl+N9R/SVKX6TQ7S6dvLoHYJQn0BbdZo6DQ+PM0n0f9abGezxg0z2q6
vAmq9RmieD0O7labptTgQ+JuUzObTGWgK4LFZbUCChTBeIbBrluhJKxd0a7HStYEAY1xB5xWVQad
LRUOdq6JWT4gRhGWHwQ
Pg91x65iQAkNXqSOrYhUNWkvxMl/YxB6fmoUtcCc7p43Pt0AQrHCJyO9r
2fmUgx2jiEyqmuFdFFi98leJCAd+GeJUIO2YzmE9AL/rSiEmUpMkARJewjaosvn5wNrgDKH/9ZIq
PAROgZ+g5wwDJl5gWuRNH/nBc+78myxbER8CEKdFOQuK80CO4Ht4yN2D0+eeHCfB9DJdXmRVrCko
9xCofN3sIf6ME4Fl6IPzCVto+zS9mtE4lllPt6MYAywxdDaGHIwTngDvfDQdwlG/hAkyq0TVgmKp
oS2yRVHmv8YJwSOlgF1NZWIN5Nsb7Ey6ntdCY13UUoeRaWOWM2XIrnNAHMPzPIXC02L5LlvmGZD7
4KZYB9
N0GVSrbIosVCpUGXYpnJzcQeDGYBtOLzUQdRLmSzhA4bCv4TiE8mkwK2p8toj8EArDEmLn
AClqIk0azlmmhmw6jjMLTOjedbpYzS3K4fCHml3rA2cHPYSFA5o0TxdnszS4Hgf9e/2BHPfUXyKU
iKU1MQRcQ5hMPrjP5VNRwWtgRNbzzJo1j4+QFXT5B2EGCO2qS2oR92vPOgnckeYVrVA6L7N0doNc
WZUt6yCShSbG1qEGtAiALClwR3nNq0DYMIhtbOttTV5kneDsh9Wve3ZXeYTcxco/sppcg8Jkm2Fo
0rsGe7Cu9JxFLVuLeY42Qt73a0l5RjbmBRi7NFK2w5Ep4C73XXLJczChHjkfuIua5WiyXvxpYGYU
tjRAs44ed7Q8yQN7AQTZSFKT2h2r0UYQon7n8eA0PbD6eCdI3xVANph6g8g1LfNVXZRVMM/fwDpB
x/MpC133qAw/250WTIrzigqoefBYLTUXwlYAUwXiWpLgRo7a58Mh6jG0AgxEtDMMvGlkyAO7Xjdu
GRKv5Lcmid+Gtj9B5t/e169fY1WkLig0GyI6DM48Kq7h+NS87qDmQCPe5cW6mt80CPs+7knTNuAK
k/QcafNyZhPzvNJk3ixe/T5UXRN1XV8T926i7vTVpqbYV6TNRUASKS6/cFBImlvFrp7Np2lyusgr
BPmh1PCc+d1/CKKnUM7Zi53UziVZbt33p3nbE8D3p3MySUB5msNrTKdgQNeUeRTtNrFiOxpx64Q4
7H0rlfaY6taldPhH0towcQFK51KhJp+odVewx5cgE5k9DRuE2UIHrx2u1h631RY8NqmrNQ74foLf
TmGVqQmHOLb3/3aS6BJCHFccHNLSak0Lnhkw5CKrlmHdRtN+X3QCOMdbKYQiACKsQ4ffF2XVuLZH
1t9h0QT5zOI5SIfqO16zVt0fCjktyCda0gUyou/SMk/P5pmDgK9fE6DXr2NZHgFocdt0vIDMBtQW
DhJAFGBv6TyYrssSAbc2wminyTqcWAJZGHNpNkhnvypyVBhrucLqg2k2thdWMXJA5CJPwQprqtrB
NvkAWNoK41aWkP9+qut+atWQxXCWWlEBU2oDM+Iv3fvuNJwUa46bm453HKKUpcf6g2291uW+bS/S
rGmNd2PW1JTIX5veejponkhLCaqm7oUsHaAPfFTYDdVj+T2HGUHBUE7IeSGa6bhBNKwTj5sLkyp9
l0lXwkHr3jcFEDOl2ZOxhTjyziIPiMA0FGvArErvHOZjUp44+++qKOniB+oBs17AEQNDJ/FVc4xa
fcoac1TCpEtS5Cr9AOBRijdC1EOcm7lcv3SRVqNiySvSsbTNCmtgtFAfORtVzTHrFfo08L43udQd
npJN9BdaaFxB4GSikt3XKOEsvoL3mvs2Cqng6BIGA2Ofo1KlWi/4nkpXRXAoOE3fxMFjKIUTB7sP
JjGD8kDaciA3IqZl5+eoGVsv51lVaQion5nhBVeZNe7w9I0INYO9cKjeOd5keNdZSNscRqlxwtmC
fysfbMtVPnhPC9qYdptf0nU7xDzYCq6ODsdjmKPuYSHJ2Dis1iG1ap+tI1aa9Yi7p/BVJLYd0grl
6jvBFQpVovShm6NsBju7CK4QFd7xVgXpGzbnjITubJtJ9Tqo+bhWhqMxtdbm2oaAGWKFpMsnZw0G
pVmiky4g0jVpg96tuuygk3aQ9vbtPz7+c+tKvFwvl1n59r86/r8e8W04CGz5FLbsfI5bDg9+KEIX
iauyqAv4gFR/nuGZJdeRuLPkJDibnalHfTdOO1le4rO+QV/d9NxrN3WVnkyLWRbjP6reEWkT0/lh
tiqd63RcUpg4EDllJUQN/xs9CyGqGkM9IWNSPRphJ6ze5Cv7M/62PnMHipKLjQP7t1UMmITahoK/
+fNve707vTvS32A1X1/AjrwsijeVM5B0NiuIZQC6W8KhJsO5KIv1CtaOXyLtpzdRv5YZgQFjh1BC
BcJv/yAiOOlfZLDA6VzkV6odm8bC0Wi25ovZKrREwJQ2zaRfwVAzAFaDkD7JlyDEiZpmwvd+C6BI
wLlM+gd9qzaaIUDdy+IqOAiqeXGFQ4cdtl7do1nQTQbRwWSHyBeQ6EHcHwydSVGDTOD8WMAOjtQL
HqeeJQNvEvhFAI+W5/lFzAOOdVF1yWvqNg5fxiW+RC1bQHPbxAxpmoFjge/0Eq81yxgJVRUTzaki
i15ISSwlFRqkWVEe+A6ER/U0bFGlUC/UNSMUH9h32DMXuDUsrlbBaCI4PyfmmuBaT9TAKginfAZ4
GLnQ1QTad8jxVQnkFKgqoOoEMVNhgbv+gpUu5d5Q+27lAwjumh+W1C6LQn9PxrqECILW1HtzswSi
k8/oenMV848YVwcpTH88hvUbY3/g30Gzv8DEQKm7O/fj++dVcHf0VdVy04yroyd3SO1cXWbIvlNr
yL9aO6ACjgc7XqewRPJDll9+0dG1XtFxCP1+iT9e4g9YpSYgNgC5HVJcZ2k5K66WCWxMc3F5AH3c
X54X4/bLxqEWBfxrR/UeeqgenXHKIZOoQybCwxjmJLumY1lZaZBKAIln7NWbFxc8R95KTqgKP5ve
8Vv1iwmXmE4wvI5uTHR/7LOGVKU0mPba0DcSUC04lo4B9rjYtsDcqA3PFfuJfOnrDWeVJqEc9TE4
FlVybKmt8TUujnzShjR4niBLnACEhGmZapAQoE9d1qXhM9LVE3g8Vd1ArEWeLZvZZIWKKiLU0QK+
lgacFm6pptCRq5qjprkyDPWOyAAof6i6fOoyJ3mWZUuahGwm5a/SZa1M6ypmfGSic0B4lAMviiC9
Sm+aS+FPullPUgm4X3UTwpBZiCRT0boxaGUify+07VwU79My47KtsHDcNiiHwSe4UlJQRlj4PaXV
GFsXYy+RQQjo/kBPFICHk39VVPUCz8kFEIqz9cWFfamC/ITWvNVnIudDQwkCsQRU+lafxfVZgktM
TQLrFXxSg3z5SXBe2jeQCAOvWxIEj/XgT/Ojo6lqqavuQqjhns2uQ2eg/e+yOihzMszI+ZBHUVdz
oq7JV+sCKIzcQOU20uTsGvgzv7LbEqFSgs9Yd11F/EY1cS7IRmcPjiHSe9/sNYuImwrIPtvbntGA
/gNyml0UKIAC61fWIL+RlhN4hrOiykZXRTlrM7rpZygYYst79M/h4fPDvqWhMG0j073yG1dQ5COx
C/jPy1/sv3ix96S/QdehqmJx+l+PmfV9R8LhiW0nTHx0Gwr/ySdvrmaVTBxpP4TWqqVHGuTUlRp8
FsBHNNTTx5xFIfF0aDn4FumbzOrRhEFjkxP8Rx8c0EUz9o4TVODwn4kgjAIwvcymbxK6AUXh4F2W
aC0nEe1h4GCY34wurEFEeCr7/VVQnNYd8iir8X69EQikCIKCRKjU+Rk1mGmL7QYQgBdXaXWNeN/H
i8hGcRtqDLhs3YMiTsaabr5vbZCm429nZ//zOq8VW7gVLo01MomVYt9b637wKdXgtRWsUzd7BvuG
GoazDqiuQyYwYnFhzBBs9JN+cLcm+I/mIFVlY/d2mFUgT97Ts8THSGoskPLlO+HUjH75zphOT9S1
6ooIVjStsTq3CJZ10rawrNgO99RCXYAPcmONJw6Sed2DcUCtnhtWSTgau6bNrfTRgk3UKZ4On0jv
xCyEuRJBThZPn3yR2Uehr4ejwiVNH5TGcUQN89tfZDdnBfRmH7dKuV7VLSBAyF81mzNHmAuyBYCZ
6NUN6W1iZ1GiQW9De7daVipNmLTitU+nG+K4JgBi30tXAnbF2FPEttw9amA8q2NlNmrN9HDQNPlU
SM34freEDjw09XhvMmzZxrDXqjnwodgtkhqRFrZxY9IEfgZWQFWZplNRpd9iTouSLNXVVc3NX4Yq
jf7dKqb/J0H6JET5non0eRGenowfnDoCld8H5NsQysnd6hSF89EoeHFTX8KmvSuyr0deoYl8Fp4O
8QFVnvO0BmF8gW/eIa8Ar7PrbLqu8SosPG3MdaXoyLdplR3yecVbfjDude1wJEcNO1hlWrVezVBY
hhKmfl0oVY9AKNb2uTYvlnhWlkrFq35vUBrj5Pl6GwAa0+I3sIGgEkq03vKo9gCu6akPXRWKrcHg
MDagf6uiX/dSAWy15361zHHf7y3p33ZFv4bU/3q9XJVweNN9p4L7sG+tAEwJcL5E/yt1g1dm5/m1
p0Djmw4i1cta33Womk0zJbS2IvJa4SV51ICpL5fybD6TJjX8Xk/dVWS4sUDMX2VlfRPZajMY5LTA
QxdPXSopwjvzzdtU45JSTbjabeopBlim8eeqgp7Uc7pi6DBc1ziXz2LxZkAtF5r4t4kwHt/JLJxA
lYONOBt9uimVl/qAh8BInpTl85vsBoUEJCm4O6OT6Hq4O6CFvibPGmRNVKHTgXfI25yU6LWmSit8
cmqrLO2SZiL0XOqVa9vxmpVw90/TBk7gD9vPxCYtUG3L8rduZvhuD1DLR+38o9zxeExoZ8MKf1wO
wDQZJ7ArsRfTMq0uPU7B6mGEB29J92DAApa055cFPoGEXKUXG4/gWyYiPzdoRUhP3Fdz/1rdYYUG
chYIcF3qlWm5kgVJNrP4UL6zQCZTcXS3NpTYLSWrG40G25nXaUbkZs6ykXd9UZ/Rp4FRYzN3AZvC
IYO6L6gFaJJD9UZp3aL+43QFZIDuVWfq7I4IKrUw0NAHru4TEOzQkAGtaW1RsDb/Uzt5qFZ9GJij
jcbVUVF1fxiYWwrhC6wOGRbBONYi35BP+RJB5GpxGorSeVUEayTtOK+88igjavUhohr6MzCXdpnd
EE0fuC6MLeyHr30etgy8OVBvJvSYJ9HADHuyg9wNSCll6govS2CqyElypu5yEaOXdKU6czkhfeXB
Dz0bTBpE57AJgVOD9WTjErWbZ8UihWHiPgpgqtBwaTkjF1W2cXbATOt1OtcTgPJTymtA/kYj1KMt
8otLcoic5efnGVnjKJMtVyqj0cC4UBDL4ouYPVTZ4h7v8PLlZYbXMTOun1oAlffK7VcTzhyQFD16
qHxbjBUqbgmahAtSNOvjC9DGhrBISzgsK3TvK6Z5WisTPTUHRr51e2Ydh+rR6RnV1lsnnV+lN5WW
T+UIG2o6OjQU3mvH0Fx5clpRcnYaCFXDjcMeOW2dtgii5ottcNK/kDZYCCwsbiv8q/ZZiCpkwaXM
WB8A8+uvnC1A200o8zTYf1MxGE3rdBDQPiH1AmwONs8jv6FllpGzvLdm1SX0DFlU32BRcxXYUKR+
D9y1IasHtAss3uCAYBzBr9DlF21YatvBk+35DHekHnubBRbe9L3t/RINZUQHSSWpyorD48OmtGbR
h2Fgy7IW1liUl9ePuP8uEtxxvMtKGsVJ9/2ioopNsteJfl3L1/By7Jhii/1FtjcRGqQ0wfKzKF21
r1aT6e9xYimDPNUQn6ptfOaH8KwuX0
|
shirtsgroup/InterMol
|
intermol/forces/proper_periodic_dihedral_type.py
|
Python
|
mit
| 1,984 | 0.006552 |
import parmed.unit as units
from intermol.decorators import accepts_compatible_units
from intermol.forces.abstract_dihedral_type import AbstractDihedralType
class ProperPeriodicDihedralType(AbstractDihedralType):
__slots__ = ['phi', 'k', 'multiplicity', 'weight', 'improper']
@accepts_compatible_units(None, None, None, None,
phi=units.degrees,
k=units.kilojoules_per_mole,
multiplicity=units.dimensionless,
                              weight=units.dimensionless,
improper=None)
def __init__(self, bondingtype1, bondingtype2, bondingtype3, bondingtype4,
phi=0.0 * units.degrees,
k=0.0 * units.kilojoules_per_mole,
multiplicity=0.0 * units.dimensionless,
weight=0.0 * units.dimensionless,
improper=False):
AbstractDihedralType.__init__(self, bondingtype1, bondingtype2, bondingtype3, bondingtype4, improper)
self.phi = phi
self.k = k
self.multiplicity = multiplicity
self.weight = weight
class ProperPeriodicDihedral(ProperPeriodicDihedralType):
"""
    A proper periodic dihedral interaction between four bonded atoms.
"""
def __init__(self, atom1, atom2, atom3, atom4, bondingtype1=None, bondingtype2=None, bondingtype3=None, bondingtype4=None,
phi=0.0 * units.degrees,
k=0.0 * units.kilojoules_per_mole,
multiplicity=0.0 * units.dimensionless,
weight=0.0 * units.dimensionless,
improper=False):
self.atom1 = atom1
self.atom2 = atom2
self.atom3 = atom3
self.atom4 = atom4
ProperPeriodicDihedralType.__init__(self, bondingtype1, bondingtype2, bondingtype3, bondingtype4,
phi=phi,
k=k,
multiplicity=multiplicity,
weight=weight,
improper=improper)
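# A hedged example of constructing a type object; the atom-type names and
# force-field values below are illustrative only:
#
#     dihedral_type = ProperPeriodicDihedralType(
#         'CT', 'CT', 'CT', 'CT',
#         phi=180.0 * units.degrees,
#         k=4.6 * units.kilojoules_per_mole,
#         multiplicity=2 * units.dimensionless)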
|
privacyidea/privacyidea
|
privacyidea/api/validate.py
|
Python
|
agpl-3.0
| 27,219 | 0.001397 |
# -*- coding: utf-8 -*-
#
# http://www.privacyidea.org
# (c) cornelius kölbel, privacyidea.org
#
# 2020-01-30 Jean-Pierre Höhmann <jean-pierre.hohemann@netknights.it>
# Add WebAuthn token
# 2018-01-22 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add offline refill
# 2016-12-20 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add triggerchallenge endpoint
# 2016-10-23 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add subscription decorator
# 2016-09-05 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# SAML attributes on fail
# 2016-08-30 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# save client application type to database
# 2016-08-09 Cornelius Kölbel <cornelius@privacyidea.org>
# Add possibility to check OTP only
# 2015-11-19 Cornelius Kölbel <cornelius@privacyidea.org>
# Add support for transaction_id to saml_check
# 2015-06-17 Cornelius Kölbel <cornelius@privacyidea.org>
# Add policy decorator for API key requirement
# 2014-12-08 Cornelius Kölbel, <cornelius@privacyidea.org>
# Complete rewrite during flask migration
# Try to provide REST API
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__doc__ = """This module contains the REST API for doing authentication.
The methods are tested in the file tests/test_api_validate.py
Authentication is either done by providing a username and a password or a
serial number and a password.
**Authentication workflow**
Authentication workflow is like this:
In case of authenticating a user:
* :func:`privacyidea.lib.token.check_user_pass`
* :func:`privacyidea.lib.token.check_token_list`
* :func:`privacyidea.lib.tokenclass.TokenClass.authenticate`
* :func:`privacyidea.lib.tokenclass.TokenClass.check_pin`
* :func:`privacyidea.lib.tokenclass.TokenClass.check_otp`
In case if authenticating a serial number:
* :func:`privacyidea.lib.token.check_serial_pass`
* :func:`privacyidea.lib.token.check_token_list`
* :func:`privacyidea.lib.tokenclass.TokenClass.authenticate`
* :func:`privacyidea.lib.tokenclass.TokenClass.check_pin`
* :func:`privacyidea.lib.tokenclass.TokenClass.check_otp`
"""
from flask import (Blueprint, request, g, current_app)
from privacyidea.lib.user import get_user_from_param, log_used_user
from .lib.utils import send_result, getParam
from ..lib.decorators import (check_user_or_serial_in_request)
from .lib.utils import required
from privacyidea.lib.error import ParameterError
from privacyidea.lib.token import (check_user_pass, check_serial_pass,
check_otp, create_challenges_from_tokens, get_one_token)
from privacyidea.api.lib.utils import get_all_params
from privacyidea.lib.config import (return_saml_attributes, get_from_config,
return_saml_attributes_on_fail,
SYSCONF, ensure_no_config_object)
from privacyidea.lib.audit import getAudit
from privacyidea.api.lib.decorators import add_serial_from_response_to_g
from privacyidea.api.lib.prepolicy import (prepolicy, set_realm,
api_key_required, mangle,
save_client_application_type,
check_base_action, pushtoken_wait, webauthntoken_auth, webauthntoken_authz,
webauthntoken_request, check_application_tokentype)
from privacyidea.api.lib.postpolicy import (postpolicy,
check_tokentype, check_serial,
check_tokeninfo,
no_detail_on_fail,
no_detail_on_success, autoassign,
offline_info,
add_user_detail_to_response, construct_radius_response,
mangle_challenge_response, is_authorized)
from privacyidea.lib.policy import PolicyClass
from privacyidea.lib.event import EventConfiguration
import logging
from privacyidea.api.register import register_blueprint
from privacyidea.api.recover import recover_blueprint
from privacyidea.lib.utils import get_client_ip
from privacyidea.lib.event import event
from privacyidea.lib.challenge import get_challenges, extract_answered_challenges
from privacyidea.lib.subscriptions import CheckSubscription
from privacyidea.api.auth import admin_required
from privacyidea.lib.policy import ACTION
from privacyidea.lib.token import get_tokens
from privacyidea.lib.machine import list_machine_tokens
from privacyidea.lib.applications.offline import MachineApplication
import json
log = logging.getLogger(__name__)
validate_blueprint = Blueprint('validate_blueprint', __name__)
@validate_blueprint.before_request
@register_blueprint.before_request
@recover_blueprint.before_request
def before_request():
"""
This is executed before the request
"""
ensure_no_config_object()
request.all_data = get_all_params(request)
request.User = get_user_from_param(request.all_data)
privacyidea_server = current_app.config.get("PI_AUDIT_SERVERNAME") or \
request.host
# Create a policy_object, that reads the database audit settings
# and contains the complete policy definition during the request.
# This audit_object can be used in the postpolicy and prepolicy and it
# can be passed to the innerpolicies.
g.policy_object = PolicyClass()
g.audit_object = getAudit(current_app.config, g.startdate)
g.event_config = EventConfiguration()
# access_route contains the ip addresses of all clients, hops and proxies.
g.client_ip = get_client_ip(request, get_from_config(SYSCONF.OVERRIDECLIENT))
# Save the HTTP header in the localproxy object
g.request_headers = request.headers
g.serial = getParam(request.all_data, "serial", default=None)
g.audit_object.log({"success": False,
"action_detail": "",
"client": g.client_ip,
"client_user_agent": request.user_agent.browser,
"privacyidea_server": privacyidea_server,
"action": "{0!s} {1!s}".format(request.method, request.url_rule),
"info": ""})
@validate_blueprint.route('/offlinerefill', methods=['POST'])
@check_user_or_serial_in_request(request)
@event("validate_offlinerefill", request, g)
def offlinerefill():
"""
This endpoint allows to fetch new offline OTP values for a token,
that is already offline.
According to the definition it will send the missing OTP values, so that
the client will have as much otp values as defined.
:param serial: The serial number of the token, that should be refilled.
:param refilltoken: The authorization token, that allows refilling.
:param pass: the last password (maybe password+OTP) entered by the user
:return:
"""
serial = getParam(request.all_data, "serial", required)
refilltoken = getParam(request.all_data, "refilltoken", required)
password = getParam(request.all_data, "pass", required)
tokenobj_list = get_tokens(serial=serial)
if len(tokenobj_list) != 1:
raise ParameterError("The token does not exist")
else:
tokenobj = tokenobj_list[0]
tokenattachments = list_machine_tokens(serial=serial, application="offline")
if tokenattachme
|
CodeLionX/CommentSearchEngine
|
cse/WpApiParser.py
|
Python
|
mit
| 1,919 | 0.00938 |
from cse.util import Util
from collections import OrderedDict
from cse.pipeline import Handler
class WpApiParser(Handler):
def __init__(self):
        super().__init__()
def parse(self, comments, url, assetId, parentId):
data = self.__buildDataSkeleton(url, assetId)
data["comments"] = self.__iterateComments(comments, parentId)
return data
def __buildDataSkeleton(self, url, assetId):
return {
"article_url" : url,
"article_id" : assetId,
"comments" : None
}
def __iterateComments(self, comments, parentId=None):
commentList = OrderedDict()
for comment in comments:
votes = 0
for action_summary in comment["action_summaries"]:
if action_summary["__typename"] == "LikeActionSummary":
votes = action_summary["count"]
commentObject = {
"comment_author": comment["user"]["username"],
"comment_text" : comment["body"],
"timestamp" : comment["created_at"],
"parent_comment_id" : parentId,
"upvotes" : votes,
"downvotes": 0
}
commentList[comment["id"]] = commentObject
try:
commentReplies = self.__iterateComments(comment["replies"]["nodes"], comment["id"])
            except KeyError: # there may be a limit on the nesting level of comments on WP
commentReplies = {}
commentList.update(commentReplies)
return commentList
# inherited from cse.pipeline.Handler
def registeredAt(self, ctx):
pass
def process(self, ctx, data):
result = self.parse(
comments=data["comments"],
url=data["url"],
assetId=data["assetId"],
parentId=data["parentId"]
)
ctx.write(result)
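# A hedged standalone sketch (outside the pipeline context; the `nodes`
# structure is assumed to follow the comment-API shape iterated above):
#
#     parser = WpApiParser()
#     result = parser.parse(comments=nodes, url=article_url,
#                           assetId=asset_id, parentId=None)
#     # result["comments"] is an OrderedDict keyed by comment id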
|
MagicSolutions/cmsplugin-carousel
|
example/example/urls.py
|
Python
|
mit
| 582 | 0 |
from django.conf.urls.defaults import *
from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = i18n_patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('cms.urls')),
)
if settings.DEBUG:
urlpatterns = patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
url(r'', include('django.contrib.staticfiles.urls')),
) + urlpatterns
|
SeattleCentral/ITC110
|
examples/lecture04b.py
|
Python
|
mit
| 508 | 0 |
def change_counter():
print("Determine the value of your pocket change.")
quarters = int(input("E
|
nter the number of quarters: "))
dimes = int(input("Enter the number of dimes: "))
nickels = int(input("Enter the number of nickels: "))
pennies = int(input("Enter the number of pennies: "))
    value = quarters * 0.25 + dimes * 0.10 + nickels * 0.05 + pennies * 0.01
print("You have ${0} in pocket change!".format(round(value, 2)))
if __name__ == '__main__':
change_counter()
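# Example session (illustrative): 4 quarters, 2 dimes, 1 nickel and 3 pennies
# prints "You have $1.28 in pocket change!"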
|
rvsingh011/NitK_Assignments
|
Sem1/Algorithm/Factorial_iter.py
|
Python
|
mit
| 1,055 | 0.001896 |
def fact_iter(n):
"""This function will find the Factorial of the given number by iterative
method. This function is coded in Pyhton 3.5."""
# check for integer
if not isinstance(n, int):
raise TypeError("Please only enter integer")
if n <= 0:
raise ValueError("Kindly Enter positive integer only ")
temp = 1
for num in range(1,n):
temp += temp * num
    return temp
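# For example, fact_iter(5) and fact_recu(5) both return 120.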
def fact_recu(n):
# check for integer
if not isinstance(n, int):
raise TypeError("Please only enter integer")
if n <= 0:
raise ValueError("Kindly Enter positive integer only ")
if n == 1:
return 1
else:
return n * fact_recu(n-1)
if __name__ == "__main__":
print("""Enter your choice
1 - factorial Iterative
2 - factorial Recursive""")
choice = int(input())
print("Enter the number")
number = int(input())
if choice == 1:
number = fact_iter(number)
if choice == 2:
number = fact_recu(number)
print(number)
|
gutooliveira/progScript
|
tekton/backend/apps/quero_app/model.py
|
Python
|
mit
| 346 | 0.00289 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from google.appengine.ext import ndb
from gaegraph.model import Node
from gaeforms.ndb import property
class Quero(Node):
item = ndb.StringProperty(required=True)
nome = ndb.StringProperty(required=True)
descricao = ndb.StringProperty(required=True)
|
leppa/home-assistant
|
homeassistant/components/season/sensor.py
|
Python
|
apache-2.0
| 4,134 | 0.000242 |
"""Support for tracking which astronomical or meteorological season it is."""
from datetime import datetime
import logging
import ephem
import voluptuous as vol
from homeassistant import util
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, CONF_TYPE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Season"
EQUATOR = "equator"
NORTHERN = "northern"
SOUTHERN = "southern"
STATE_AUTUMN = "autumn"
STATE_SPRING = "spring"
STATE_SUMMER = "summer"
STATE_WINTER = "winter"
TYPE_ASTRONOMICAL = "astronomical"
TYPE_METEOROLOGICAL = "meteorological"
VALID_TYPES = [TYPE_ASTRONOMICAL, TYPE_METEOROLOGICAL]
HEMISPHERE_SEASON_SWAP = {
STATE_WINTER: STATE_SUMMER,
STATE_SPRING: STATE_AUTUMN,
STATE_AUTUMN: STATE_SPRING,
STATE_SUMMER: STATE_WINTER,
}
SEASON_ICONS = {
STATE_SPRING: "mdi:flower",
STATE_SUMMER: "mdi:sunglasses",
STATE_AUTUMN: "mdi:leaf",
STATE_WINTER: "mdi:snowflake",
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_TYPE, default=TYPE_ASTRONOMICAL): vol.In(VALID_TYPES),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Display the current season."""
if None in (hass.config.latitude, hass.config.longitude):
_LOGGER.error("Latitude or longitude not set in Home Assistant config")
return False
latitude = util.convert(hass.config.latitude, float)
_type = config.get(CONF_TYPE)
name = config.get(CONF_NAME)
if latitude < 0:
hemisphere = SOUTHERN
elif latitude > 0:
hemisphere = NORTHERN
else:
hemisphere = EQUATOR
_LOGGER.debug(_type)
add_entities([Season(hass, hemisphere, _type, name)])
return True
def get_season(date, hemisphere, season_tracking_type):
"""Calculate the current season."""
if hemisphere == "equator":
return None
if season_tracking_type == TYPE_ASTRONOMICAL:
spring_start = ephem.next_equinox(str(date.year)).datetime()
summer_start = ephem.next_solstice(str(date.year)).datetime()
autumn_start = ephem.next_equinox(spring_start).datetime()
winter_start = ephem.next_solstice(summer_start).datetime()
else:
spring_start = datetime(2017, 3, 1).replace(year=date.year)
summer_start = spring_start.replace(month=6)
autumn_start = spring_start.replace(month=9)
winter_start = spring_start.replace(month=12)
if spring_start <= date < summer_start:
season = STATE_SPRING
elif summer_start <= date < autumn_start:
season = STATE_SUMMER
elif autumn_start <= date < winter_start:
season = STATE_AUTUMN
elif winter_start <= date or spring_start > date:
season = STATE_WINTER
# If user is located in the southern hemisphere swap the season
if hemisphere == NORTHERN:
return season
return HEMISPHERE_SEASON_SWAP.get(season)
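# For example, get_season(datetime(2021, 7, 1), NORTHERN, TYPE_METEOROLOGICAL)
# returns "summer", while the same date for SOUTHERN returns "winter".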
class Season(Entity):
"""Representation of the current season."""
def __init__(self, hass, hemisphere, season_tracking_type, name):
"""Initialize the season."""
self.hass = hass
self._name = name
self.hemisphere = hemisphere
self.datetime = dt_util.utcnow().replace(tzinfo=None)
self.type = season_tracking_type
        self.season = get_season(self.datetime, self.hemisphere, self.type)
@property
def name(self):
"""Return the name."""
return self._name
@property
def state(self):
"""Return the current season."""
return self.season
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return SEASON_ICONS.get(self.season, "mdi:cloud")
def update(self):
"""Update season."""
self.datetime = dt_util.utcnow().replace(tzinfo=None)
self.season = get_season(self.datetime, self.hemisphere, self.type)
|
Irides-Chromium/cipher
|
scale_strict.py
|
Python
|
gpl-3.0
| 1,750 | 0.013714 |
#!/usr/bin/python3
### rev: 5.0
### author: <zhq>
### features:
### errors included
### up to 63 bases (2 to 64)
### caps recognition and same output format (deprecated)
### for the function parameters, `cur` represents the current (input) base, `res` represents the result (output) base, and `num` represents the current (input) number.
def scale(cur, res, num):
# int, int, str -> str
# Default Settings
num = str(num)
iscaps = False
positive = True
# Input
if cur == res: return num
if num == "0": return "0"
assert cur in range(2, 65) and res in range(2, 65), "Base not defined."
if num[0] == "-":
        positive = False
num = num[1:]
result = 0
unit = 1
if cur != 10:
for i in num[::-1]:
value = ord(i)
if value in range(48, 58): value -= 48
elif value in range(65, 92): value -= 55
elif value in range(97, 123): value -= 61
elif value == 64: value = 62
elif value == 95: value = 63
            assert value < cur, "Digit too large for original base. v:%d(%s) b:%d\nCall: scale(%d, %d, %s)" % (value, i, cur, cur, res, num)
result += value * unit
unit *= cur
result = str(result)
# Output
if res != 10:
num = int(result or num)
result = ""
while num > 0:
num, value = divmod(num, res)
if value < 10: digit = value + 48
elif value < 36: digit = value + 55
elif value < 62: digit = value + 61
elif value == 62: digit = 64
elif value == 63: digit = 95
result = chr(digit) + result
if not positive: result = "-" + result
return result
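# A couple of illustrative checks (not part of the original script):
#
#     scale(10, 2, "10")        -> "1010"
#     scale(2, 16, "11111111")  -> "FF"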
|
xingyepei/edx-platform
|
lms/djangoapps/mobile_api/users/serializers.py
|
Python
|
agpl-3.0
| 4,342 | 0.001152 |
"""
Serializer for user API
"""
from rest_framework import serializers
from rest_framework.reverse import reverse
from django.template import defaultfilters
from courseware.access import has_access
from student.models import CourseEnrollment, User
from certificates.models import certificate_status_for_student, CertificateStatuses
from xmodule.course_module import DEFAULT_START_DATE
class CourseOverviewField(serializers.RelatedField):
"""Custom field to wrap a CourseDescriptor object. Read-only."""
def to_representation(self, course_overview):
course_id = unicode(course_overview.id)
request = self.context.get('request', None)
if request:
video_outline_url = reverse(
'video-summary-list',
kwargs={'course_id': course_id},
request=request
)
course_updates_url = reverse(
'course-updates-list',
kwargs={'course_id': course_id},
request=request
)
course_handouts_url = reverse(
'course-handouts-list',
kwargs={'course_id': course_id},
request=request
)
discussion_url = reverse(
'discussion_course',
kwargs={'course_id': course_id},
request=request
) if course_overview.is_discussion_tab_enabled() else None
else:
video_outline_url = None
course_updates_url = None
course_handouts_url = None
discussion_url = None
if course_overview.advertised_start is not None:
start_type = "string"
start_display = course_overview.advertised_start
elif course_overview.start != DEFAULT_START_DATE:
start_type = "timestamp"
start_display = defaultfilters.date(course_overview.start, "DATE_FORMAT")
else:
start_type = "empty"
start_display = None
return {
"id": course_id,
"name": course_overview.display_name,
"number": course_overview.display_number_with_default,
"org": course_overview.display_org_with_default,
"start": course_overview.start,
"start_display": start_display,
"start_type": start_type,
"end": course_overview.end,
"course_image": course_overview.course_image_url,
"social_urls": {
"facebook": course_overview.facebook_url,
},
"latest_updates": {
"video": None
},
"video_outline": video_outline_url,
"course_updates": course_updates_url,
"course_handouts": course_handouts_url,
"discussion_url": discussion_url,
"subscription_id": course_overview.clean_id(padding_char='_'),
"courseware_access": has_access(request.user, 'load_mobile', course_overview).to_json() if request else None
}
class CourseEnrollmentSerializer(serializers.ModelSerializer):
"""
Serializes CourseEnrollment models
"""
course = CourseOverviewField(source="course_overview", read_only=True)
certificate = serializers.SerializerMethodField()
def get_certificate(self, model):
"""Returns the information about the user's certificate in the course."""
certificate_info = certificate_status_for_student(model.user, model.course_id)
if certificate_info['status'] == CertificateStatuses.downloadable:
return {
"url": certificate_info['download_url'],
}
else:
return {}
class Meta(object):
model = CourseEnrollment
fields = ('created', 'mode', 'is_active', 'course', 'certificate')
lookup_field = 'username'
class UserSerializer(serializers.HyperlinkedModelSerializer):
"""
Serializes User models
"""
    name = serializers.ReadOnlyField(source='profile.name')
course_enrollments = serializers.HyperlinkedIdentityField(
view_name='courseenrollment-detail',
lookup_field='username'
)
    class Meta(object):
model = User
fields = ('id', 'username', 'email', 'name', 'course_enrollments')
lookup_field = 'username'
|
odubno/microblog
|
venv/lib/python2.7/site-packages/migrate/tests/versioning/test_cfgparse.py
|
Python
|
bsd-3-clause
| 1,112 | 0.004496 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from migrate.versioning import cfgparse
from migrate.versioning.repository import *
from migrate.versioning.template import Template
from migrate.tests import fixture
class TestConfigParser(fixture.Base):
def test_to_dict(self):
"""Correctly interpret config results as dictionaries"""
parser = cfgparse.Parser(dict(default_value=42))
self.assertTrue(len(parser.sections()) == 0)
parser.add_section('section')
        parser.set('section','option','value')
self.assertEqual(parser.get('section', 'option'), 'value')
self.assertEqual(parser.to_dict()['section']['option'], 'value')
def test_table_config(self):
"""We should be able to specify the table to be used with a repository"""
default_text = Repository.prepare_config(Template().get_repository(),
'repository_name', {})
specified_text = Repository.prepare_config(Template().get_repository(),
'repository_name', {'version_table': '_other_table'})
self.assertNotEqual(default_text, specified_text)
|
saeki-masaki/cinder
|
cinder/volume/drivers/srb.py
|
Python
|
apache-2.0
| 33,594 | 0.000179 |
# Copyright (c) 2014 Scality
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Volume driver for the Scality REST Block storage system
This driver provisions Linux SRB volumes leveraging RESTful storage platforms
(e.g. Scality CDMI).
"""
import contextlib
import functools
import re
import sys
import time
from oslo_concurrency import lockutils
from oslo_concurrency import processutils as putils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import units
import six
from six.moves import range
from cinder.brick.local_dev import lvm
from cinder import exception
from cinder.i18n import _, _LI, _LE, _LW
from cinder.image import image_utils
from cinder import utils
from cinder.volume import driver
from cinder.volume import utils as volutils
LOG = logging.getLogger(__name__)
srb_opts = [
cfg.StrOpt('srb_base_urls',
default=None,
help='Comma-separated list of REST servers IP to connect to. '
'(eg http://IP1/,http://IP2:81/path'),
]
CONF = cfg.CONF
CONF.register_opts(srb_opts)
ACCEPTED_REST_SERVER = re.compile(r'^http://'
'(\d{1,3}\.){3}\d{1,3}'
'(:\d+)?/[a-zA-Z0-9\-_\/]*$')
class retry(object):
SLEEP_NONE = 'none'
SLEEP_DOUBLE = 'double'
SLEEP_INCREMENT = 'increment'
def __init__(self, exceptions, count, sleep_mechanism=SLEEP_INCREMENT,
sleep_factor=1):
if sleep_mechanism not in [self.SLEEP_NONE,
self.SLEEP_DOUBLE,
self.SLEEP_INCREMENT]:
raise ValueError('Invalid value for `sleep_mechanism` argument')
self._exceptions = exceptions
self._count = count
self._sleep_mechanism = sleep_mechanism
self._sleep_factor = sleep_factor
def __call__(self, fun):
        func_name = fun.__name__
@functools.wraps(fun)
def wrapped(*args, **kwargs):
sleep_time = self._sleep_factor
exc_info = None
for attempt in range(self._count):
if attempt != 0:
LOG.warning(_LW('Retrying failed call to %(func)s, '
'attempt %(attempt)i.'),
{'func': func_name,
'attempt': attempt})
try:
return fun(*args, **kwargs)
except self._exceptions:
exc_info = sys.exc_info()
if attempt != self._count - 1:
if self._sleep_mechanism == self.SLEEP_NONE:
continue
elif self._sleep_mechanism == self.SLEEP_INCREMENT:
time.sleep(sleep_time)
sleep_time += self._sleep_factor
elif self._sleep_mechanism == self.SLEEP_DOUBLE:
time.sleep(sleep_time)
sleep_time *= 2
else:
raise ValueError('Unknown sleep mechanism: %r'
% self._sleep_mechanism)
six.reraise(exc_info[0], exc_info[1], exc_info[2])
return wrapped
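# A hedged usage sketch of the retry decorator above (the decorated function
# is illustrative, not from this module):
#
#     @retry(putils.ProcessExecutionError, count=3,
#            sleep_mechanism=retry.SLEEP_DOUBLE, sleep_factor=1)
#     def _attach_device():
#         ...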
class LVM(lvm.LVM):
def activate_vg(self):
"""Activate the Volume Group associated with this instantiation.
:raises: putils.ProcessExecutionError
"""
cmd = ['vgchange', '-ay', self.vg_name]
try:
self._execute(*cmd,
root_helper=self._root_helper,
run_as_root=True)
except putils.ProcessExecutionError as err:
LOG.exception(_LE('Error activating Volume Group'))
LOG.error(_LE('Cmd :%s'), err.cmd)
LOG.error(_LE('StdOut :%s'), err.stdout)
LOG.error(_LE('StdErr :%s'), err.stderr)
raise
def deactivate_vg(self):
"""Deactivate the Volume Group associated with this instantiation.
        This forces LVM to release any reference to the device.
:raises: putils.ProcessExecutionError
"""
cmd = ['vgchange', '-an', self.vg_name]
try:
self._execute(*cmd,
root_helper=self._root_helper,
run_as_root=True)
except putils.ProcessExecutionError as err:
LOG.exception(_LE('Error deactivating Volume Group'))
LOG.error(_LE('Cmd :%s'), err.cmd)
LOG.error(_LE('StdOut :%s'), err.stdout)
LOG.error(_LE('StdErr :%s'), err.stderr)
raise
def destroy_vg(self):
"""Destroy the Volume Group associated with this instantiation.
:raises: putils.ProcessExecutionError
"""
cmd = ['vgremove', '-f', self.vg_name]
try:
self._execute(*cmd,
root_helper=self._root_helper,
run_as_root=True)
except putils.ProcessExecutionError as err:
LOG.exception(_LE('Error destroying Volume Group'))
LOG.error(_LE('Cmd :%s'), err.cmd)
LOG.error(_LE('StdOut :%s'), err.stdout)
LOG.error(_LE('StdErr :%s'), err.stderr)
raise
def pv_resize(self, pv_name, new_size_str):
"""Extend the size of an existing PV (for virtual PVs).
:raises: putils.ProcessExecutionError
"""
try:
self._execute('pvresize',
'--setphysicalvolumesize', new_size_str,
pv_name,
root_helper=self._root_helper,
run_as_root=True)
except putils.ProcessExecutionError as err:
LOG.exception(_LE('Error resizing Physical Volume'))
LOG.error(_LE('Cmd :%s'), err.cmd)
LOG.error(_LE('StdOut :%s'), err.stdout)
LOG.error(_LE('StdErr :%s'), err.stderr)
raise
def extend_thin_pool(self):
"""Extend the size of the thin provisioning pool.
This method extends the size of a thin provisioning pool to 95% of the
size of the VG, if the VG is configured as thin and owns a thin
provisioning pool.
:raises: putils.ProcessExecutionError
"""
if self.vg_thin_pool is None:
return
new_size_str = self._calculate_thin_pool_size()
try:
self._execute('lvextend',
'-L', new_size_str,
"%s/%s-pool" % (self.vg_name, self.vg_name),
root_helper=self._root_helper,
run_as_root=True)
except putils.ProcessExecutionError as err:
LOG.exception(_LE('Error extending thin provisioning pool'))
LOG.error(_LE('Cmd :%s'), err.cmd)
LOG.error(_LE('StdOut :%s'), err.stdout)
LOG.error(_LE('StdErr :%s'), err.stderr)
raise
@contextlib.contextmanager
def patched(obj, attr, fun):
'''Context manager to locally patch a method.
Within the managed context, the `attr` method of `obj` will be replaced by
a method which calls `fun` passing in the original `attr` attribute of
`obj` as well as any positional and keyword arguments.
At the end of the context, the original method is restored.
'''
orig = getattr(obj, attr)
def patch(*args, **kwargs):
return fun(orig, *args, **kwargs)
setattr(obj, attr, patch)
try:
yield
finally:
        setattr(obj, attr, orig)
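# A hedged example of patched() (the object and method names are illustrative):
#
#     with patched(vg, 'deactivate_vg', lambda orig, *a, **kw: None):
#         ...  # calls to vg.deactivate_vg() are no-ops inside this block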
|
halexan/Headquarters
|
src/headquarters/packet/stream_parser.py
|
Python
|
mit
| 2,384 | 0 |
# Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2013 YAMAMOTO Takashi <yamamoto at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABCMeta, abstractmethod
import six
@six.add_metaclass(ABCMeta)
class StreamParser(object):
"""Streaming parser base class.
    An instance of a subclass of this class is used to extract messages
from a raw byte stream.
It's designed to be used for data read from a transport which doesn't
preserve message boundaries. A typical example of such a transport
is TCP.
"""
class TooSmallException(Exception):
pass
def __init__(self):
self._q = bytearray()
def parse(self, data):
"""Tries to extract messages from a raw byte stream.
The data argument would be python bytes newly read from the input
stream.
Returns an ordered list of extracted messages.
It can be an empty list.
The rest of data which doesn't produce a complete message is
kept internally and will be used when more data is come.
I.e. next time this method is called again.
"""
        self._q += data  # append bytes; bytearray.append() only takes a single int
msgs = []
while True:
try:
msg, self._q = self.try_parse(self._q)
except self.TooSmallException:
break
msgs.append(msg)
return msgs
@abstractmethod
def try_parse(self, q):
"""Try to extract a message from the given bytes.
This is an override point for subclasses.
This method tries to extract a message from bytes given by the
argument.
Raises TooSmallException if the given data is not enough to
extract a complete message but there's still a chance to extract
a message if more data is come later.
"""
pass
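# A minimal runnable sketch of a subclass, assuming a 2-byte big-endian
# length-prefix framing (not part of the original file):
import struct

class LengthPrefixedParser(StreamParser):
    def try_parse(self, q):
        HEADER = 2  # bytes of length prefix
        if len(q) < HEADER:
            raise self.TooSmallException()
        (length,) = struct.unpack_from('!H', bytes(q))
        if len(q) < HEADER + length:
            raise self.TooSmallException()
        # Split one framed message off the front of the queue.
        msg = bytes(q[HEADER:HEADER + length])
        return msg, q[HEADER + length:]

# For example, LengthPrefixedParser().parse(b'\x00\x03abc\x00') returns
# [b'abc'] and keeps the trailing byte buffered for the next call.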
|
BurntSushi/pdoc
|
test/testdata/demopackage2.py
|
Python
|
unlicense
| 70 | 0 |
"""I'm a different package, but I'm in demopack
|
age.__all__!"""
x = 42
| |
pkimber/compose
|
compose/migrations/0011_auto_20151026_1203.py
|
Python
|
apache-2.0
| 401 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('compose', '0010_auto_20151026_1126'),
]
operations = [
migrations.RenameField(
model_name='slideshow',
old_name='slideshow',
new_name='temp_slideshow',
),
]
|
nttks/edx-platform
|
lms/djangoapps/ga_instructor_task/api.py
|
Python
|
agpl-3.0
| 1,136 | 0.001761 |
# -*- coding: utf-8 -*-
"""
API for submitting background tasks by an instructor for a course.
Also includes methods for getting information about tasks that have
already been submitted, filtered either by running state or input
arguments.
"""
from ga_instructor_task.tasks import (
generate_score_detail_report,
generate_playback_status_report,
)
from instructor_task.api_helper import submit_task
def submit_generate_score_detail_report(request, course_key):
"""
Submits a task to generate a CSV score detail report.
"""
task_type = 'generate_score_detail_report'
task_class = generate_score_detail_report
task_input = {}
task_key = ""
    return submit_task(request, task_type, task_class, course_key, task_input, task_key)
def submit_generate_playback_status_report(request, course_key):
"""
    Submits a task to generate a CSV playback status report.
"""
task_type = 'generate_playback_status_report'
task_class = generate_playback_status_report
task_input = {}
task_key = ""
return submit_task(request, task_type, task_class, course_key, task_input, task_key)
|
simonreich/coffeedatabase
|
lib/ckeyboard.py
|
Python
|
gpl-3.0
| 17,891 | 0.00218 |
# -*- coding: utf-8 -*-
"""
This file is part of coffeedatabase.
coffeedatabase is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
coffeedatabase is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with coffeedatabase.. If not, see <http://www.gnu.org/licenses/>.
"""
# system
import readline
import datetime
import configparser
# coffeedatabase
from lib import cuser
from lib import cpayment
from lib import citem
from lib import cdatabase
from lib import cprice
from lib import cbalance
# Completer Class
# For further reference please see
# https://stackoverflow.com/questions/7821661/how-to-code-autocompletion-in-python
class MyCompleter(object): # Custom completer
def __init__(self, options):
self.options = sorted(options)
def complete(self, text, state):
if state == 0: # on first trigger, build possible matches
if text: # cache matches (entries that start with entered text)
self.matches = [s for s in self.options
if text in s]
else: # no text entered, all matches possible
self.matches = self.options[:]
# return match indexed by state
try:
return self.matches[state]
except IndexError:
return None
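# A hedged wiring example for the completer (the word list is illustrative):
#
#     completer = MyCompleter(["alice", "bob", "carol"])
#     readline.set_completer(completer.complete)
#     readline.parse_and_bind("tab: complete")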
class ckeyboard:
def __init__(self):
# First, load the config
config = configparser.ConfigParser()
config.sections()
config.read('config.ini')
if not ('FILENAME' in config) or not ('LIST' in config):
print("Broken config file \"config.ini\".")
raise
self.fileUser = config['FILENAME']['fileUser']
self.filePayment = config['FILENAME']['filePayment']
self.fileItem = config['FILENAME']['fileItem']
self.fileMarks = config['FILENAME']['fileMarks']
self.filePrice = config['FILENAME']['filePrice']
self.inactiveMonths = config['LIST']['inactiveMonths']
self.fileTemplateBalanceMonth = config['FILENAME']['fileTemplateBalanceMonth']
self.fileOutBalanceMonth = config['FILENAME']['fileOutBalanceMonth']
self.fileTemplateListMonth = config['FILENAME']['fileTemplateListMonth']
self.fileOutListMonth = config['FILENAME']['fileOutListMonth']
self.fileOutFolder = config['FILENAME']['fileOutFolder']
if (self.fileUser == "") or \
(self.filePayment == "") or \
(self.fileMarks == "") or \
(self.filePrice == "") or \
(self.fileItem == ""):
print("Broken config file \"config.ini\".")
raise
# create databases, if they do not exist.
database = cdatabase.cdatabase(self.fileUser, self.filePayment, self.fileItem, self.fileMarks, self.filePrice)
self.user = cuser.cuser(self.fileUser, self.inactiveMonths)
self.payment = cpayment.cpayment(self.filePayment, self.user)
self.item = citem.citem(self.fileItem, self.fileMarks, self.user)
self.price = cprice.cprice(self.filePrice, self.item)
self.balance = cbalance.cbalance(self.user, self.payment, self.price, self.item, self.inactiveMonths, self.fileTemplateBalanceMonth, self.fileOutBalanceMonth, self.fileTemplateListMonth, self.fileOutListMonth, self.fileOutFolder)
def inputStandard(self, valueDescription, valueStandard):
""" Displays an input field, nicely formatted. If valueDescription contains \"Name\" or \"name\", autocompletion for the name database will be activated.
valueDescription: List of description for input values.
valueStandard: List of standard values.
"""
        if len(valueDescription) != len(valueStandard):
print("Input vector", valueDescription, "has not the same length as standard value vector", valueStandard)
raise
counter = 0
for description in valueDescription:
if description.lower() == "status":
# display special user input field
print("New status:")
print("1 - active")
print("2 - auto")
print("3 - inactive")
textInput = input(str(description) + " [" + valueStandard[counter] + "]: ")
if textInput == "":
textInput = valueStandard[counter]
if textInput == "1" or textInput == "active":
valueStandard[counter] = "active"
elif textInput == "2" or textInput == "auto":
valueStandard[counter] = "auto"
elif textInput == "3" or textInput == "inactive":
valueStandard[counter] = "inactive"
else:
print("The input " + str(textInput) + " was not understood. Please use 1, 2, or 3, active, auto, or inactive.")
raise
else:
if not valueStandard[counter] == "":
textInput = input(str(description) + " [" + valueStandard[counter] + "]: ")
else:
textInput = input(str(description) + ": ")
if not textInput == "":
valueStandard[counter] = textInput
counter += 1
return valueStandard
    def userAdd(self):
        """ Adds a user to the user database.
        """
        userDescription = ["Name", "Mail"]
        userStandard = ["", "institut@gwdg.de"]
        inputUser = self.inputStandard(userDescription, userStandard)
        inputUser.append("active")
        self.user.userAdd(inputUser)
        # Make a dummy payment
        now = datetime.datetime.now()
        year = now.strftime("%Y")
        month = now.strftime("%m")
        day = now.strftime("%d")
        user = self.user.getRowByName(inputUser[1], 1)
        payment = [user[0], year, month, day, 0]
        self.payment.paymentAdd(payment)
        # Make dummy marks
        mark = [user[0], year, month, day, 0]
        for _marks in self.item.marks:
            _marks.marksAdd(mark)
        return 0

    def userChangeInfo(self):
        """ Displays user information and allows changing it.
        """
        user = self.getRowByTextname(self.user.getNamelist(), self.user)
        # remove id
        userId = user[0]
        del user[0]
        print("")
        userDescription = ["Name", "Mail", "Status"]
        inputUser = self.inputStandard(userDescription, user)
        # add user id
        inputUser.insert(0, userId)
        # save in database
        self.user.setUser(inputUser)
        return 0

    def paymentAdd(self):
        """ Adds a payment to the payment database.
        """
        user = self.getRowByTextname(self.user.getNamelist(), self.user)
        # create dates
        now = datetime.datetime.now()
        year = now.strftime("%Y")
        month = now.strftime("%m")
        day = now.strftime("%d")
        payment1 = [user[0], int(year), int(month), int(day)]
        print("")
        userDescription = ["Payment"]
        payment2 = [""]
        inputUser = self.inputStandard(userDescription, payment2)
        # fill payment
        payment = payment1 + payment2
        # save in database
        self.payment.paymentAdd(payment)
        # print new balance
        self.payment.getDataBinMonth()
        self.balance.getDataBinMonth()
        self.balance.getBalance(user[0])
        return 0

    def itemAdd(self):
        """ Adds an item to the item database.
        """
        itemDescription = ["Name", "Unit"]
        itemStandard = ["Coffee", "per cup"]
        inputItem = self.inputStandard(itemDescription, itemStandard)
        inputItem.append("active")
| tommy-u/enable | kiva/pdf.py | Python | bsd-3-clause | 25,949 | 0.000077 |
# Copyright (c) 2005-2014, Enthought, Inc.
# some parts copyright 2002 by Space Telescope Science Institute
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
""" PDF implementation of the core2d drawing library
:Author: Eric Jones, Enthought, Inc., eric@enthought.com
:Copyright: Space Telescope Science Institute
:License: BSD Style
The PDF implementation relies heavily on the ReportLab project.
"""
from __future__ import absolute_import, print_function
# standard library imports
from itertools import izip
import warnings
import copy
from numpy import array, pi
# ReportLab PDF imports
import reportlab.pdfbase.pdfmetrics
import reportlab.pdfbase._fontdata
from reportlab.pdfgen import canvas
# local, relative Kiva imports
from .arc_conversion import arc_to_tangent_points
from .basecore2d import GraphicsContextBase
from .line_state import is_dashed
from .constants import FILL, STROKE, EOF_FILL
import kiva.constants as constants
import kiva.affine as affine
cap_style = {}
cap_style[constants.CAP_ROUND] = 1
cap_style[constants.CAP_SQUARE] = 2
cap_style[constants.CAP_BUTT] = 0
join_style = {}
join_style[constants.JOIN_ROUND] = 1
join_style[constants.JOIN_BEVEL] = 2
join_style[constants.JOIN_MITER] = 0
# stroke, fill, mode
path_mode = {}
path_mode[constants.FILL_STROKE] = (1, 1, canvas.FILL_NON_ZERO)
path_mode[constants.FILL] = (0, 1, canvas.FILL_NON_ZERO)
path_mode[constants.EOF_FILL] = (0, 1, canvas.FILL_EVEN_ODD)
path_mode[constants.STROKE] = (1, 0, canvas.FILL_NON_ZERO)
path_mode[constants.EOF_FILL_STROKE] = (1, 1, canvas.FILL_EVEN_ODD)
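# For example, path_mode[constants.FILL_STROKE] unpacks as
#   stroke, fill, mode = (1, 1, canvas.FILL_NON_ZERO)
# i.e. "stroke and fill using the non-zero winding rule"; the drawing code
# that consumes this table lives further down in the original file.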
# fixme: I believe this can be implemented but for now, it is not.
class CompiledPath(object):
pass
class GraphicsContext(GraphicsContextBase):
"""
Simple wrapper around a PDF graphics context.
"""
def __init__(self, pdf_canvas, *args, **kwargs):
from .image import GraphicsContext as GraphicsContextImage
self.gc = pdf_canvas
self.current_pdf_path = None
self.current_point = (0, 0)
self.text_xy = None, None
# get an agg backend to assist in measuring text
        self._agg_gc = GraphicsContextImage((1, 1))
super(GraphicsContext, self).__init__(self, *args, **kwargs)
# ----------------------------------------------------------------
# Coordinate Transform Matrix Manipulation
# ----------------------------------------------------------------
def scale_ctm(self, sx, sy):
"""
scale_ctm(sx: float, sy: float) -> None
Sets the coordinate system scale to the given values, (sx, sy).
"""
self.gc.scale(sx, sy)
def translate_ctm(self, tx, ty):
"""
translate_ctm(tx: float, ty: float) -> None
        Translates the coordinate system by the given values (tx, ty).
"""
self.gc.translate(tx, ty)
def rotate_ctm(self, angle):
"""
rotate_ctm(angle: float) -> None
Rotates the coordinate space by the given angle (in radians).
"""
self.gc.rotate(angle * 180 / pi)
def concat_ctm(self, transform):
"""
concat_ctm(transform: affine_matrix)
Concatenates the transform to current coordinate transform matrix.
transform is an affine transformation matrix (see kiva.affine_matrix).
"""
self.gc.transform(transform)
def get_ctm(self):
""" Returns the current coordinate transform matrix.
XXX: This should really return a 3x3 matrix (or maybe an affine
object?) like the other API's. Needs thought.
"""
return affine.affine_from_values(*copy.copy(self.gc._currentMatrix))
def set_ctm(self, transform):
""" Set the coordinate transform matrix
"""
# We have to do this by inverting the current state to zero it out,
# then transform by desired transform, as Reportlab Canvas doesn't
# provide a method to directly set the ctm.
current = self.get_ctm()
self.concat_ctm(affine.invert(current))
self.concat_ctm(transform)
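        # Sketch of the algebra used above (C = current matrix, T = target):
        # concatenating first inv(C) and then T turns the CTM into
        #   C * inv(C) * T == T
        # so the net effect is "set the CTM to T" without a direct setter.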
# ----------------------------------------------------------------
# Save/Restore graphics state.
# ----------------------------------------------------------------
def save_state(self):
""" Saves the current graphic's context state.
Always pair this with a `restore_state()`
"""
self.gc.saveState()
def restore_state(self):
""" Restores the previous graphics state.
"""
self.gc.restoreState()
# ----------------------------------------------------------------
# Manipulate graphics state attributes.
# ----------------------------------------------------------------
def set_should_antialias(self, value):
""" Sets/Unsets anti-aliasing for bitmap graphics context.
"""
msg = "antialias is not part of the PDF canvas. Should it be?"
raise NotImplementedError(msg)
def set_line_width(self, width):
""" Sets the line width for drawing
Parameters
----------
width : float
The new width for lines in user space units.
"""
self.gc.setLineWidth(width)
def set_line_join(self, style):
""" Sets style for joining lines in a drawing.
style : join_style
The line joining style. The available
styles are JOIN_ROUND, JOIN_BEVEL, JOIN_MITER.
"""
try:
sjoin = join_style[style]
except KeyError:
msg = "Invalid line join style. See documentation for valid styles"
raise ValueError(msg)
self.gc.setLineJoin(sjoin)
def set_miter_limit(self, limit):
""" Specifies limits on line lengths for mitering line joins.
If line_join is set to miter joins, the limit specifies which
line joins should actually be mitered. If lines aren't mitered,
they are joined with a bevel. The line width is divided by
the length of the miter. If the result is greater than the
limit, the bevel style is used.
Parameters
----------
limit : float
limit for mitering joins.
"""
self.gc.setMiterLimit(limit)
def set_line_cap(self, style):
""" Specifies the style of endings to put on line ends.
Parameters
----------
style : cap_style
the line cap style to use. Available styles
are CAP_ROUND, CAP_BUTT, CAP_SQUARE
"""
try:
scap = cap_style[style]
except KeyError:
msg = "Invalid line cap style. See documentation for valid styles"
raise ValueError(msg)
self.gc.setLineCap(scap)
def set_line_dash(self, lengths, phase=0):
"""
Parameters
----------
lengths : float array
An array of floating point values
            specifying the lengths of the on/off painting
pattern for lines.
phase : float
Specifies how many units into dash pattern
to start. phase defaults to 0.
"""
if is_dashed((phase, lengths)):
lengths = list(lengths) if lengths is not None else []
self.gc.setDash(lengths, phase)
def set_flatness(self, flatness):
"""
It is device dependent and therefore not recommended by
the PDF documentation.
"""
raise NotImplementedError("Flatness not implemented yet on PDF")
# ----------------------------------------------------------------
# Sending drawing data to a device
    # ----------------------------------------------------------------
| Chibin/gpdb | src/test/tinc/tincrepo/mpp/gpdb/tests/storage/walrepl/load/__init__.py | Python | apache-2.0 | 1,772 | 0.005643 |
"""
Copyright (c) 2004-Present Pivotal Software, Inc.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import shutil
import tinctest
from gppylib.commands.base import Command
from mpp.models import MPPTestCase
from mpp.gpdb.tests.storage.walrepl.lib.pg_util import GpUtility
from mpp.gpdb.tests.storage.walrepl.gpinitstandby import GpinitStandby
class LoadClass(MPPTestCase):
    def __init__(self, methodName):
        self.gp = GpinitStandby()
        super(LoadClass, self).__init__(methodName)
def run_skip(self, type):
tinctest.logger.info("skipping checkpoint")
cmd_str = 'gpfaultinjector -p %s -m async -H ALL -r primary -f checkpoint -y %s -o 0' % (os.getenv('PGPORT'), type)
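        # Illustrative expansion (PGPORT value is hypothetical): with
        # PGPORT=5432 and type='suspend' the command becomes
        #   gpfaultinjector -p 5432 -m async -H ALL -r primary -f checkpoint -y suspend -o 0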
cmd = Command('skip_chkpoint', cmd_str)
        cmd.run(validateAfter=False)
def skip_checkpoint(self):
''' Routine to inject fault that skips checkpointing '''
self.run_skip('reset')
self.run_skip('suspend')
def init_standby(self):
pg = GpUtility()
pg.install_standby()
def cleanup(self):
# Remove standby and reset the checkpoint fault
self.gp.run(option='-r')
self.run_skip('reset')
| voidcc/POXPOF | pox.py | Python | apache-2.0 | 1,289 | 0 |
#!/bin/sh -
# Copyright 2011-2012 James McCauley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# If you have PyPy 1.6+ in a directory called pypy alongside pox.py, we
# use it.
# Otherwise, we try to use a Python interpreter called python2.7, which
# is a good idea if you're using Python from MacPorts, for example.
# We fall back to just "python" and hope that works.
''''true
#export OPT="-u -O"
export OPT="-u"
export FLG=""
if [ "$(basename $0)" = "debug-pox.py" ]; then
export OPT=""
export FLG="--debug"
fi
if [ -x pypy/bin/pypy ]; then
exec pypy/bin/pypy $OPT "$0" $FLG "$@"
fi
if type python2.7 > /dev/null 2> /dev/null; then
exec python2.7 $OPT "$0" $FLG "$@"
fi
exec python $OPT "$0" $FLG "$@"
'''
from pox.boot import boot
if __name__ == '__main__':
boot()
| trachelr/mne-python | examples/visualization/plot_evoked_topomap_delayed_ssp.py | Python | bsd-3-clause | 2,301 | 0 |
"""
===============================================
Create topographic ERF maps in delayed SSP mode
===============================================
This script shows how to apply SSP projectors delayed, that is,
at the evoked stage. This is particularly useful to support decisions
related to the trade-off between denoising and preserving signal.
In this example we demonstrate how to use topographic maps for delayed
SSP application.
"""
# Authors: Denis Engemann <denis.engemann@gmail.com>
# Christian Brodbeck <christianbrodbeck@nyu.edu>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
#
# License: BSD (3-clause)
import numpy as np
import mne
from mne import io
from mne.datasets import sample
print(__doc__)
data_path = sample.data_path()
###############################################################################
# Set parameters
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
event_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'
ecg_fname = data_path + '/MEG/sample/sample_audvis_ecg_proj.fif'
event_id, tmin, tmax = 1, -0.2, 0.5
# Setup for reading the raw data
raw = io.Raw(raw_fname)
events = mne.read_events(event_fname)
# delete EEG projections (we know it's the last one)
raw.del_proj(-1)
# add ECG projs for magnetometers
[raw.add_proj(p) for p in mne.read_proj(ecg_fname) if 'axial' in p['desc']]
# pick magnetometer channels
picks = mne.pick_types(raw.info, meg='mag', stim=False, eog=True,
include=[], exclude='bads')
# We will make use of the proj `delayed` option to
# interactively select projections at the evoked stage.
# more information can be found in the example/plot_evoked_delayed_ssp.py
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, picks=picks,
baseline=(None, 0), reject=dict(mag=4e-12), proj='delayed')
evoked = epochs.average() # average epochs and get an Evoked dataset.
###############################################################################
# Interactively select / deselect the SSP projection vectors
# set time instants in seconds (from 50 to 140 ms in steps of 10 ms; np.arange excludes the end point)
times = np.arange(0.05, 0.15, 0.01)
evoked.plot_topomap(times, proj='interactive')
# Hint: the same works for evoked.plot and viz.plot_topo
| adrianholovaty/django | tests/modeltests/validation/models.py | Python | bsd-3-clause | 4,204 | 0.00785 |
from datetime import datetime
from django.core.exceptions import ValidationError
from django.db import models
def validate_answer_to_universe(value):
if value != 42:
raise ValidationError('This is not the answer to life, universe and everything!', code='not42')
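# Illustrative use of the validator above (assumed driver code, not part of
# these test models):
#
#   >>> m = ModelToValidate(name='x', number=10, f_with_custom_validator=41)
#   >>> m.full_clean()
#   ValidationError: {'f_with_custom_validator': ['This is not the answer to
#   life, universe and everything!']}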
class ModelToValidate(models.Model):
name = models.CharField(max_length=100)
created = models.DateTimeField(default=datetime.now)
number = models.IntegerField(db_column='number_val')
parent = models.ForeignKey('self', blank=True, null=True, limit_choices_to={'number': 10})
email = models.EmailField(blank=True)
url = models.URLField(blank=True)
f_with_custom_validator = models.IntegerField(blank=True, null=True, validators=[validate_answer_to_universe])
def clean(self):
super(ModelToValidate, self).clean()
if self.number == 11:
raise ValidationError('Invalid number supplied!')
class UniqueFieldsModel(models.Model):
unique_charfield = models.CharField(max_length=100, unique=True)
unique_integerfield = models.IntegerField(unique=True)
non_unique_field = models.IntegerField()
class CustomPKModel(models.Model):
my_pk_field = models.CharField(max_length=100, primary_key=True)
class UniqueTogetherModel(models.Model):
cfield = models.CharField(max_length=100)
ifield = models.IntegerField()
efield = models.EmailField()
class Meta:
unique_together = (('ifield', 'cfield',), ['ifield', 'efield'])
class UniqueForDateModel(models.Model):
start_date = models.DateField()
end_date = models.DateTimeField()
count = models.IntegerField(unique_for_date="start_date", unique_for_year="end_date")
order = models.IntegerField(unique_for_month="end_date")
name = models.CharField(max_length=100)
class CustomMessagesModel(models.Model):
other = models.IntegerField(blank=True, null=True)
number = models.IntegerField(db_column='number_val',
error_messages={'null': 'NULL', 'not42': 'AAARGH', 'not_equal': '%s != me'},
        validators=[validate_answer_to_universe]
)
class Author(models.Model):
name = models.CharField(max_length=100)
class Article(models.Model):
title = models.CharField(max_length=100)
author = models.ForeignKey(Author)
pub_date = models.DateTimeField(blank=True)
def clean(self):
if self.pub_date is None:
            self.pub_date = datetime.now()
class Post(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateField()
def __unicode__(self):
        return self.title
class FlexibleDatePost(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateField(blank=True, null=True)
class UniqueErrorsModel(models.Model):
name = models.CharField(max_length=100, unique=True, error_messages={'unique': u'Custom unique name message.'})
no = models.IntegerField(unique=True, error_messages={'unique': u'Custom unique number message.'})
class GenericIPAddressTestModel(models.Model):
generic_ip = models.GenericIPAddressField(blank=True, null=True, unique=True)
v4_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv4")
v6_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv6")
class GenericIPAddrUnpackUniqueTest(models.Model):
generic_v4unpack_ip = models.GenericIPAddressField(blank=True, unique=True, unpack_ipv4=True)
# A model can't have multiple AutoFields
# Refs #12467.
assertion_error = None
try:
class MultipleAutoFields(models.Model):
auto1 = models.AutoField(primary_key=True)
auto2 = models.AutoField(primary_key=True)
except AssertionError as assertion_error:
pass # Fail silently
assert str(assertion_error) == u"A model can't have more than one AutoField."
| peterwharrison/SWAMP | SWAMP.py | Python | gpl-3.0 | 27,840 | 0.000647 |
#!/usr/bin/env python
'''
____ _ _ _ ____ _______ ____
/ ___)| | | | | |/ _ \ / _ _ \| _ \
\___ \| | | | | | (_) | | | | | | |_) )
___) | |_| |_| | ___ | | | | | | __/
(____/ \_______/|_| |_|_| |_| |_|_|
Sliding Window Alignment Masker for PAML - 31-03-14
SWAMP analyses multiple sequence alignments in a phylogenetic context,
looking for regions of higher than expected non-synonymous substitutions
along a branch, over a short sequence window. If a user defined
threshold is exceeded then the window of sequence is masked to prevent
its inclusion in downstream evolutionary analyses. This masking
approach removes sequence data that violates the assumptions of the
phylogenetic models implemented in the software package PAML that could
otherwise give a false signal of positive selection.
Copyright (C) 2015 Peter Harrison
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
#==============================================================================
import sys
import os
import argparse
#==============================================================================
#Main==========================================================================
#==============================================================================
def main():
'''Main runs only when the file itself is executed.
If a part of the script is imported, main will not be executed.'''
parser = argparse.ArgumentParser(description="SWAMP - Sliding Window \
Alignment Masker for PAML - Version \
31-03-14", epilog="Copyright (C) 2014 \
Peter Harrison", add_help=False)
# Required parameters
parameters = parser.add_argument_group('Required parameters')
parameters.add_argument("-i", "--infolder", type=str,
help="An input folder containing .phy and rst \
files. This folder can contain multiple \
subfolders each with .phy and rst files.")
parameters.add_argument("-b", "--branchnames", type=str,
help="A file listing which branches to analyse \
and which sequences to mask, see documentation \
for file format in README.md.")
parameters.add_argument("-t", "--threshold", type=int,
help="A threshold integer of the number of \
non-synonymous changes at and above which the \
window will be masked.")
parameters.add_argument("-w", "--windowsize", type=int,
help="The window size (in codons) for the sliding \
window scan.")
# Optional arguments
options = parser.add_argument_group('Optional arguments')
options.add_argument("-h", "--help", action="help",
help="Show help and exit.")
options.add_argument("-m", "--minseqlength", type=int, default=33,
help="The required minimum number of informative \
codons in each sequence of the alignment \
post-masking, a warning will be provided if sequences\
are less than this length.")
options.add_argument("-s", "--interscan", action="store_true",
help="Activates interscan masking as desribed in the \
documentation, README.md.")
options.add_argument("-p", "--print-alignment", type=str,
help="Prints out a summary of the given alignment \
file. No other option can be used at the same time.")
# Check if the user supplied any arguments. If not, print help.
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
# Shorten the access to command line arguments.
args = parser.parse_args()
if args.print_alignment:
print_alignment_summary(args.print_alignment)
sys.exit(0)
elif args.infolder is None:
print "Parameter -i [--infolder] is required!"
sys.exit(1)
elif args.branchnames is None:
print "Parameter -b [--branchnames] is required!"
sys.exit(1)
elif args.threshold is None:
print "Parameter -t [--threshold] is required!"
sys.exit(1)
elif args.windowsize is None:
print "Parameter -w [--windowsize] is required!"
sys.exit(1)
# Print user provided folder and SWAMP user options
print "Scanning folder:\t", args.infolder
params = (args.threshold, args.windowsize, args.minseqlength)
print "With selected parameters:"
print "\tThreshold:\t%s\n\tWindowsize:\t%s\n\tMinimum length:\t%s" % params
if args.interscan:
print "\tInterscan enabled"
# Get all files recursively.
file_list = list_folder_filtered(args.infolder, ".phy")
# Ignore any already-masked files.
file_list = [x for x in file_list if not x.endswith("_masked.phy")]
print "Found %s phylip files to scan with SWAMP." % len(file_list)
# Obtain sequences to be masked and branches to analyse from file
branchcodes = read_branchcodes(args.branchnames)
# Sliding window scan all found phylip alignments
sliding_window_scan(file_list, args.threshold, args.windowsize,
args.interscan, branchcodes,
args.minseqlength)
#==============================================================================
#Functions=====================================================================
#==============================================================================
def print_alignment_summary(infile):
'''Prints out a summary of the given alignment file'''
seq_dict = read_phylip(infile)
aln_length = len(seq_dict.values()[0])
print "%15s Alignment length: %d codons" % ('', aln_length)
total_masked_sites = 0
for species in seq_dict:
seq = seq_dict[species]
codons = get_codons(seq)
n_nongap_sites = 0
n_masked_sites = 0
for codon in codons:
if codon != '---':
n_nongap_sites += 1
if codon == 'NNN':
n_masked_sites += 1
total_masked_sites += 1
pct_masked = (float(n_masked_sites) / float(n_nongap_sites)) * 100
print ("%15s %.50s... %5d codons %4d masked (%.0f%%)" % (species, seq,
n_nongap_sites, n_masked_sites, pct_masked))
print "%15s Total masked codons: %d" % ('', total_masked_sites)
def list_folder_filtered(infolder, extension):
    '''Recursively returns files with the given extension from the specified
    folder and its subfolders'''
try:
file_list = []
# Walk directory tree
for path, subdirs, files in os.walk(infolder):
for name in files:
if name.endswith(extension): # Check file ends with extension
phyfile = os.path.join(path, name) # Get full path
file_list.append(phyfile) # Add path to file list
# If no phylip files are found print error
if not file_list:
print "!----ERROR----!"
print "%s doesn't exist or does not contain .phy files!" % infolder
sys.exit(1)
return file_list
except KeyboardInterrupt:
sys.exit(1)
def read_branchcodes(infile):
'''Parses branch information file for
| BradleyMoore/Snake | game_objects.py | Python | mit | 3,550 | 0.006761 |
from game_state import BOX, COLS, ROWS, SCREEN
import levels
from random import randint
import pygame
from time import sleep
class Level(object):
def __init__(self):
self.size = 1
self.color = 255,120,0
self.x = 0
self.y = 0
self.layout = []
self.wall = []
def create_level(self, level):
self.wall = []
if level == 1:
self.layout = levels.one
elif level == 2:
self.layout = levels.two
elif level == 3:
self.layout = levels.three
elif level == 4:
self.layout = levels.four
elif level == 5:
self.layout = levels.five
elif level == 6:
self.layout = levels.six
for i in xrange(len(self.layout)):
for j in xrange(len(self.layout[0])):
if self.layout[i][j] == 1:
self.wall.append((j, i))
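    # Example (assumed layout convention: 1 marks a wall cell): the layout
    #   [[1, 1, 1],
    #    [1, 0, 1],
    #    [1, 1, 1]]
    # yields wall == [(0, 0), (1, 0), (2, 0), (0, 1), (2, 1),
    #                 (0, 2), (1, 2), (2, 2)], i.e. (col, row) pairs
    # that draw() below scales by BOX when painting each wall block.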
def draw(self):
for self.x, self.y in self.wall:
pygame.draw.rect(SCREEN, self.color, (self.x*BOX, self.y*BOX, self.size*BOX, self.size*BOX))
class Snake(object):
def __init__(self):
# reset values
self.init_size = 10
self.initx = COLS/2
self.inity = ROWS/2
# grow values
self.grow_to = 15
self.size = 1
# movement values
self.x = self.initx
self.y = self.inity
self.speed = 1
self.vx = 0
self.vy = -self.speed
self.turn('up', 'down')
# snake body values
self.body = []
self.color = 0,0,255
self.length = 1
def draw(self):
for x, y in self.body:
pygame.draw.rect(SCREEN, self.color,
(x*BOX, y*BOX, self.size*BOX, self.size*BOX))
def eat(self, amount, state, wall):
self.grow(amount)
state.foodleft = state.foodleft - 1
state.score_reset()
state.increase_food_count(self, wall)
def grow(self, amount):
self.grow_to = self.grow_to + amount
def move(self):
self.x = self.x + self.vx
self.y = self.y + self.vy
x, y = self.x, self.y
self.body.insert(0, (x, y))
length = len(self.body)
if length > self.grow_to:
self.body.pop()
def turn(self, turn, oturn):
# don't go back on self; would be insta-death
if turn == oturn:
pass
elif turn == 'up':
self.vx = 0
self.vy = -self.speed
elif turn == 'down':
self.vx = 0
self.vy = self.speed
elif turn == 'left':
self.vx = -self.speed
self.vy = 0
elif turn == 'right':
self.vx = self.speed
self.vy = 0
class Food(object):
def __init__(self):
self.color = 255,0,0
self.size = 1
self.grow_value = 10
self.speed_value = 1
self.eaten_counter = 0
self.x, self.y = (randint(1, COLS-2)), (randint(1, ROWS-2))
def check(self, x, y, wall):
if (x, y) in wall:
self.x, self.y = (randint(1, COLS-2)), (randint(1, ROWS-2))
            self.check(self.x, self.y, wall)
def draw(self):
pygame.draw.rect(SCREEN, self.color, (self.x*BOX, self.y*BOX, self.size*BOX, self.size*BOX))
def get_eaten(self, wall):
self.x, self.y = (randint(1, COLS-2)), (randint(1, ROWS-2))
self.check(self.x, self.y, wall)
return self.grow_value
| yikelu/nfl_fantasy_data | htmls2csvs.py | Python | gpl-2.0 | 2,265 | 0.004415 |
# pylint: disable = C0301
from bs4 import BeautifulSoup
from urllib2 import urlopen
import pandas as pd
pos_idx_map = {
'qb': 2,
'rb': 3,
'wr': 4,
'te': 5,
}
def make_url(pos, wk):
ii = pos_idx_map[pos]
fstr = "http://fantasydata.com/nfl-stats/nfl-fantasy-football-stats.aspx?fs=1&stype=0&sn=1&w=%s&s=&t=0&p=%s&st=FantasyPointsPPR&d=1&ls=&live=false" \
% (wk, ii)
return fstr
def html2df(soup):
table = soup.find('table')
    headers = [header.text.lower() for header in table.find_all('th')]
rows = []
for row in table.find_all('tr'):
rows.append([val.text.encode('utf8') for val in row.find_all('td')])
rows = [rr for rr in rows if len(rr) > 0]
df = pd.DataFrame.from_records(rows)
df.columns = headers
return df
def position_html_local(posn):
dflist = []
    for ii in range(1, 17):
fname = '%s%s.html' % (posn, ii)
with open(fname) as f:
df = html2df(BeautifulSoup(f))
df['wk'] = ii
df.columns = header_clean(df.columns, posn)
dflist.append(df)
return pd.concat(dflist)
def position_html(posn):
dflist = []
for ii in range(1, 17):
fname = make_url(posn, ii)
df = html2df(BeautifulSoup(urlopen(fname)))
df['wk'] = ii
df.columns = header_clean(df.columns, posn)
dflist.append(df)
return pd.concat(dflist)
pos_header_suffixes = {
'qb': ['_pass', '_rush'],
'rb': ['_rush', '_recv'],
'wr': ['_recv'],
'te': ['_recv'],
}
exclude_cols = ['rk', 'player', 'team', 'pos', 'fantasy points',
'wk', 'fum', 'lost', 'qb rating']
def header_clean(header, posn):
res = []
if posn in pos_header_suffixes:
suffixes = pos_header_suffixes[posn]
seen_dict = {hh: 0 for hh in header}
for hh in header:
if not hh in exclude_cols:
hres = hh + suffixes[seen_dict[hh]]
seen_dict[hh] += 1
res.append(hres)
else:
res.append(hh)
else:
res = header
return res
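# Illustrative expansion (hypothetical header row): a QB table repeats stat
# names across the passing and rushing blocks, so
#   header_clean(['player', 'yds', 'td', 'yds', 'td', 'wk'], 'qb')
# returns
#   ['player', 'yds_pass', 'td_pass', 'yds_rush', 'td_rush', 'wk']
# -- excluded columns keep their names; repeats consume the suffix list in order.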
if __name__ == '__main__':
data_all = {}
for pp in ['qb', 'wr', 'rb', 'te']:
data_all[pp] = position_html_local(pp)
data_all[pp].to_pickle('%s.pkl' % pp)
| workflo/dxf2gcode | source/Gui/myCanvasClass.py | Python | gpl-3.0 | 26,794 | 0.014109 |
# -*- coding: utf-8 -*-
"""
Special purpose canvas including all required plotting function etc.
@newfield purpose: Purpose
@newfield sideeffect: Side effect, Side effects
@purpose: Plotting all
@author: Christian Kohlöffel
@since: 22.04.2011
@license: GPL
"""
from copy import copy
from PyQt4 import QtCore, QtGui
from Core.Point import Point
from Core.Shape import ShapeClass
from Gui.WpZero import WpZero
from Gui.Arrow import Arrow
from Gui.StMove import StMove
from Gui.RouteText import RouteText
#import math
import Core.Globals as g
import Core.constants as c
import logging
logger=logging.getLogger("DxfImport.myCanvasClass")
class MyGraphicsView(QtGui.QGraphicsView):
"""
This is the used Canvas to print the graphical interface of dxf2gcode.
All GUI things should be performed in the View and plotting functions in
the scene
@sideeffect: None
"""
def __init__(self, parent = None):
"""
Initialisation of the View Object. This is called by the gui created
with the QTDesigner.
@param parent: Main is passed as a pointer for reference.
"""
super(MyGraphicsView, self).__init__(parent)
self.currentItem=None
self.setTransformationAnchor(QtGui.QGraphicsView.AnchorUnderMouse)
self.setResizeAnchor(QtGui.QGraphicsView.AnchorViewCenter)
#self.setDragMode(QtGui.QGraphicsView.RubberBandDrag )
self.setDragMode(QtGui.QGraphicsView.NoDrag)
self.parent=parent
self.mppos=None
self.selmode=0
self.rubberBand = QtGui.QRubberBand(QtGui.QRubberBand.Rectangle, self)
def tr(self,string_to_translate):
"""
Translate a string using the QCoreApplication translation framework
@param: string_to_translate: a unicode string
@return: the translated unicode string if it was possible to translate
"""
return unicode(QtGui.QApplication.translate("MyGraphicsView",
string_to_translate,
None,
QtGui.QApplication.UnicodeUTF8))
def contextMenuEvent(self, event):
"""
Create the contextmenu.
@purpose: Links the new Class of ContextMenu to Graphicsview.
"""
menu=MyDropDownMenu(self,self.scene(),event.pos())
def keyPressEvent(self,event):
"""
Rewritten KeyPressEvent to get other behavior while Shift is pressed.
@purpose: Changes to ScrollHandDrag while Control pressed
@param event: Event Parameters passed to function
"""
if (event.key()==QtCore.Qt.Key_Shift):
self.setDragMode(QtGui.QGraphicsView.ScrollHandDrag)
elif (event.key()==QtCore.Qt.Key_Control):
self.selmode=1
else:
pass
super(MyGraphicsView, self).keyPressEvent(event)
def keyReleaseEvent (self,event):
"""
Rewritten KeyReleaseEvent to get other behavior while Shift is pressed.
@purpose: Changes to RubberBandDrag while Control released
@param event: Event Parameters passed to function
"""
if (event.key()==QtCore.Qt.Key_Shift):
self.setDragMode(QtGui.QGraphicsView.NoDrag)
#self.setDragMode(QtGui.QGraphicsView.RubberBandDrag )
elif (event.key()==QtCore.Qt.Key_Control):
self.selmode=0
else:
pass
        super(MyGraphicsView, self).keyReleaseEvent(event)
def wheelEvent(self,event):
"""
With Mouse Wheel the object is scaled
@purpose: Scale by mouse wheel
@param event: Event Parameters passed to function
"""
scale=(1000+event.delta())/1000.0
self.scale(scale,scale)
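        # e.g. one standard wheel notch reports delta == +120, giving
        # scale == (1000 + 120) / 1000.0 == 1.12 (12 % zoom in);
        # delta == -120 gives 0.88 (12 % zoom out).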
def mousePressEvent(self, event):
"""
Right Mouse click shall have no function, Therefore pass only left
click event
@purpose: Change inherited mousePressEvent
@param event: Event Parameters passed to function
"""
if(self.dragMode())==1:
super(MyGraphicsView, self).mousePressEvent(event)
elif event.button() == QtCore.Qt.LeftButton:
            self.mppos=event.pos()
else:
pass
def mouseReleaseEvent(self, event):
"""
Right Mouse click shall have no function, Therefore pass only left
click event
@purpose: Change inherited mousePressEvent
@param event: Event Parameters passed to function
"""
delta=2
if(self.dragMode())==1:
#if (event.key()==QtCore.Qt.Key_Shift):
#self.setDragMode(QtGui.QGraphicsView.NoDrag)
super(MyGraphicsView, self).mouseReleaseEvent(event)
#Selection only enabled for left Button
elif event.button() == QtCore.Qt.LeftButton:
#If the mouse button is pressed without movement of rubberband
if self.rubberBand.isHidden():
rect=QtCore.QRect(event.pos().x()-delta,event.pos().y()-delta,
2*delta,2*delta)
#logger.debug(rect)
self.currentItems=self.items(rect)
# it=self.itemAt(event.pos())
# if it==None:
# self.currentItems=[]
# else:
# self.currentItems=[it]
#self.currentItems=[self.itemAt(event.pos())]
#logger.debug("No Rubberband")
#If movement is bigger and rubberband is enabled
else:
rect=self.rubberBand.geometry()
#All items in the selection mode=QtCore.Qt.ContainsItemShape
self.currentItems=self.items(rect)
self.rubberBand.hide()
#logger.debug("Rubberband Selection")
#All items in the selection
#self.currentItems=self.items(rect)
#print self.currentItems
scene=self.scene()
#logger.debug(rect)
if self.selmode==0:
for item in scene.selectedItems():
# item.starrow.setSelected(False)
# item.stmove.setSelected(False)
# item.enarrow.setSelected(False)
item.setSelected(False)
for item in self.currentItems:
if item.isSelected():
item.setSelected(False)
else:
#print (item.flags())
item.setSelected(True)
else:
pass
self.mppos=None
#super(MyGraphicsView, self).mouseReleaseEvent(event)
def mouseMoveEvent(self, event):
"""
        MouseMoveEvent of the GraphicsView. May also be used for the Statusbar.
@purpose: Get the MouseMoveEvent and use it for the Rubberband Selection
@param event: Event Parameters passed to function
"""
if not(self.mppos is None):
            point = event.pos() - self.mppos
            if (point.manhattanLength() > 3):
#print 'the mouse has moved more than 3 pixels since the oldPosition'
#print "Mouse Pointer is currently hovering at: ", event.pos()
self.rubberBand.show()
self.rubberBand.setGeometry(QtCore.QRect(self.mppos, event.pos()).normalized())
scpoint=self.mapToScene(event.pos())
self.setStatusTip('X: %3.1f; Y: %3.1f' %(scpoint.x(),-scpoint.y()))
self.setToolTip('X: %3.1f; Y: %3.1f' %(scpoint.x(),-scpoint.y()))
super(MyGraphicsView, self).mouseMoveEvent(event)
| concurrence/concurrence | test/testtimer.py | Python | bsd-3-clause | 2,659 | 0.015795 |
from __future__ import with_statement
import time
from concurrence import unittest, Tasklet, Channel, TimeoutError
from concurrence.timer import Timeout
class TimerTest(unittest.TestCase):
def testPushPop(self):
self.assertEquals(-1, Timeout.current())
Timeout.push(30)
self.assertAlmostEqual(30, Timeout.current(), places = 1)
        Timeout.pop()
self.assertEquals(-1, Timeout.current())
Timeout.push(30)
self.assertAlmostEqual(30, Timeout.current(), places = 1)
Tasklet.sleep(1.0)
self.assertAlmostEqual(29, Timeout.current(), places = 1)
#push a temporary short timeout
Timeout.push(5)
        self.assertAlmostEqual(5, Timeout.current(), places = 1)
Timeout.pop()
self.assertAlmostEqual(29, Timeout.current(), places = 1)
#try to push a new longer timeout than the parent timeout
#this should fail, e.g. it will keep the parent timeout
Timeout.push(60)
self.assertAlmostEqual(29, Timeout.current(), places = 1)
Timeout.pop()
self.assertAlmostEqual(29, Timeout.current(), places = 1)
Timeout.pop()
self.assertEquals(-1, Timeout.current())
def testPushPop2(self):
self.assertEquals(-1, Timeout.current())
Timeout.push(-1)
self.assertEquals(-1, Timeout.current())
Timeout.pop()
self.assertEquals(-1, Timeout.current())
Timeout.push(10)
self.assertAlmostEqual(10, Timeout.current(), places = 1)
Timeout.push(5)
self.assertAlmostEqual(5, Timeout.current(), places = 1)
Timeout.pop()
self.assertAlmostEqual(10, Timeout.current(), places = 1)
Timeout.pop()
self.assertEquals(-1, Timeout.current())
def testTimer(self):
ch = Channel()
def sender(times):
for i in range(times):
Tasklet.sleep(1.0)
ch.send(True)
with Timeout.push(10):
Tasklet.new(sender)(4)
for i in range(4):
ch.receive(Timeout.current())
start = time.time()
try:
with Timeout.push(2.5):
Tasklet.new(sender)(4)
for i in range(4):
ch.receive(Timeout.current())
self.fail('expected timeout')
except TimeoutError, e:
end = time.time()
self.assertAlmostEqual(2.5, end - start, places = 1)
if __name__ == '__main__':
unittest.main(timeout = 10)
| proyectos-analizo-info/pyanalizo | src/app-ape/deprecated/get-propuestas-electorales-v5.py | Python | gpl-3.0 | 2,267 | 0.014116 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License version 3 as published by
the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/gpl-3.0.txt>.
'''
import sys
sys.path.append('../shared')
import functions as f
import ape, operator
def main():
d_output = ape.format_results('results.json')
crlf = '\r\n'
output = []
s = '======================================='
for item in sorted(d_output.iteritems(), key = operator.itemgetter(0)):
d_item = item[1]
f.append(output, s + crlf + 'Propuestas tarea - ' + item[0] + (' (' + d_item['task_id'] + ')') + crlf + s)
f.append(output, d_item['breadcrumbs'])
f.append(output, d_item['pages'] + crlf + '------------------')
answers = d_item['answers']
for answer in answers:
answ = answer
if 'desconocido' in answer:
answer = answer.split('_')
answer = answer[0] + ' (' + answer[1] + ')'
else:
answer = '(' + str(answer) + ')'
            f.append(output, 'Propuestas analista ' + answer + crlf + '---------------------------------------')
f.append(output, 'Hora de inicio: ' + f.formatTime(answers[answ]['answer_end_date']) + crlf + 'Hora de fin: ' + f.formatTime(answers[answ]['answer_start_date']))
for item in answers[answ]['answer'].split('\n'):
if item.replace(' ', '') != '':
f.append(output, item + crlf + '----------')
f.write_file('propuestas.txt', str(crlf * 2).join(output))
if __name__ == '__main__':
main()
| dmalatesta/spaceterm | entities/star.py | Python | gpl-3.0 | 729 | 0 |
from entities.base import Base
import game.constants
class Star(Base):
"""The Star objects represents a star (or a blackhole!)"""
symbol = 'O'
decoration = game.constants.STYLE_BOLD
color = game.constants.COLOR_WK
isCollider = True
def setClass(self, cl):
"""
Sets the star class, or spectral type.
        Uses the Harvard spectral classification only, for simplicity,
        but it could be extended to also include the Yerkes classification.
See: http://en.wikipedia.org/wiki/Stellar_classification
        TODO: Drastically reduce the chance of generating a black hole!
"""
        self.classification = cl  # TODO: do some validation?
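        # A minimal validation sketch for the TODO above (assumes that
        # game.constants.STAR_COLORS only has keys for valid classes):
        #   if cl not in game.constants.STAR_COLORS:
        #       raise ValueError('unknown spectral class: %r' % cl)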
self.color = game.constants.STAR_COLORS[cl]
| systemsoverload/codeeval | easy/132_major_element/solution.py | Python | mit | 359 | 0.008357 |
import sys
def solve(l):
numbers = l.split(',')
row_hash = {}
for num in numbers:
count = row_hash.setdefault(num, 0)
row_hash[num] = count + 1
if row_hash[num] > len(numbers) / 2:
return num
return None
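# Illustrative runs (line format as read from the input file; note the
# integer division, so "majority" means strictly more than half):
#   solve('2,2,1,2,3')  ->  '2'    (count 3 > 5 / 2)
#   solve('1,2,3,4')    ->  None   (no element exceeds half the list)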
with open(sys.argv[1], 'r') as f:
    for line in f.readlines():
print solve(line.strip())
| qedsoftware/commcare-hq | corehq/apps/app_manager/tests/test_profile.py | Python | bsd-3-clause | 4,112 | 0.001217 |
# coding: utf-8
from django.test import SimpleTestCase
from corehq.apps.app_manager.commcare_settings import get_commcare_settings_lookup, get_custom_commcare_settings
from corehq.apps.app_manager.models import Application
from corehq.apps.app_manager.tests.util import TestXmlMixin
import xml.etree.ElementTree as ET
from corehq.apps.builds.models import BuildSpec
from corehq import toggles
from toggle.shortcuts import update_toggle_cache, clear_toggle_cache
class ProfileTest(SimpleTestCase, TestXmlMixin):
file_path = ('data',)
def setUp(self):
self.app = Application(build_spec=BuildSpec(
version='2.7.0'
),
name=u"TÉST ÁPP",
domain="potter",
langs=['en']
)
update_toggle_cache(toggles.CUSTOM_PROPERTIES.slug, 'potter', True, toggles.NAMESPACE_DOMAIN)
def tearDown(self):
clear_toggle_cache(toggles.CUSTOM_PROPERTIES.slug, 'potter', toggles.NAMESPACE_DOMAIN)
def _test_profile(self, app):
profile = app.create_profile()
assert isinstance(profile, bytes), type(profile)
assert u"TÉST ÁPP" in profile.decode('utf-8')
profile_xml = ET.fromstring(profile)
types = {
'features': self._test_feature,
'properties': self._test_property,
}
for p_type, test_method in types.items():
for key, value in app.profile.get(p_type, {}).items():
setting = get_commcare_settings_lookup()[p_type][key]
test_method(profile_xml, key, value, setting)
def _get_node(self, profile, key, xpath_template):
xpath = xpath_template.format(key)
node = profile.find(xpath)
self.assertIsNotNone(node, 'Node not found: {}.'.format(xpath))
return node
def _test_feature(self, profile, key, value, setting):
node = self._get_node(profile, key, './features/{}')
self.assertEqual(node.get('active'), value, 'Feature "{}"'.format(key))
def _test_property(self, profile, key, value, setting):
node = self._get_node(profile, key, "./property[@key='{}']")
self.assertEqual(node.get('value'), value, 'Property "{}"'.format(key))
force = setting.get('force', False)
force_actual = node.get('force')
if not force:
self.assertIn(
force_actual,
[None, 'false'],
'"force" incorrect for property "{}"'.format(key)
)
else:
self.assertEqual(
force_actual,
'true',
'"force" incorrect for property "{}"'.format(key)
)
def _test_custom_property(self, profile, key, value):
node = self._get_node(profile, key, "./property[@key='{}']")
self.assertEqual(node.get('value'), value, 'Property "{}"'.format(key))
force_actual = node.get('force')
self.assertEqual(
force_actual,
'true',
'"force" should always be true for custom properties"{}"'.format(key)
)
def test_profile_properties(self):
        for setting in get_custom_commcare_settings():
            if setting['id'] == 'users':
continue
for value in setting.get('values', []):
self.app.profile = {
setting['type']: {
setting['id']: value
}
}
self._test_profile(self.app)
# custom properties do not rely on SETTINGS so need to be tested separately
self.app.profile = {
'custom_properties': {
'random': 'value'
}
}
profile = self.app.create_profile()
self._test_profile(self.app)
self._test_custom_property(ET.fromstring(profile), 'random', 'value')
def test_version(self):
profile_xml = ET.fromstring(self.app.create_profile())
root = profile_xml.find('.')
self.assertEqual(root.get('requiredMajor'), '2')
self.assertEqual(root.get('requiredMinor'), '7')
| fairbird/OpenPLI-TSimage | lib/python/Screens/StreamingClientsInfo.py | Python | gpl-2.0 | 4,194 | 0.030281 |
from Screen import Screen
from Screens.MessageBox import MessageBox
from Components.MenuList import MenuList
from Components.ActionMap import ActionMap
from Components.Sources.StreamService import StreamServiceList
from Components.Sources.StaticText import StaticText
from Components.Label import Label
from enigma import eStreamServer
from ServiceReference import ServiceReference
import socket
try:
from Plugins.Extensions.OpenWebif.controllers.stream import streamList
except:
streamList = []
class StreamingClientsInfo(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.streamServer = eStreamServer.getInstance()
self.clients = []
self["menu"] = MenuList(self.clients)
self["ke
|
y_red"] = StaticText(_("Close"))
self["key_green"] = StaticText("")
self["key_yellow"] = StaticText("")
self["info"] = Label()
self.updateClients()
self["actions"] = ActionMap(["ColorActions", "SetupActions"],
{
"canc
|
el": self.close,
"ok": self.stopCurrentStream,
"red": self.close,
"green": self.stopAllStreams,
"yellow": self.stopCurrentStream
})
def updateClients(self):
self["key_green"].setText("")
self["key_yellow"].setText("")
self.setTitle(_("Streaming clients info"))
self.clients = []
if self.streamServer:
for x in self.streamServer.getConnectedClients():
service_name = ServiceReference(x[1]).getServiceName() or "(unknown service)"
ip = x[0]
if int(x[2]) == 0:
strtype = "S"
else:
strtype = "T"
try:
raw = socket.gethostbyaddr(ip)
ip = raw[0]
except:
pass
info = ("%s %-8s %s") % (strtype, ip, service_name)
self.clients.append((info, (x[0], x[1])))
if StreamServiceList and streamList:
for x in StreamServiceList:
ip = "ip n/a"
service_name = "(unknown service)"
for stream in streamList:
if hasattr(stream, 'getService') and stream.getService() and stream.getService().__deref__() == x:
service_name = ServiceReference(stream.ref.toString()).getServiceName()
ip = stream.clientIP or ip
info = ("T %s %s %s") % (ip, service_name, _("(VU+ type)"))
self.clients.append((info,(-1, x)))
self["menu"].setList(self.clients)
if self.clients:
self["info"].setText("")
self["key_green"].setText(_("Stop all streams"))
self["key_yellow"].setText(_("Stop current stream"))
else:
self["info"].setText(_("No stream clients"))
def stopCurrentStream(self):
self.updateClients()
if self.clients:
client = self["menu"].l.getCurrentSelection()
if client:
self.session.openWithCallback(self.stopCurrentStreamCallback, MessageBox, client[0] +" \n\n" + _("Stop current stream") + "?", MessageBox.TYPE_YESNO)
def stopCurrentStreamCallback(self, answer):
if answer:
client = self["menu"].l.getCurrentSelection()
if client:
if client[1][0] != -1:
if self.streamServer:
for x in self.streamServer.getConnectedClients():
if client[1][0] == x[0] and client[1][1] == x[1]:
if not self.streamServer.stopStreamClient(client[1][0], client[1][1]):
self.session.open(MessageBox, client[0] +" \n\n" + _("Error stop stream!"), MessageBox.TYPE_WARNING)
elif StreamServiceList and streamList:
self.session.open(MessageBox, client[0] +" \n\n" + _("Not yet implemented!"), MessageBox.TYPE_WARNING)
# TODO
#for x in streamList[:]:
# if hasattr(x, 'getService') and x.getService() and x.getService().__deref__() == client[1][1]:
# x.execEnd()
# if x in streamList:
# streamList.remove(x)
self.updateClients()
def stopAllStreams(self):
self.updateClients()
if self.clients:
self.session.openWithCallback(self.stopAllStreamsCallback, MessageBox, _("Stop all streams") + "?", MessageBox.TYPE_YESNO)
def stopAllStreamsCallback(self, answer):
if answer:
if self.streamServer:
for x in self.streamServer.getConnectedClients():
self.streamServer.stopStream()
# TODO
#if StreamServiceList and streamList:
# for x in streamList[:]:
# if hasattr(x, 'execEnd'):
# x.execEnd()
# if x in streamList:
# streamList.remove(x)
self.updateClients()
if not self.clients:
self.close()
| axiak/py-rangeset | rangeset/__init__.py | Python | mit | 11,032 | 0.002085 |
"""
This module provides a RangeSet data structure. A range set is, as the
name implies, a set of ranges. Intuitively, you could think about a
range set as a subset of the real number line, with arbitrary gaps.
Some examples of range sets on the real number line:
1. -infinity to +infinity
2. -1 to 1
3. 1 to 4, 10 to 20
4. -infinity to 0, 10 to 20
5. (the empty set)
The code lives on github at: https://github.com/axiak/py-rangeset.
Overview
-------------
.. toctree::
:maxdepth: 2
The rangeset implementation offers immutable objects that represent the range
sets as described above. The operations are largely similar to the
`set object <http://docs.python.org/library/stdtypes.html#set>`_ with the
obvious exception that mutating methods such as ``.add`` and ``.remove``
are not available. The main object is the ``RangeSet`` object.
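A short illustrative session (behavior inferred from the operators defined
below; plain tuples on the right-hand side are coerced to ranges)::

    >>> a = RangeSet(1, 4) | (10, 20)   # example 3 above
    >>> 12 in a
    True
    >>> 5 in a
    False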
"""
import blist
import operator
import functools
import collections
__version__ = (0, 0, 11)
__all__ = ('INFINITY', 'NEGATIVE_INFINITY',
'RangeSet')
class _Indeterminate(object):
def timetuple(self):
return ()
def __eq__(self, other):
return other is self
class _Infinity(_Indeterminate):
def __lt__(self, other):
return False
def __gt__(self, other):
return True
    def __str__(self):
return 'inf'
__repr__ = __str__
class _NegativeInfinity(_Indeterminate):
def __lt__(self, other):
return True
def __gt__(self, other):
return False
def __str__(self):
return '-inf'
__repr__ = __str__
INFINITY = _Infinity()
NEGATIVE_INFINITY = _NegativeInfinity()
class RangeSet(object):
__slots__ = ['ends']
def __init__(self, start, end):
if isinstance(end, _RawEnd):
ends = start
else:
if isinstance(start, _Indeterminate) and isinstance(end, _Indeterminate) and \
start == end:
raise LogicError("A range cannot consist of a single end the line.")
if start > end:
start, end = end, start
ends = blist.sortedlist([(start, _START), (end, _END)])
object.__setattr__(self, "ends", ends)
def __getinitargs__(self):
return (self.ends, _RAW_ENDS)
def __merged_ends(self, *others):
sorted_ends = blist.sortedlist(self.ends)
for other in others:
for end in RangeSet.__coerce(other).ends:
sorted_ends.add(end)
return sorted_ends
def __merged_ends_inplace(self, *others):
sorted_ends = self.ends
for other in others:
for end in RangeSet.__coerce(other).ends:
sorted_ends.add(end)
return sorted_ends
@classmethod
def __coerce(cls, value):
if isinstance(value, RangeSet):
return value
elif isinstance(value, tuple) and len(value) == 2:
return cls(value[0], value[1])
else:
return cls.mutual_union(*[(x, x) for x in value])
@classmethod
def __iterate_state(cls, ends):
state = 0
for _, end in ends:
if end == _START:
state += 1
else:
state -= 1
yield _, end, state
def __or__(self, *other):
sorted_ends = self.__merged_ends(*other)
new_ends = []
for _, end, state in RangeSet.__iterate_state(sorted_ends):
if state > 1 and end == _START:
continue
elif state > 0 and end == _END:
continue
new_ends.append((_, end))
return RangeSet(blist.sortedlist(new_ends), _RAW_ENDS)
def __ior__(self, *other):
sorted_ends = self.__merged_ends(self, *other)
new_ends = []
for _, end, state in RangeSet.__iterate_state(sorted_ends):
if state > 1 and end == _START:
continue
elif state > 0 and end == _END:
continue
new_ends.append((_, end))
return RangeSet(new_ends, _RAW_ENDS)
union = __or__
def __setattr__(self, name, value):
raise AttributeError("This class does not support setting values.")
def __iand__(self, *other, **kwargs):
min_overlap = kwargs.pop('minimum', 2)
if kwargs:
raise ValueError("kwargs is not empty: {0}".format(kwargs))
sorted_ends = self.__merged_ends_inplace(*other)
new_ends = []
for _, end, state in RangeSet.__iterate_state(sorted_ends):
if state == min_overlap and end == _START:
new_ends.append((_, end))
elif state == (min_overlap - 1) and end == _END:
new_ends.append((_, end))
return RangeSet(blist.sortedlist(new_ends), _RAW_ENDS)
def __and__(self, *other, **kwargs):
min_overlap = kwargs.pop('minimum', 2)
if kwargs:
raise ValueError("kwargs is not empty: {0}".format(kwargs))
sorted_ends = self.__merged_ends(*other)
new_ends = []
for _, end, state in RangeSet.__iterate_state(sorted_ends):
if state == min_overlap and end == _START:
new_ends.append((_, end))
elif state == (min_overlap - 1) and end == _END:
new_ends.append((_, end))
return RangeSet(blist.sortedlist(new_ends), _RAW_ENDS)
intersect = __and__
def __ror__(self, other):
return self.__or__(other)
def __rand__(self, other):
return self.__and__(other)
def __rxor__(self, other):
return self.__xor__(other)
def __xor__(self, *other):
sorted_ends = self.__merged_ends(*other)
new_ends = []
old_val = None
for _, end, state in RangeSet.__iterate_state(sorted_ends):
if state == 2 and end == _START:
new_ends.append((_, _NEGATE[end]))
elif state == 1 and end == _END:
new_ends.append((_, _NEGATE[end]))
elif state == 1 and end == _START:
new_ends.append((_, end))
elif state == 0 and end == _END:
new_ends.append((_, end))
return RangeSet(blist.sortedlist(new_ends), _RAW_ENDS)
symmetric_difference = __xor__
def __contains__(self, test):
last_val, last_end = None, None
if not self.ends:
return False
if isinstance(test, _Indeterminate):
return False
for _, end, state in RangeSet.__iterate_state(self.ends):
if _ == test:
return True
elif last_val is not None and _ > test:
return last_end == _START
elif _ > test:
return False
last_val, last_end = _, end
return self.ends[-1][0] == test
def issuperset(self, test):
if isinstance(test, RangeSet):
rangeset = test
else:
rangeset = RangeSet.__coerce(test)
difference = rangeset - ~self
return difference == rangeset
__ge__ = issuperset
def __gt__(self, other):
return self != other and self >= other
def issubset(self, other):
return RangeSet.__coerce(other).issuperset(self)
__le__ = issubset
def __lt__(self, other):
return self != other and self <= other
def isdisjoint(self, other):
return not bool(self & other)
def __nonzero__(self):
return bool(self.ends)
__bool__ = __nonzero__
def __invert__(self):
if not self.ends:
new_ends = ((NEGATIVE_INFINITY, _START),
(INFINITY, _END))
return RangeSet(new_ends, _RAW_ENDS)
new_ends = blist.sortedlist(self.ends)
head, tail = [], []
if new_ends[0][0] == NEGATIVE_INFINITY:
new_ends.pop(0)
else:
head = [(NEGATIVE_INFINITY, _START)]
if new_ends[-1][0] == INFINITY:
new_ends.pop(-1)
else:
tail = [(INFINITY, _END)]
new_ends = blist.sortedlist((value[0], _NEGATE[value[1]])
for value in new_ends)
new_ends.update(head)
new_ends.update
| estaban/pyload | module/plugins/accounts/StahnuTo.py | Python | gpl-3.0 | 1,610 | 0.001863 |
# -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
"""
import re
from module.plugins.Account import Account
from module.utils import parseFileSize
class StahnuTo(Account):
__name__ = "StahnuTo"
__version__ = "0.02"
__type__ = "account"
__description__ = """StahnuTo accoun
|
t plugin"""
__author_name__ = "zoidberg"
__author_mail__ = "zoidberg@mujmail.cz"
    def loadAccountInfo(self, user, req):
html = req.load("http://www.stahnu.to/")
m = re.search(r'>VIP: (\d+.*)<', html)
trafficleft = parseFileSize(m.group(1)) * 1024 if m else 0
return {"premium": trafficleft > (512 * 1024), "trafficleft": trafficleft, "validuntil": -1}
def login(self, user, data, req):
html = req.load("http://www.stahnu.to/login.php", post={
"username": user,
"password": data['password'],
"submit": "Login"})
if not '<a href="logout.php">' in html:
self.wrongPassword()
| clever-crow-consulting/otm-core | opentreemap/treemap/lib/udf.py | Python | agpl-3.0 | 2,743 | 0 |
import json
from django.db import transaction
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _
from treemap.audit import Role, FieldPermission
from treemap.udf import (UserDefinedFieldDefinition)
def udf_exists(params, instance):
"""
A little helper function to enable searching for a udf using
the same syntax as udf_create.
Unfortunately, udf_create is designed to read a dict of data
that is quite different than the syntax you'd use for querying
the UserDefinedFieldDefinition model directly.
To make a more consistent API for the common case of first
checking if a UDF exists using a data dict, and if not,
creating it using the same API, this function exists.
"""
data = _parse_params(params)
udfs = UserDefinedFieldDefinition.objects.filter(
instance=instance,
model_type=data['model_type'],
name=data['name'])
return udfs.exists()
@transaction.atomic
def udf_create(params, instance):
data = _parse_params(params)
name, model_type, datatype = (data['name'], data['model_type'],
data['datatype'])
udfs = UserDefinedFieldDefinition.objects.filter(
instance=instance,
model_type=model_type,
name=name)
if udfs.exists():
raise ValidationError(
{'udf.name':
[_("A user defined field with name "
"'%(udf_name)s' already exists") % {'udf_name': name}]})
if model_type not in {cls.__name__ for cls
in instance.editable_udf_models()}:
raise ValidationError(
{'udf.model': [_('Invalid model')]})
udf = UserDefinedFieldDefinition(
name=name,
model_type=model_type,
iscollection=False,
instance=instance,
datatype=datatype)
udf.save()
field_name = udf.canonical_name
# Add a restrictive permission for this UDF to all roles in the
# instance
for role in Role.objects.filter(instance=instance):
        FieldPermission.objects.get_or_create(
model_name=model_type,
field_name=field_name,
permission_level=FieldPermission.NONE,
role=role,
instance=role.instance)
return udf
def _parse_params(params):
name = params.get('udf.name', None)
model_type = params.get('udf.model', None)
udf_type = params.get('udf.type', None)
datatype = {'type': udf_type}
if udf_type in ('choice', 'multichoice'):
datatype['choices'] = params.get('udf.choices', None)
datatype = json.dumps(datatype)
return {'name': name, 'model_type': model_type,
'datatype': datatype}
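# For a choice-type UDF, _parse_params serializes the datatype dict to JSON,
# roughly (hypothetical values, key order may differ):
#     {'udf.type': 'choice', 'udf.choices': ['Good', 'Fair']}
#     -> datatype == '{"type": "choice", "choices": ["Good", "Fair"]}'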
|
MaxTyutyunnikov/lino
|
obsolete/docs/examples/sprl2.py
|
Python
|
gpl-3.0
| 1,300 | 0.011538 |
from lino.apps.ledger.ledger_tables import LedgerSchema
from lino.reports import Report
from lino.adamo.rowattrs import Field, Pointer, Detail
class SchemaOverview(Report):
def __init__(self,schema):
self.schema=schema
Report.__init__(self)
def getIterator(self):
return self.schema.getTableList()
def setupReport(self):
self.addVurtColumn(
label="TableName",
meth=lambda row: row.item.getTableName(),
width=15)
self.addVurtColumn(
label="Fields",
meth=lambda row:\
", ".join([fld.name for fld in row.item.getFields()
if isinstance(fld,Field) \
and not isinstance(fld,Pointer)]),
width=20)
self.addVurtColumn(
label="Pointers",
meth=lambda row:\
"
|
, ".join([fld.name for fld in row.item.getFields()
if isinstance(fld,Pointer)]),
|
width=15)
self.addVurtColumn(
label="Details",
meth=lambda row:\
", ".join([fld.name for fld in row.item.getFields()
if isinstance(fld,Detail)]),
width=25)
sch=LedgerSchema()
rpt=SchemaOverview(sch)
rpt.show()
|
lavjain/incubator-hawq
|
tools/bin/gppylib/programs/verify.py
|
Python
|
apache-2.0
| 11,749 | 0.010043 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
'''
Created on May 9, 2011
'''
from gppylib.mainUtils import addStandardLoggingAndHelpOptions, ProgramArgumentValidationException
import os
from datetime import datetime
from optparse import OptionGroup
from gppylib import gplog
from gppylib import userinput
from gppylib.commands.gp import get_masterdatadir
from gppylib.gpparseopts import OptParser, OptChecker
from gppylib.mainUtils import UserAbortedException
from gppylib.operations import Operation
from gppylib.operations.verify import AbortVerification, CleanVerification, CleanDoneVerifications, FinalizeVerification, FinalizeAllVerifications, ResumeVerification, SuspendVerification, VerificationType, VerifyFilerep, VerificationState
logger = gplog.get_default_logger()
MAX_BATCH_SIZE=128
class GpVerifyProgram(Operation):
def __init__(self, options, args):
count = sum([1 for opt in ['full', 'verify_file', 'verify_dir', 'clean', 'results', 'abort', 'suspend', 'resume']
if options.__dict__[opt] is not None])
if count == 0:
raise ProgramArgumentValidationException("""Must specify one verify request type (--full, --file, or --directorytree)
or action (--clean, --results, --abort, --suspend, or --resume)""")
elif count > 1:
raise ProgramArgumentValidationException("""Only specify one verify request type (--full, --file, or --directorytree)
or action (--clean, --results, --abort, --suspend, or --resume)""")
self.full = options.full
self.verify_file = options.verify_file
self.verify_dir = options.verify_dir
self.results = options.results
self.clean = options.clean
self.abort = options.abort
self.suspend = options.suspend
self.resume = options.resume
count = sum([1 for opt in ['full', 'verify_file', 'verify_dir']
if options.__dict__[opt] is not None])
if options.ignore_file is not None and count != 1:
raise ProgramArgumentValidationException("--fileignore must be used with --full, --file, or --directorytree")
if options.ignore_dir is not None and count != 1:
raise ProgramArgumentValidationException("--dirignore must be used with --full, --file, or --directorytree")
if options.results_level is not None and count != 1:
raise ProgramArgumentValidationException("--resultslevel must be used with --full, --file, or --directorytree")
self.ignore_file = options.ignore_file
self.ignore_dir = options.ignore_dir
self.results_level = options.results_level
if options.parallel < 1 or options.parallel > MAX_BATCH_SIZE:
raise ProgramArgumentValidationException('Parallelism value must be between 1 and %d' % MAX_BATCH_SIZE)
self.batch_default = options.parallel
self.content = None
if options.content is not None:
if self.clean or self.results or self.abort or self.suspend or self.resume:
raise ProgramArgumentValidationException("--content can only be used when spooling new verifications, with --full, --file, or --directorytree.")
self.content = int(options.content)
if options.token is None:
if self.abort or self.suspend or self.resume:
raise ProgramArgumentValidationException("If --abort, --suspend, or --resume is provided, --token must be given.")
self.token = options.token
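        # Hypothetical command-line sketches matching the validation above
        # (exact binary name and flag combinations are assumptions):
        #     gpverify --full --token nightly01
        #     gpverify --results --token nightly01
        #     gpverify --abort --token nightly01   # --token is required here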
def execute(self):
"""
        Based on options, sends different verification messages to the backend.
"""
if self.results:
self._do_results(self.token, self.batch_default)
elif self.clean:
self._do_clean(self.token, self.batch_default)
elif self.abort:
AbortVerification(token = self.token,
batch_default = self.batch_default).run()
elif self.resume:
ResumeVerification(token = self.token,
batch_default = self.batch_default).run()
elif self.suspend:
SuspendVerification(token = self.token,
batch_default = self.batch_default).run()
else:
if self.token is None:
self.token = datetime.now().strftime("%Y%m%d%H%M%S")
VerifyFilerep(content = self.content,
full = self.full,
verify_file = self.verify_file,
verify_dir = self.verify_dir,
token = self.token,
ignore_dir = self.ignore_dir,
ignore_file = self.ignore_file,
results_level = self.results_level,
batch_default = self.batch_default).run()
def _do_results(self, token, batch_default):
if self.token is not None:
entry = FinalizeVerification(token = token,
batch_default = batch_default).run()
entries = [entry]
else:
entries = FinalizeAllVerifications(batch_default).run()
master_datadir = get_masterdatadir()
for entry in entries:
logger.info('---------------------------')
logger.info('Token: %s' % entry['vertoken'])
logger.info('Type: %s' % VerificationType.lookup[entry['vertype']])
logger.info('Content: %s' % (entry['vercontent'] if entry['vercontent'] > 0 else "ALL"))
logger.info('Started: %s' % entry['verstarttime'])
logger.info('State: %s' % VerificationState.lookup[entry['verstate']])
if entry['verdone']:
path = os.path.join(master_datadir, 'pg_verify', entry['vertoken'])
logger.info('Details: %s' % path)
def _do_clean(self, token, batch_default):
if self.token is not None:
CleanVerification(token = self.token,
batch_default = self.batch_default).run()
else:
if not userinput.ask_yesno(None, "\nAre you sure you want to remove all completed verification artifacts across the cluster?", 'N'):
raise UserAbortedException()
CleanDoneVerifications(batch_default = self.batch_default).run()
def cleanup(self): pass
@staticmethod
def createParser():
"""
Creates the command line options parser object for gpverify.
"""
description = ("Initiates primary/mirror verification.")
help = []
parser = OptParser(option_class=OptChecker,
description=' '.join(description.split()),
version='%prog version $Revision: #1 $')
parser.setHelp(help)
addStandardLoggingAndHelpOptions(parser, True)
addTo = OptionGroup(parser, "Request Type")
parser.add_option_group(addTo)
addTo.add_option('--full', dest='full', action='store_true',
help='Perform a full verification pass. Use --token option to ' \
'give the verification pass an identifier.')
addTo.add_option('--file', dest='verify_file', metavar='<file>',
|
rtucker-mozilla/WhistlePig
|
vendor-local/lib/python/tastypie/admin.py
|
Python
|
bsd-3-clause
| 582 | 0.001718 |
from django.conf import settings
from django.contrib import admin
if 'django.contrib.auth' in settings.INSTALLED_APPS:
from tastypie.models import ApiKey
class ApiKeyInline(admin.StackedInline):
model = ApiKey
extra = 0
ABSTRACT_APIKEY = getattr(settings, 'TASTYPIE_ABSTRACT_APIKEY', False)
if ABSTRACT_APIKEY and not isinstance(ABSTRACT_APIKEY, bool):
raise TypeError("'TASTYPIE_ABSTRACT_APIKEY' must be either 'True' "
"or 'False'.")
if not ABSTRACT_APIKEY:
admin.site.register(ApiKey)
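    # Hypothetical settings.py sketch for the guard above: setting
    #     TASTYPIE_ABSTRACT_APIKEY = True
    # keeps ApiKey abstract and skips the admin registration above.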
|
PMantovani/road-irregularity-detector
|
classification_algorithm/src/svm_training.py
|
Python
|
apache-2.0
| 4,872 | 0.001437 |
import sys
import csv
import pickle
import numpy as np
from sklearn import svm
from sklearn.metrics import f1_score
from sklearn.metrics import confusion_matrix
from sklearn.preprocessing import StandardScaler
training_data = []
training_classes = []
test_data = []
test_classes = []
path = '../../data/'
filename = path + 'processed_data'
out_filename = path + 'results/'
# parse number of classes
if '-t' in sys.argv or '--two' in sys.argv:
two_classes = True
else:
two_classes = False
# parse axis independence
if '-a' in sys.argv or '--axis-independent' in sys.argv:
    axis_indep = True
else:
axis_indep = False
c_parameter = 1
# parse value of C
if '-c' in sys.argv:
ind = sys.argv.index('-c')
c_parameter = float(sys.argv[ind+1])
# parse value of gamma
gamma_parameter = 1
if '-g' in sys.argv:
ind = sys.argv.index('-g')
gamma_parameter = float(sys.argv[ind+1])
# parse kernel type
kernel_type = 'linear'
if '-k' in sys.argv:
ind = sys.argv.index('-k')
kernel_type = sys.argv[ind+1]
# check if we are performing a sweep run
sweep_run = False
if '-s' in sys.argv:
sweep_run = True
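# Example invocations (hypothetical; flags as parsed above):
#     python svm_training.py -k rbf -c 10 -g 0.01   # single run
#     python svm_training.py -s -k rbf              # sweep over C and gamma
#     python svm_training.py -t -a                  # 2 classes, axis-independent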
if two_classes:
    print 'Running classifier for 2 classes'
else:
    print 'Running classifier for 3 classes'
if axis_indep:
    print 'Running with axis independence'
else:
    print 'Running without axis independence'
print 'Kernel type: ' + str(kernel_type)
if sweep_run:
    print 'Running with parameter sweep'
else:
    print 'Classification parameters: C=' + str(c_parameter) + ' gamma=' + str(gamma_parameter)
out_filename += kernel_type
if not sweep_run:
out_filename += '_c_' + str(c_parameter)
out_filename += '_g_' + str(gamma_parameter)
if two_classes:
filename += '_2'
out_filename += '_2'
if axis_indep:
filename += '_axis_indep'
out_filename += '_axis_indep'
filename += '.csv'
out_filename += '.csv'
with open(filename, 'r') as p_data:
csv_reader = csv.reader(p_data)
to_training_data = True
first_row = True
for row in csv_reader:
if not first_row:
r = np.array(row, dtype=float)
if to_training_data:
training_classes.append(int(r[0]))
if axis_indep:
training_data.append([r[1], r[2], r[3], r[4], r[5], r[6]])
else:
training_data.append([r[1], r[2], r[3], r[4], r[5], r[6], r[7],
r[8], r[9], r[10], r[11], r[12], r[13], r[14]])
to_training_data = False
else:
test_classes.append(int(r[0]))
if axis_indep:
test_data.append([r[1], r[2], r[3], r[4], r[5], r[6]])
else:
test_data.append([r[1], r[2], r[3], r[4], r[5], r[6], r[7],
r[8], r[9], r[10], r[11], r[12], r[13], r[14]])
to_training_data = True
first_row = False
scaler = StandardScaler()
training_normalized = scaler.fit_transform(training_data)
test_normalized = scaler.transform(test_data)
if sweep_run:
sweep_values = [0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10, 100, 1000]
f1_matrix = np.empty((len(sweep_values), len(sweep_values)))
for i, gamma in enumerate(sweep_values):
for j, c in enumerate(sweep_values):
classifier = svm.SVC(gamma=gamma, C=c, kernel=kernel_type, degree=2, cache_size=4000)
classifier.fit(training_normalized, training_classes)
predicted = classifier.predict(test_normalized)
f1_matrix[i][j] = str(f1_score(test_classes, predicted, average='macro'))
print 'F1 Score: ' + str(f1_matrix[i][j])
print 'Finished processing gamma = ' + str(gamma)
with open(out_filename, 'w') as out_file:
out_file.write('gamma\\C')
for sweep in sweep_values:
out_file.write(',' + str(sweep))
out_file.write('\n')
for i, row in enumerate(f1_matrix):
out_file.write(str(sweep_values[i]))
for cell in row:
out_file.write(',' + str(cell))
out_file.write('\n')
else:
classifier = svm.SVC(gamma=gamma_parameter, C=c_parameter, kernel=kernel_type, degree=2)
classifier.fit(training_normalized, training_classes)
predicted = classifier.predict(test_normalized)
np.set_printoptions(precision=2, suppress=True)
conf_matrix = np.transpose(confusion_matrix(test_classes, predicted))
f1 = str(f1_score(test_classes, predicted, average='macro'))
print 'F1 Score: ' + str(f1)
with open(out_filename, 'w') as out_file:
out_file.write(str(conf_matrix) + '\n')
out_file.write(f1)
# save model to file
pickle.dump(classifier, open('../../data/model.sav', 'wb'))
pickle.dump(scaler, open('../../data/scaler.sav', 'wb'))
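# The pickled artifacts can later be restored for inference, roughly:
#     classifier = pickle.load(open('../../data/model.sav', 'rb'))
#     scaler = pickle.load(open('../../data/scaler.sav', 'rb'))
#     predictions = classifier.predict(scaler.transform(new_samples))
# (new_samples is a hypothetical feature matrix shaped like the training data)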
|
vileopratama/vitech
|
src/addons/account_check_printing/account_journal_dashboard.py
|
Python
|
mit
| 1,200 | 0.004167 |
# -*- coding: utf-8 -*-
from openerp import models, api, _
class account_journal(models.Model):
_inherit = "account.journal"
@api.multi
def get_journal_dashboard_datas(self):
domain_checks_to_print = [
('journal_id', '=', self.id),
('payment_method_id.code', '=', 'check_printing'),
('state','=','posted')
]
return dict(
            super(account_journal, self).get_journal_dashboard_datas(),
num_checks_to_print=len(self.env['account.payment'].search(domain_checks_to_print))
)
@api.multi
def action_checks_to_print(self):
return {
'name': _('Checks to Print'),
'type': 'ir.actions.act_window',
'view_mode': 'list,form,graph',
'res_model': 'account.payment',
'context': dict(
self.env.context,
search_default_checks_to_send=1,
journal_id=self.id,
default_journal_id=self.id,
default_payment_type='outbound',
default_payment_method_id=self.env.ref('account_check_printing.account_payment_method_check').id,
),
}
|
wagnerand/addons-server
|
src/olympia/amo/views.py
|
Python
|
bsd-3-clause
| 8,643 | 0.000694 |
import json
import os
import re
import sys
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.contrib.sitemaps.views import x_robots_tag
from django.core.exceptions import PermissionDenied, ViewDoesNotExist
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.db.transaction import non_atomic_requests
from django.http import Http404, HttpResponse, HttpResponseNotFound, JsonResponse
from django.template.response import TemplateResponse
from django.utils.cache import patch_cache_control
from django.views.decorators.cache import never_cache
from django_statsd.clients import statsd
from rest_framework.exceptions import NotFound
from rest_framework.response import Response
from rest_framework.views import APIView
from olympia import amo
from olympia.amo.utils import HttpResponseXSendFile, use_fake_fxa
from olympia.api.exceptions import base_500_data
from olympia.api.serializers import SiteStatusSerializer
from olympia.users.models import UserProfile
from . import monitors
from .sitemap import get_sitemap_path, get_sitemaps, render_index_xml
@never_cache
@non_atomic_requests
def heartbeat(request):
# For each check, a boolean pass/fail status to show in the template
status_summary = {}
checks = [
'memcache',
'libraries',
'elastic',
'path',
'rabbitmq',
'signer',
'database',
]
for check in checks:
with statsd.timer('monitor.%s' % check):
status, _ = getattr(monitors, check)()
# state is a string. If it is empty, that means everything is fine.
status_summary[check] = {'state': not status, 'status': status}
# If anything broke, send HTTP 500.
status_code = 200 if all(a['state'] for a in status_summary.values()) else 500
return JsonResponse(status_summary, status=status_code)
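# Illustrative response shape (hypothetical values):
#     {"memcache": {"state": true, "status": ""}, "database": {...}, ...}
# "state" is the boolean pass flag; any non-empty "status" string flips the
# overall HTTP status to 500.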
@never_cache
@non_atomic_requests
def client_info(request):
if getattr(settings, 'ENV', None) != 'dev':
raise PermissionDenied
keys = (
'HTTP_USER_AGENT',
'HTTP_X_COUNTRY_CODE',
'HTTP_X_FORWARDED_FOR',
'REMOTE_ADDR',
)
data = {key: request.META.get(key) for key in keys}
return JsonResponse(data)
@non_atomic_requests
def robots(request):
"""Generate a robots.txt"""
_service = request.META['SERVER_NAME'] == settings.SERVICES_DOMAIN
if _service or not settings.ENGAGE_ROBOTS:
response = HttpResponse('User-agent: *\nDisallow: /', content_type='text/plain')
else:
ctx = {
'apps': amo.APP_USAGE,
'mozilla_user_id': settings.TASK_USER_ID,
'mozilla_user_username': 'mozilla',
}
response = TemplateResponse(
request, 'amo/robots.html', context=ctx, content_type='text/plain'
)
return response
@non_atomic_requests
def contribute(request):
path = os.path.join(settings.ROOT, 'contribute.json')
return HttpResponse(open(path, 'rb'), content_type='application/json')
@non_atomic_requests
def handler403(request, exception=None, **kwargs):
return TemplateResponse(request, 'amo/403.html', status=403)
@non_atomic_requests
def handler404(request, exception=None, **kwargs):
if getattr(request, 'is_api', False):
# It's a v3+ api request (/api/vX/ or /api/auth/)
return JsonResponse({'detail': str(NotFound.default_detail)}, status=404)
elif re.match(r'^/api/\d\.\d/', getattr(request, 'path_info', '')):
# It's a legacy API request in the form of /api/X.Y/. We use path_info,
# which is set in LocaleAndAppURLMiddleware, because there might be a
# locale and app prefix we don't care about in the URL.
response = HttpResponseNotFound()
patch_cache_control(response, max_age=60 * 60 * 48)
return response
return TemplateResponse(request, 'amo/404.html', status=404)
@non_atomic_requests
def handler500(request, **kwargs):
# To avoid database queries, the handler500() cannot evaluate the user - so
# we need to avoid making log calls (our custom adapter would fetch the
# user from the current thread) and set request.user to anonymous to avoid
# its usage in context processors.
request.user = AnonymousUser()
if getattr(request, 'is_api', False):
# API exceptions happening in DRF code would be handled with by our
# custom_exception_handler function in olympia.api.exceptions, but in
# the rare case where the exception is caused by a middleware or django
# itself, it might not, so we need to handle it here.
return HttpResponse(
json.dumps(base_500_data()), content_type='application/json', status=500
)
return TemplateResponse(request, 'amo/500.html', status=500)
@non_atomic_requests
def csrf_failure(request, reason=''):
from django.middleware.csrf import REASON_NO_REFERER, REASON_NO_CSRF_COOKIE
ctx = {
'reason': reason,
'no_referer': reason == REASON_NO_REFERER,
'no_cookie': reason == REASON_NO_CSRF_COOKIE,
}
return TemplateResponse(request, 'amo/403.html', context=ctx, status=403)
@non_atomic_requests
def version(request):
path = os.path.join(settings.ROOT, 'version.json')
with open(path) as f:
contents = json.loads(f.read())
py_info = sys.version_info
contents['python'] = '{major}.{minor}'.format(
major=py_info.major, minor=py_info.minor
)
contents['django'] = '{major}.{minor}'.format(
major=django.VERSION[0], minor=django.VERSION[1]
)
path = os.path.join(settings.ROOT, 'package.json')
with open(path) as f:
data = json.loads(f.read())
contents['addons-linter'] = data['dependencies']['addons-linter']
res = HttpResponse(json.dumps(contents), content_type='application/json')
res.headers['Access-Control-Allow-Origin'] = '*'
return res
def _frontend_view(*args, **kwargs):
"""View has migrated to addons-frontend but we still have the url so we
can reverse() to it in addons-server code.
If you ever hit this url somethunk gun wrong!"""
raise ViewDoesNotExist()
@non_atomic_requests
def frontend_view(*args, **kwargs):
"""Wrap _frontend_view so we can mock it in tests."""
return _frontend_view(*args, **kwargs)
# Special attribute that our <ModelBase>.get_absolute_url() looks for to
# determine whether it's a frontend view (that requires a different host prefix
# on admin instances) or not.
frontend_view.is_frontend_view = True
def fake_fxa_authorization(request):
"""Fake authentication page to bypass FxA in local development envs."""
if not use_fake_fxa():
raise Http404()
interesting_accounts = UserProfile.objects.exclude(groups=None).exclude(
deleted=True
)[:25]
return TemplateResponse(
request,
'amo/fake_fxa_authorization.html',
context={'interesting_accounts': interesting_accounts},
)
class SiteStatusView(APIView):
authentication_classes = []
permission_classes = []
@classmethod
def as_view(cls, **initkwargs):
return non_atomic_requests(super().as_view(**initkwargs))
def get(self, request, format=None):
return Response(SiteStatusSerializer(object()).data)
class InvalidSection(Exception):
pass
@non_atomic_requests
@x_robots_tag
def sitemap(request):
section = request.GET.get('section') # no section means the index page
app = request.GET.get('app_name')
page = request.GET.get('p', 1)
if 'debug' in request.GET and settings.SITEMAP_DEBUG_AVAILABLE:
try:
sitemaps = get_sitemaps()
if not section:
if page != 1:
raise EmptyPage
content = render_index_xml(sitemaps)
else:
sitemap_object = sitemaps.get((section, amo.APPS.get(app)))
if not sitemap_object:
raise InvalidSection
content = sitemap_object.render(app, page)
except EmptyPage:
raise Http404('Page %s empty' % page)
except PageNotAn
|
cchurch/ansible-modules-core
|
cloud/openstack/_keystone_user.py
|
Python
|
gpl-3.0
| 14,116 | 0.001063 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Based on Jimmy Tang's implementation
DOCUMENTATION = '''
---
module: keystone_user
version_added: "1.2"
deprecated: Deprecated in 2.0. Use os_user instead
short_description: Manage OpenStack Identity (keystone) users, tenants and roles
description:
- Manage users,tenants, roles from OpenStack.
options:
login_user:
description:
- login username to authenticate to keystone
required: false
default: admin
login_password:
description:
- Password of login user
required: false
default: 'yes'
login_tenant_name:
description:
- The tenant login_user belongs to
required: false
default: None
version_added: "1.3"
token:
description:
- The token to be uses in case the password is not specified
required: false
default: None
endpoint:
description:
- The keystone url for authentication
required: false
default: 'http://127.0.0.1:35357/v2.0/'
user:
description:
- The name of the user that has to added/removed from OpenStack
required: false
default: None
password:
description:
- The password to be assigned to the user
required: false
default: None
tenant:
description:
- The tenant name that has be added/removed
required: false
default: None
tenant_description:
description:
- A description for the tenant
required: false
default: None
email:
description:
- An email address for the user
required: false
default: None
role:
description:
- The name of the role to be assigned or created
required: false
default: None
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
requirements:
- "python >= 2.6"
- python-keystoneclient
author: "Ansible Core Team (deprecated)"
'''
EXAMPLES = '''
# Create a tenant
- keystone_user: tenant=demo tenant_description="Default Tenant"
# Create a user
- keystone_user: user=john tenant=demo password=secrete
# Apply the admin role to the john user in the demo tenant
- keystone_user: role=admin user=john tenant=demo
'''
try:
from keystoneclient.v2_0 import client
except ImportError:
keystoneclient_found = False
else:
keystoneclient_found = True
def authenticate(endpoint, token, login_user, login_password, login_tenant_name):
"""Return a keystone client object"""
if token:
return client.Client(endpoint=endpoint, token=token)
else:
return client.Client(auth_url=endpoint, username=login_user,
password=login_password, tenant_name=login_tenant_name)
def tenant_exists(keystone, tenant):
""" Return True if tenant already exists"""
return tenant in [x.name for x in keystone.tenants.list()]
def user_exists(keystone, user):
"""" Return True if user already exists"""
return user in [x.name for x in keystone.users.list()]
def get_tenant(keystone, name):
""" Retrieve a tenant by name"""
tenants = [x for x in keystone.tenants.list() if x.name == name]
count = len(tenants)
if count == 0:
raise KeyError("No keystone tenants with name %s" % name)
elif count > 1:
raise ValueError("%d tenants with name %s" % (count, name))
else:
return tenants[0]
def get_user(keystone, name):
""" Retrieve a user by name"""
users = [x for x in keystone.users.list() if x.name == name]
count = len(users)
if count == 0:
raise KeyError("No keystone users with name %s" % name)
elif count > 1:
raise ValueError("%d users with name %s" % (count, name))
else:
return users[0]
def get_role(keystone, name):
""" Retrieve a role by name"""
roles = [x for x in keystone.roles.list() if x.name == name]
count = len(roles)
if count == 0:
raise KeyError("No keystone roles with name %s" % name)
elif count > 1:
raise ValueError("%d roles with name %s" % (count, name))
else:
return roles[0]
def get_tenant_id(keystone, name):
return get_tenant(keystone, name).id
def get_user_id(keystone, name):
return get_user(keystone, name).id
def ensure_tenant_exists(keystone, tenant_name, tenant_description,
check_mode):
""" Ensure that a tenant exists.
Return (True, id) if a new tenant was created, (False, None) if it
already existed.
"""
# Check if tenant already exists
try:
tenant = get_tenant(keystone, tenant_name)
except KeyError:
# Tenant doesn't exist yet
pass
else:
if tenant.description == tenant_description:
return (False, tenant.id)
else:
# We need to update the tenant description
if check_mode:
return (True, tenant.id)
else:
tenant.update(description=tenant_description)
return (True, tenant.id)
# We now know we will have to create a new tenant
if check_mode:
return (True, None)
ks_tenant = keystone.tenants.create(tenant_name=tenant_name,
description=tenant_description,
enabled=True)
return (True, ks_tenant.id)
def ensure_tenant_absent(keystone, tenant, check_mode):
""" Ensure that a tenant does not exist
Return True if the tenant was removed, False if it didn't exist
in the first place
"""
if not tenant_exists(keystone, tenant):
return False
# We now know we will have to delete the tenant
if check_mode:
return True
def ensure_user_exists(keystone, user_name, password, email, tenant_name,
check_mode):
""" Check if user exists
Return (True, id) if a new user was created, (False, id) user already
exists
"""
# Check if tenant already exists
try:
user = get_user(keystone, user_name)
except KeyError:
# Tenant doesn't exist yet
pass
else:
# User does exist, we're done
return (False, user.id)
# We now know we will have to create a new user
if check_mode:
return (True, None)
tenant = get_tenant(keystone, tenant_name)
user = keystone.users.create(name=user_name, password=password,
email=email, tenant_id=tenant.id)
return (True, user.id)
def ensure_role_exists(keystone, role_name):
# Get the role if it exists
try:
role = get_role(keystone, role_name)
# Role does exist, we're done
        return (False, role.id)
except KeyError:
# Role doesn't exist yet
pass
role = keystone.roles.create(role_name)
return (True, role.id)
def ensure_user_role_exists(keystone, user_name, tenant_name, role_name,
check_mode):
""" Check if role exis
|
ts
Return (True, id) if a new role was created or if the role was newly
assigned to the user for the tenant. (False, id) if the role already
    exists and was already assigned to the user for the tenant.
"""
# Check if the user has the role in the tenant
user = get_user(keystone, user_name)
tenant = get_tenant(keystone, tenant_name)
roles = [x for x in keystone.roles.roles_for_user(user, tenant)
|