text | repo_name | path | language | license | size | score
stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34
---|---|---|---|---|---|---|
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2016, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'table19.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with tables."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.set_column('C:F', 10.288)
worksheet.add_table('C3:F13',
{'columns': [{},
{},
{},
{'header': " Column4 "}]})
workbook.close()
self.assertExcelEqual()
| jkyeung/XlsxWriter | xlsxwriter/test/comparison/test_table19.py | Python | bsd-2-clause | 1,269 | 0 |
# Django
from django.db import models
# Third-party apps
import jsonfield # http://pypi.python.org/pypi/django-jsonfield/
from lxml import etree # http://lxml.de/
# Internal
from .log import default_logger as logger
from .utils.introspection import ModelFactory
from .utils.serializers import deserialize_function
from .utils.xmlhelper import XMLHelper
from .utils import sum_dicts
class Mapping(models.Model):
"""A mapping configuration."""
data_map = jsonfield.JSONField(default='{}') # need a default value
label = models.CharField(max_length=255, unique=True) # label for reference
def __unicode__(self):
return u'%s' % (
self.label,
)
@property
def log_desc(self):
return u'<Mapping: %s>' % (self,)
def load_xml(self, xml, root_path=None):
"""Loads a piece of XML in the DB, i.e. map XML data to a Django Model.
Args:
xml: a string being the XML data to load
root_path: the root (dotted path) of the XML data. Not mandatory but needed when the XML is not the root as defined in the mapping.
e.g. If you defined a mapping for rss.channel.item
and the XML you are passing actually starts with the channel element, you must then set root_path to rss.channel
Returns:
A dict summarizing the number of objects created per element-mapping.
        """
log_desc = '%s - Loading XML' % (self.log_desc,)
try:
# Parse the XML
root = etree.fromstring(xml, parser=etree.XMLParser())
        except Exception as e:
            logger.error('%s => XML cannot be parsed. [KO]\n%s' % (log_desc, e))
            return {}  # empty summary dict, consistent with the documented return type
nb_created = {k: 0 for k in self.data_map.keys()}
# For each element-mapping
for e_path, conf in self.data_map.iteritems():
nb_created[e_path] = nb_created[e_path] + self._map_elements_by_path(e_path, conf, root, root_path)
logger.info('%s => %s' % (log_desc, ' ; '.join(['%s: %s objects created' % (k, v) for (k, v) in nb_created.items()])))
return nb_created
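    # Illustrative usage sketch (hypothetical label and XML, not part of the
    # original source): given a data_map keyed by 'rss.channel.item' and XML
    # that starts at the <channel> element:
    #
    #   mapping = Mapping.objects.get(label='rss-items')
    #   created = mapping.load_xml(xml_string, root_path='rss.channel')
    #   # created is e.g. {'rss.channel.item': 12}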
def load_xml_chunks(self, xml_chunks, root_path):
"""Loads a collection of XML chunks being all of the same kind.
Args:
xml_chunks: a list of XML string data to load
root_path: the root (dotted path) of the XML data. Not mandatory but needed when the XML is not the root as defined in the mapping.
e.g. If you defined a mapping for rss.channel.item
and the XML you are passing actually starts with the channel element, you must then set root_path to rss.channel
Returns:
A dict summarizing the number of objects created per element-mapping.
TODO: Make it more efficient instead of a simple loop.
"""
log_desc = '%s - Loading XML chunks' % (self.log_desc,)
logger.info('%s => start' % (log_desc,))
nb_created = {}
for xml in xml_chunks:
nb_created = sum_dicts(nb_created, self.load_xml(xml, root_path))
logger.info('%s => end' % (log_desc,))
return nb_created
def _map_elements_by_path(self, path, conf, node, node_path):
"""Maps all the elements matching the path in the node with the mapping configuration.
Args:
path: the path of the elements to seek
conf: the mapping configuration
node: the node from which to seek
node_path: the path of the node
Returns:
The number of Models created in the DB for all the found elements.
"""
        # Get the configuration
        get_id = conf.get('get_id', None)
        models = conf.get('models', None)
        if models is None:
            # use self.log_desc here: the local log_desc is only built once models is known
            logger.error('%s => No models found in the configuration. [KO]\nconfiguration=%s' % (self.log_desc, conf))
            return 0
        log_desc = '%s - Mapping all the elements matching path=%s to %s Models' % (self.log_desc, path, len(models))
# Get all the matching elements
elems = XMLHelper.get_elements(path, node, node_path)
# Log if no elements were found.
if not elems:
logger.warning('%s => No elements found. node_path=%s' % (log_desc, node_path))
return 0
nb_created = 0
for elem in elems:
nb_created = nb_created + self._map_element(elem, models, get_id)
nb_elems = len(elems)
nb_targeted = nb_elems * len(models)
logger.info('%s => Found: %s, Targeted Objects: %s, Created Objects: %s %s' % (
log_desc,
nb_elems,
nb_targeted,
nb_created,
(nb_targeted == nb_created and ['[OK]'] or ['=> numbers different [KO]'])[0]
)
)
return nb_created
def _map_element(self, element, models, get_id=None):
"""Maps an element to several models.
Args:
element: an XML element
            models: the models to map to
            get_id: the function used to calculate the ID of the element, to identify it among the others.
Returns:
The number of Models created in the DB for the passed element.
"""
elem_id = '(id:%s) ' % (self._resolve_get_id(get_id)(element),)
status = {k: '[KO]' for k in models.keys()}
nb_created = 0
for app_model, fields in models.iteritems():
try:
ins = self._map_to_model(element, app_model, fields)
status[app_model] = 'pk=%s' % (ins.pk)
nb_created = nb_created + 1
                logger.info('%s - Mapping the element %sto the Model %s with fields %s => object created, pk=%s [OK]' % (
self.log_desc,
elem_id,
app_model,
fields,
ins.pk,
)
)
except Exception as err:
                logger.error('%s - Mapping the element %sto the Model %s with fields %s => Cannot be mapped. [KO]\n%s' % (
self.log_desc,
elem_id,
app_model,
fields,
err,
)
)
logger.info('%s - Element %smapped to %s Models => %s' % (
self.log_desc,
elem_id,
len(models),
' ; '.join(['%s: %s' % (k, v) for (k, v) in status.items()]),
)
)
return nb_created
def _map_to_model(self, element, app_model, fields):
"""Maps an element to a Model.
Args:
element: the XML element to map
app_model: the model to map defined by: app_label.model_name
fields: the fields mapping
Returns:
The instance of the created Model.
"""
ins = ModelFactory.create(app_model)
self._map_to_fields(element, ins, fields)
ins.save()
return ins
def _map_to_fields(self, element, ins, fields):
"""Maps an element to the fields.
Args:
element: the XML element to map
ins: the instance of the created Model
fields: the fields mapping
"""
for field, configuration in fields.items():
if isinstance(configuration, basestring):
setattr(ins, field, XMLHelper.get_text_unescape(element, configuration))
elif isinstance(configuration, list):
values = (XMLHelper.get_text_unescape(element, v) for v in configuration)
setattr(ins, field, ' '.join(values))
elif isinstance(configuration, dict):
pass # TODO: handles advanced transformers
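    # Illustrative shape of a fields mapping handled above (hypothetical field
    # names and element paths, not from the original source):
    #
    #   {'title': 'title',                       # string: single text lookup
    #    'name': ['first_name', 'last_name'],    # list: values joined with spaces
    #    'extra': {}}                            # dict: advanced transformer (TODO)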
def _resolve_get_id(self, get_id):
"""Resolves which function should be used to calculate the ID of an element.
Args:
get_id: a function/method or a string to use an inner element.
Returns:
A function that will take an element and returns an ID.
"""
# Try to deserialize it
try:
return deserialize_function(get_id)
except:
pass
# Deserialization could not figure it out
# so let's assume it is a tag and we want to use the text of the element
if isinstance(get_id, basestring):
return lambda x: XMLHelper.get_text(x, get_id)
# Nothing works, returns the get_id
return lambda x: get_id
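    # Resolution order illustrated above: a dotted path to a callable is
    # deserialized and used directly; otherwise a string such as 'guid' is
    # treated as a tag name and the text of the element's <guid> child is
    # used; anything else becomes a constant ID.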
| YAmikep/django-xmlmapping | xmlmapping/models.py | Python | bsd-3-clause | 8,995 | 0.002779 |
"""Usage: dbutils.py [-dfh]
Options:
-d --dropall Deletes all collections in the database. Use this very wisely.
-f --force Forces all questions to 'yes'
-h --help show this
"""
import sys
from docopt import docopt
from laserpony import app
from laserpony.util import db
##UTILITY FUNCTIONS
#snagged this from http://code.activestate.com/recipes/577058/
def query_yes_no(question, default="yes"):
"""Ask a yes/no question via raw_input() and return their answer.
"question" is a string that is presented to the user.
"default" is the presumed answer if the user just hits <Enter>.
It must be "yes" (the default), "no" or None (meaning
an answer is required of the user).
The "answer" return value is one of "yes" or "no".
"""
valid = {"yes":True, "y":True, "ye":True,
"no":False, "n":False}
if default == None:
prompt = " [y/n] "
elif default == "yes":
prompt = " [Y/n] "
elif default == "no":
prompt = " [y/N] "
else:
raise ValueError("invalid default answer: '%s'" % default)
while True:
sys.stdout.write(question + prompt)
choice = raw_input().lower()
if default is not None and choice == '':
return valid[default]
elif choice in valid:
return valid[choice]
else:
sys.stdout.write("Please respond with 'yes' or 'no' "\
"(or 'y' or 'n').\n")
# Parse the command-line arguments
arguments = docopt(__doc__, argv=sys.argv[1:], version='0.1')
delete_db_message = "Are you absolutely sure you want to delete the entire database?"
if arguments['--dropall']:
if arguments['--force']:
db.connection.drop_database(app.config['MONGODB_SETTINGS']['DB'])
else:
if query_yes_no(delete_db_message, default="no"):
db.connection.drop_database(app.config['MONGODB_SETTINGS']['DB'])
| JackRamey/LaserPony | dbutils.py | Python | mit | 1,980 | 0.007071 |
from GUIComponent import GUIComponent
from VariableText import VariableText
from os import statvfs
from enigma import eLabel
# TODO: Harddisk.py has similar functions, but only similar.
# Fix this to use the same code.
class DiskInfo(VariableText, GUIComponent):
FREE = 0
USED = 1
SIZE = 2
def __init__(self, path, type, update = True):
GUIComponent.__init__(self)
VariableText.__init__(self)
self.type = type
self.path = path
if update:
self.update()
def update(self):
try:
stat = statvfs(self.path)
except OSError:
return -1
if self.type == self.FREE:
try:
percent = '(' + str((100 * stat.f_bavail) // stat.f_blocks) + '%)'
free = stat.f_bfree * stat.f_bsize
if free < 10000000:
free = _("%d Kb") % (free >> 10)
elif free < 10000000000:
free = _("%d Mb") % (free >> 20)
else:
free = _("%d Gb") % (free >> 30)
self.setText(" ".join((free, percent, _("free diskspace"))))
except:
# occurs when f_blocks is 0 or a similar error
self.setText("-?-")
GUI_WIDGET = eLabel
| bally12345/enigma2 | lib/python/Components/DiskInfo.py | Python | gpl-2.0 | 1,054 | 0.032258 |
'''
Copyright (c) zhouyuding1990@gmail.com
'''
import unittest
from chapter22_elementary import Vertex
from chapter22_elementary import breath_first_search_input_adjacency_list
def printX(x):
print x
class MyTestCase(unittest.TestCase):
def test_something(self):
self.assertEqual(True, False)
def test_run_BFS_on_adjacency_list(self):
''' 4X4 vertex'''
v1 = Vertex(1)
v3 = Vertex(3)
v4 = Vertex(4)
v6 = Vertex(6)
v7 = Vertex(7)
v10 = Vertex(10)
v14 = Vertex(14)
v15 = Vertex(15)
v1.ADJ_ = [v4, v6]
v3.ADJ_ = [v6, v7]
v4.ADJ_ = [v1]
v6.ADJ_ = [v1, v3, v7, v10]
v7.ADJ_ = [v3, v6, v10]
v10.ADJ_ = [v6, v7, v14, v15]
v14.ADJ_ = [v10, v15]
v15.ADJ_ = [v14]
print """
input graph
0 1 2 3
+---+---+---+---+
0 | | * | | * |
+---+---+---+---+
1 | * | | * | * |
+---+---+---+---+
2 | | | * | |
+---+---+---+---+
3 | | | * | * |
+---+---+---+---+
"""
G = [v1, v3, v4, v6, v7, v10, v14, v15]
print "start with v3"
breath_first_search_input_adjacency_list(G, v3, lambda u: printX(u))
if __name__ == '__main__':
unittest.main()
| YudingZhou/Yo-Fatty | ITA/graphic/chapter22_elementary_test.py | Python | gpl-2.0 | 1,375 | 0 |
# -*- coding: utf-8 -*-
# Copyright(C) 2013 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.backend import Module, BackendConfig
from weboob.tools.value import ValueBackendPassword
from weboob.capabilities.bank import CapBank, AccountNotFound
from .browser import CitelisBrowser
__all__ = ['CitelisModule']
class CitelisModule(Module, CapBank):
NAME = 'citelis'
DESCRIPTION = u'Citélis'
MAINTAINER = u'Laurent Bachelier'
EMAIL = 'laurent@bachelier.name'
LICENSE = 'AGPLv3+'
VERSION = '1.1'
BROWSER = CitelisBrowser
CONFIG = BackendConfig(
ValueBackendPassword('merchant_id', label='Merchant ID', masked=False),
ValueBackendPassword('login', label='Account ID', masked=False),
ValueBackendPassword('password', label='Password'))
def create_default_browser(self):
return self.create_browser(self.config['merchant_id'].get(),
self.config['login'].get(),
self.config['password'].get())
def iter_accounts(self):
return self.browser.get_accounts_list()
def get_account(self, _id):
for account in self.iter_accounts():
if account.id == _id:
return account
raise AccountNotFound()
def iter_history(self, account):
return self.browser.iter_history(account)
| sputnick-dev/weboob | modules/citelis/module.py | Python | agpl-3.0 | 2,036 | 0 |
class Solution(object):
def reverseBits(self, n):
"""
:type n: int
:rtype: int
"""
ret = 0
for i in range(32):
ret += (n%2) * 2**(31-i)
n /= 2
        return ret
| xingjian-f/Leetcode-solution | 190. Reverse Bits.py | Python | mit | 231 | 0.030303 |
import time
from datetime import datetime
def format_ts_from_float(ts):
return int(ts) * 1000000000
def format_ts_from_date(ts):
return format_ts_from_float(time.mktime(ts.timetuple()))
def format_ts_from_str(ts, pattern='%Y-%m-%d %H:%M:%S'):
return format_ts_from_date(datetime.strptime(ts, pattern))
def format_ts_from_last_modified(ts, pattern='%a, %d %b %Y %H:%M:%S %Z'):
ts = datetime.strptime(ts, pattern)
return int(time.mktime(ts.timetuple()) * 1000)
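# Illustrative usage (hypothetical timestamps; results depend on the local
# timezone used by time.mktime):
#   format_ts_from_str('2020-01-01 00:00:00')  # -> epoch seconds * 1e9 (ns)
#   format_ts_from_last_modified('Wed, 01 Jan 2020 00:00:00 GMT')  # -> epoch ms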
| rdo-infra/ci-config | ci-scripts/infra-setup/roles/rrcockpit/files/telegraf_py3/influxdb_utils.py | Python | apache-2.0 | 487 | 0 |
# vim: ts=4:sw=4:expandtab
# BleachBit
# Copyright (C) 2014 Andrew Ziem
# http://bleachbit.sourceforge.net
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Create cleaners from CleanerML (markup language)
"""
import os
import sys
import traceback
import xml.dom.minidom
import Cleaner
import Common
from Action import ActionProvider
from Common import _
from General import boolstr_to_bool, getText
from FileUtilities import listdir
class CleanerML:
"""Create a cleaner from CleanerML"""
def __init__(self, pathname, xlate_cb=None):
"""Create cleaner from XML in pathname.
If xlate_cb is set, use it as a callback for each
translate-able string.
"""
self.action = None
self.cleaner = Cleaner.Cleaner()
self.option_id = None
self.option_name = None
self.option_description = None
self.option_warning = None
self.xlate_cb = xlate_cb
if None == self.xlate_cb:
self.xlate_cb = lambda x, y = None: None # do nothing
dom = xml.dom.minidom.parse(pathname)
self.handle_cleaner(dom.getElementsByTagName('cleaner')[0])
def get_cleaner(self):
"""Return the created cleaner"""
return self.cleaner
def os_match(self, os_str):
"""Return boolean whether operating system matches"""
# If blank or if in .pot-creation-mode, return true.
if len(os_str) == 0 or None != self.xlate_cb:
return True
# Otherwise, check platform.
if os_str == 'linux' and sys.platform.startswith('linux'):
return True
if os_str == 'windows' and sys.platform == 'win32':
return True
return False
def handle_cleaner(self, cleaner):
"""<cleaner> element"""
if not self.os_match(cleaner.getAttribute('os')):
return
self.cleaner.id = cleaner.getAttribute('id')
self.handle_cleaner_label(cleaner.getElementsByTagName('label')[0])
description = cleaner.getElementsByTagName('description')
if description and description[0].parentNode == cleaner:
self.handle_cleaner_description(description[0])
for option in cleaner.getElementsByTagName('option'):
try:
self.handle_cleaner_option(option)
except:
print str(sys.exc_info()[1])
print option.toxml()
self.handle_cleaner_running(cleaner.getElementsByTagName('running'))
def handle_cleaner_label(self, label):
"""<label> element under <cleaner>"""
self.cleaner.name = _(getText(label.childNodes))
translate = label.getAttribute('translate')
if translate and boolstr_to_bool(translate):
self.xlate_cb(self.cleaner.name)
def handle_cleaner_description(self, description):
"""<description> element under <cleaner>"""
self.cleaner.description = _(getText(description.childNodes))
self.xlate_cb(self.cleaner.description)
def handle_cleaner_running(self, running_elements):
"""<running> element under <cleaner>"""
# example: <running type="command">opera</running>
for running in running_elements:
detection_type = running.getAttribute('type')
value = getText(running.childNodes)
self.cleaner.add_running(detection_type, value)
def handle_cleaner_option(self, option):
"""<option> element"""
self.option_id = option.getAttribute('id')
self.option_description = None
self.option_name = None
self.handle_cleaner_option_label(
option.getElementsByTagName('label')[0])
description = option.getElementsByTagName('description')
self.handle_cleaner_option_description(description[0])
warning = option.getElementsByTagName('warning')
if warning:
self.handle_cleaner_option_warning(warning[0])
if self.option_warning:
self.cleaner.set_warning(self.option_id, self.option_warning)
for action in option.getElementsByTagName('action'):
self.handle_cleaner_option_action(action)
self.cleaner.add_option(
self.option_id, self.option_name, self.option_description)
def handle_cleaner_option_label(self, label):
"""<label> element under <option>"""
self.option_name = _(getText(label.childNodes))
translate = label.getAttribute('translate')
translators = label.getAttribute('translators')
if not translate or boolstr_to_bool(translate):
self.xlate_cb(self.option_name, translators)
def handle_cleaner_option_description(self, description):
"""<description> element under <option>"""
self.option_description = _(getText(description.childNodes))
self.xlate_cb(self.option_description)
translators = description.getAttribute('translators')
self.xlate_cb(self.option_description, translators)
def handle_cleaner_option_warning(self, warning):
"""<warning> element under <option>"""
self.option_warning = _(getText(warning.childNodes))
self.xlate_cb(self.option_warning)
def handle_cleaner_option_action(self, action_node):
"""<action> element under <option>"""
command = action_node.getAttribute('command')
provider = None
for actionplugin in ActionProvider.plugins:
if actionplugin.action_key == command:
provider = actionplugin(action_node)
if None == provider:
raise RuntimeError("Invalid command '%s'" % command)
self.cleaner.add_action(self.option_id, provider)
def list_cleanerml_files(local_only=False):
"""List CleanerML files"""
cleanerdirs = (Common.local_cleaners_dir,
Common.personal_cleaners_dir)
if not local_only and Common.system_cleaners_dir:
cleanerdirs += (Common.system_cleaners_dir, )
for pathname in listdir(cleanerdirs):
if not pathname.lower().endswith('.xml'):
continue
import stat
st = os.stat(pathname)
if sys.platform != 'win32' and stat.S_IMODE(st[stat.ST_MODE]) & 2:
print "warning: ignoring cleaner '%s' because it is world writable" % pathname
continue
yield pathname
def load_cleaners():
"""Scan for CleanerML and load them"""
for pathname in list_cleanerml_files():
try:
xmlcleaner = CleanerML(pathname)
except:
print "Error reading file '%s'" % pathname
traceback.print_exc()
else:
cleaner = xmlcleaner.get_cleaner()
if cleaner.is_usable():
Cleaner.backends[cleaner.id] = cleaner
else:
print "debug: '%s' is not usable" % pathname
def pot_fragment(msgid, pathname, translators=None):
"""Create a string fragment for generating .pot files"""
if translators:
translators = "#. %s\n" % translators
else:
translators = ""
ret = '''%s#: %s
msgid "%s"
msgstr ""
''' % (translators, pathname, msgid)
return ret
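# Illustrative output of pot_fragment (hypothetical values):
#   #. translators note
#   #: ../cleaners/foo.xml
#   msgid "Cache"
#   msgstr ""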
def create_pot():
"""Create a .pot for translation using gettext"""
f = open('../po/cleanerml.pot', 'w')
for pathname in listdir('../cleaners'):
if not pathname.lower().endswith(".xml"):
continue
strings = []
try:
CleanerML(pathname,
lambda newstr, translators=None:
strings.append([newstr, translators]))
except:
print "error reading '%s'" % pathname
traceback.print_exc()
else:
for (string, translators) in strings:
f.write(pot_fragment(string, pathname, translators))
f.close()
if __name__ == '__main__':
if 2 == len(sys.argv) and 'pot' == sys.argv[1]:
create_pot()
| maximilianofaccone/puppy-siberian | usr/share/bleachbit/CleanerML.py | Python | gpl-3.0 | 8,532 | 0.00082 |
from extractors import XPathExtractor
from parser import Parser
from rules import ConstRule, Map, MapRule, SubPathRule, UrlRule, XPathRule
def is_list(obj):
return isinstance(obj, (list, tuple))
def is_str(obj):
return isinstance(obj, (str, unicode))
def parse_xpath_rule(line):
l = len(line)
if l == 2:
# Basic XPath
if is_str(line[1]):
return XPathRule(line[0], line[1])
# SubPath
elif is_list(line[1]):
sub_rules = [parse_xpath_rule(sub_rule) for sub_rule in line[1]]
return SubPathRule(line[0], *sub_rules)
elif l == 3:
# Keyword XPath
if isinstance(line[2], dict):
return XPathRule(line[0], line[1], **line[2])
# MapRule
elif is_list(line[2]):
maps = []
for map_line in line[2]:
len_map = len(map_line)
if len_map == 3:
maps.append(Map(map_line[0], map_line[1], XPathExtractor(map_line[2])))
elif len_map == 4:
join = map_line[3].pop('join', None)
maps.append(Map(map_line[0], map_line[1], XPathExtractor(map_line[2], **map_line[3]), **{'join': join}))
extractor = XPathExtractor(line[1]) if is_str(line[1]) else line[1]
return MapRule(line[0], extractor, *maps)
print 'Unknown rule : %r' % (line,)
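# Illustrative rule shapes accepted above (hypothetical field names, XPaths
# and keyword arguments, not from the original source):
#   ('title', './/h1/text()')                         -> XPathRule
#   ('meta', [('author', './/span/text()')])          -> SubPathRule
#   ('price', './/em/text()', {'some_kwarg': True})   -> keyword XPathRule
#   ('tags', './/ul', [('tag', 'name', './li')])      -> MapRule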
def itemagic(const=None, url=None, xpath=None, *args):
rules = []
# Build const rules
if is_list(const):
for line in const:
rules.append(ConstRule(line[0], line[1]))
elif isinstance(const, dict):
for field in const:
rules.append(ConstRule(field, const[field]))
# Build url rule
if is_str(url):
rules.append(UrlRule(url))
# Build xpath rules
if is_list(xpath):
for line in xpath:
rules.append(parse_xpath_rule(line))
    return Parser(*rules)
| n6g7/scrapy-itemagic | itemagic/magic.py | Python | mit | 1,649 | 0.032141 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# workspace.py
#
# Copyright (c) 2014
# Author: Claudio Driussi <claudio.driussi@gmail.com>
#
from sqlalchemy.ext.declarative import declarative_base
from .db import *
class WorkSpace(object):
"""Encapsulate an whole SQLAlchemy orm from an DynaQ db definition object"""
def __init__(self, db, engine):
"""init the workspace
:param db: DynaQ db definition object
:param engine: SQLAlchemy engine string
:return: None
"""
self.db = db
self.engine = engine
self.metadata = sa.MetaData()
self.Base = declarative_base(self.engine, self.metadata)
self.tables = {}
def generate_orm(self, prefix='', pref_tabels={}, defaults={}):
"""Generate the SQLAlchemy orm objects
the objects are stored in self.tables dictionary
        :param prefix: an optional prefix for table names ie: if the table
          name is "user" and prefix is "data_" the name becomes "data_user"
        :param pref_tabels: an optional dict of per-table prefix overrides
          ie: if pref_tabels is {'zip_codes': ''} the name of the zip table
          stays "zip_codes" even if prefix is "data_"
        :param defaults: functions for handling default values (not handled yet)
:return: an self.sa_obj() objet for convenient handle of orm classes
"""
# build objects
self.tables = {}
for table in self.db.tables.values():
self.tables[table.alias] = \
type(table.name.capitalize(),(self.Base,),
self._set_table(table, prefix, pref_tabels, defaults))
# build relations
for alias in self.tables:
self._set_retations(alias)
return self.sa_obj()
def _set_table(self, table, prefix='', pref_tabels={}, defaults={}):
"""Create a SQLAlchemy class object
        This private method, called from self.generate_orm, is the core of
        SQLAlchemy object creation; all supported features are implemented
        here.
:param table: the DynaQ table for class generation
:param prefix: same of generate_orm
:param pref_tabels: same of generate_orm
:param defaults: same of generate_orm
:return: the class object for the table
"""
def get_name(tname):
s = tname.replace('_'+USRFLD_KEY,'')
pref = pref_tabels[s] if s in pref_tabels else prefix
return pref + tname
table_data = {}
table_data['__tablename__'] = get_name(table.name)
table_data['__dqt__'] = table
for f in table.fields:
foreignkey = None
if isinstance(f.type, Table):
foreignkey = "%s.%s" % (get_name(f.type.name), f.type.key.name)
db_type = f.get_type()
sa_type = db_type.sa_type
if db_type.length and sa_type in [sa.Numeric, sa.Float]:
sa_type = sa_type(db_type.length, f.get('decimals'))
if db_type.length and sa_type in [sa.String, sa.String, sa.CHAR, sa.LargeBinary, sa.Text,]:
sa_type = sa_type(db_type.length)
if foreignkey:
c = sa.Column(sa_type, sa.ForeignKey(foreignkey))
else:
c = sa.Column(sa_type, primary_key=f == table.key)
c.__dqf__ = f
default = defaults.get(f.get('default'), None)
if default:
c.ColumnDefault(default)
table_data[f.name] = c
ii = []
for i in table.indexes:
if i.name == 'primary':
continue
ii.append(sa.Index('idx_%s_%s' % (table.alias, i.name), *i.fields))
# if needed add more table args
if ii:
table_data['__table_args__'] = tuple(ii)
return table_data
def _set_retations(self, alias):
"""Create the orm relationships.
        This private method, called from self.generate_orm, generates the
        one-to-many relations for each related field of the table pointed
        to by the alias parameter. It handles "cascade referential
        integrity" if the related field has the property "child == True".
:param alias: alias name of the table
:return: None
"""
for field in self.db.tables[alias].fields:
if field.get('child'):
parent = self.tables[field.type.alias]
child = self.tables[alias]
setattr(parent, alias,
sa.orm.relationship(child,
backref=parent.__tablename__,
cascade="all, delete, delete-orphan"))
def sa_obj(self):
"""Build a convenient object for accessing to SqlAlchemy ORM objects
Example:
ws = dq.WorkSpace(db, engine)
ws.generate_orm()
o = ws.sa_obj()
now if in your definition is a table called users, you can do:
user = o.users()
:return: the container object
"""
t = {}
for k,v in self.tables.items():
t[k] = v
return type('WSO', (object,), t)
def session(self):
"""Return a session instance for the workspace"""
return sa.orm.sessionmaker(bind=self.engine)()
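    # Minimal end-to-end sketch (hypothetical 'db' definition object, engine
    # and 'users' table alias; not from the original source):
    #   ws = WorkSpace(db, sa.create_engine('sqlite://'))
    #   o = ws.generate_orm(prefix='data_')
    #   session = ws.session()
    #   session.add(o.users(name='alice'))
    #   session.commit()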
| claudiodriussi/DynaQ | dynaq/workspace.py | Python | lgpl-3.0 | 5,345 | 0.002058 |
import json
from moto.core.responses import BaseResponse
from moto.core.utils import amzn_request_id
from .models import transcribe_backends
class TranscribeResponse(BaseResponse):
@property
def transcribe_backend(self):
return transcribe_backends[self.region]
@property
def request_params(self):
try:
return json.loads(self.body)
except ValueError:
return {}
@amzn_request_id
def start_transcription_job(self):
name = self._get_param("TranscriptionJobName")
response = self.transcribe_backend.start_transcription_job(
transcription_job_name=name,
language_code=self._get_param("LanguageCode"),
media_sample_rate_hertz=self._get_param("MediaSampleRateHertz"),
media_format=self._get_param("MediaFormat"),
media=self._get_param("Media"),
output_bucket_name=self._get_param("OutputBucketName"),
output_key=self._get_param("OutputKey"),
output_encryption_kms_key_id=self._get_param("OutputEncryptionKMSKeyId"),
settings=self._get_param("Settings"),
model_settings=self._get_param("ModelSettings"),
job_execution_settings=self._get_param("JobExecutionSettings"),
content_redaction=self._get_param("ContentRedaction"),
identify_language=self._get_param("IdentifyLanguage"),
language_options=self._get_param("LanguageOptions"),
)
return json.dumps(response)
@amzn_request_id
def start_medical_transcription_job(self):
name = self._get_param("MedicalTranscriptionJobName")
response = self.transcribe_backend.start_medical_transcription_job(
medical_transcription_job_name=name,
language_code=self._get_param("LanguageCode"),
media_sample_rate_hertz=self._get_param("MediaSampleRateHertz"),
media_format=self._get_param("MediaFormat"),
media=self._get_param("Media"),
output_bucket_name=self._get_param("OutputBucketName"),
output_encryption_kms_key_id=self._get_param("OutputEncryptionKMSKeyId"),
settings=self._get_param("Settings"),
specialty=self._get_param("Specialty"),
type=self._get_param("Type"),
)
return json.dumps(response)
@amzn_request_id
def list_transcription_jobs(self):
state_equals = self._get_param("Status")
job_name_contains = self._get_param("JobNameContains")
next_token = self._get_param("NextToken")
max_results = self._get_param("MaxResults")
response = self.transcribe_backend.list_transcription_jobs(
state_equals=state_equals,
job_name_contains=job_name_contains,
next_token=next_token,
max_results=max_results,
)
return json.dumps(response)
@amzn_request_id
def list_medical_transcription_jobs(self):
status = self._get_param("Status")
job_name_contains = self._get_param("JobNameContains")
next_token = self._get_param("NextToken")
max_results = self._get_param("MaxResults")
response = self.transcribe_backend.list_medical_transcription_jobs(
status=status,
job_name_contains=job_name_contains,
next_token=next_token,
max_results=max_results,
)
return json.dumps(response)
@amzn_request_id
def get_transcription_job(self):
transcription_job_name = self._get_param("TranscriptionJobName")
response = self.transcribe_backend.get_transcription_job(
transcription_job_name=transcription_job_name
)
return json.dumps(response)
@amzn_request_id
def get_medical_transcription_job(self):
medical_transcription_job_name = self._get_param("MedicalTranscriptionJobName")
response = self.transcribe_backend.get_medical_transcription_job(
medical_transcription_job_name=medical_transcription_job_name
)
return json.dumps(response)
@amzn_request_id
def delete_transcription_job(self):
transcription_job_name = self._get_param("TranscriptionJobName")
response = self.transcribe_backend.delete_transcription_job(
transcription_job_name=transcription_job_name
)
return json.dumps(response)
@amzn_request_id
def delete_medical_transcription_job(self):
medical_transcription_job_name = self._get_param("MedicalTranscriptionJobName")
response = self.transcribe_backend.delete_medical_transcription_job(
medical_transcription_job_name=medical_transcription_job_name
)
return json.dumps(response)
@amzn_request_id
def create_vocabulary(self):
vocabulary_name = self._get_param("VocabularyName")
language_code = self._get_param("LanguageCode")
phrases = self._get_param("Phrases")
vocabulary_file_uri = self._get_param("VocabularyFileUri")
response = self.transcribe_backend.create_vocabulary(
vocabulary_name=vocabulary_name,
language_code=language_code,
phrases=phrases,
vocabulary_file_uri=vocabulary_file_uri,
)
return json.dumps(response)
@amzn_request_id
def create_medical_vocabulary(self):
vocabulary_name = self._get_param("VocabularyName")
language_code = self._get_param("LanguageCode")
vocabulary_file_uri = self._get_param("VocabularyFileUri")
response = self.transcribe_backend.create_medical_vocabulary(
vocabulary_name=vocabulary_name,
language_code=language_code,
vocabulary_file_uri=vocabulary_file_uri,
)
return json.dumps(response)
@amzn_request_id
def get_vocabulary(self):
vocabulary_name = self._get_param("VocabularyName")
response = self.transcribe_backend.get_vocabulary(
vocabulary_name=vocabulary_name
)
return json.dumps(response)
@amzn_request_id
def get_medical_vocabulary(self):
vocabulary_name = self._get_param("VocabularyName")
response = self.transcribe_backend.get_medical_vocabulary(
vocabulary_name=vocabulary_name
)
return json.dumps(response)
@amzn_request_id
def list_vocabularies(self):
state_equals = self._get_param("StateEquals")
name_contains = self._get_param("NameContains")
next_token = self._get_param("NextToken")
max_results = self._get_param("MaxResults")
response = self.transcribe_backend.list_vocabularies(
state_equals=state_equals,
name_contains=name_contains,
next_token=next_token,
max_results=max_results,
)
return json.dumps(response)
@amzn_request_id
def list_medical_vocabularies(self):
state_equals = self._get_param("StateEquals")
name_contains = self._get_param("NameContains")
next_token = self._get_param("NextToken")
max_results = self._get_param("MaxResults")
response = self.transcribe_backend.list_medical_vocabularies(
state_equals=state_equals,
name_contains=name_contains,
next_token=next_token,
max_results=max_results,
)
return json.dumps(response)
@amzn_request_id
def delete_vocabulary(self):
vocabulary_name = self._get_param("VocabularyName")
response = self.transcribe_backend.delete_vocabulary(
vocabulary_name=vocabulary_name
)
return json.dumps(response)
@amzn_request_id
def delete_medical_vocabulary(self):
vocabulary_name = self._get_param("VocabularyName")
response = self.transcribe_backend.delete_medical_vocabulary(
vocabulary_name=vocabulary_name
)
return json.dumps(response)
| spulec/moto | moto/transcribe/responses.py | Python | apache-2.0 | 7,998 | 0.0005 |
# -*- coding: utf-8 -*-
#
# (c) Copyright 2003-2007 Hewlett-Packard Development Company, L.P.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Author: Don Welch
#
# RFC 1035
# Std Lib
import sys
import time
import socket
import select
import struct
import random
import re
import cStringIO
# Local
from g import *
import utils
MAX_ANSWERS_PER_PACKET = 24
QTYPE_A = 1
QTYPE_TXT = 16
QTYPE_SRV = 33
QTYPE_AAAA = 28
QTYPE_PTR = 12
QCLASS_IN = 1
def read_utf8(offset, data, l):
return offset+l, data[offset:offset+l].decode('utf-8')
def read_data(offset, data, l):
return offset+l, data[offset:offset+l]
def read_data_unpack(offset, data, fmt):
l = struct.calcsize(fmt)
return offset+l, struct.unpack(fmt, data[offset:offset+l])
def read_name(offset, data):
result = ''
off = offset
next = -1
first = off
while True:
l = ord(data[off])
off += 1
if l == 0:
break
t = l & 0xC0
if t == 0x00:
off, utf8 = read_utf8(off, data, l)
result = ''.join([result, utf8, '.'])
elif t == 0xC0:
if next < 0:
next = off + 1
off = ((l & 0x3F) << 8) | ord(data[off])
if off >= first:
log.error("Bad domain name (circular) at 0x%04x" % off)
break
first = off
else:
log.error("Bad domain name at 0x%04x" % off)
break
if next >= 0:
offset = next
else:
offset = off
return offset, result
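# Compression sketch (RFC 1035, section 4.1.4): a length byte with the two
# top bits set (0xC0) is not a label but a 14-bit pointer to an earlier
# offset in the packet, e.g. b'\x03abc\xc0\x0c' reads the label "abc" and
# then continues reading the name at offset 0x0c.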
def write_name(packet, name):
for p in name.split('.'):
utf8_string = p.encode('utf-8')
packet.write(struct.pack('!B', len(utf8_string)))
packet.write(utf8_string)
def create_outgoing_packets(answers):
index = 0
num_questions = 1
first_packet = True
packets = []
packet = cStringIO.StringIO()
answer_record = cStringIO.StringIO()
while True:
packet.seek(0)
packet.truncate()
num_answers = len(answers[index:index+MAX_ANSWERS_PER_PACKET])
if num_answers == 0 and num_questions == 0:
break
flags = 0x0200 # truncated
if len(answers) - index <= MAX_ANSWERS_PER_PACKET:
flags = 0x0000 # not truncated
# ID/FLAGS/QDCOUNT/ANCOUNT/NSCOUNT/ARCOUNT
packet.write(struct.pack("!HHHHHH", 0x0000, flags, num_questions, num_answers, 0x0000, 0x0000))
if num_questions:
# QNAME
write_name(packet, "_pdl-datastream._tcp.local") # QNAME
packet.write(struct.pack("!B", 0x00))
# QTYPE/QCLASS
packet.write(struct.pack("!HH", QTYPE_PTR, QCLASS_IN))
first_record = True
for d in answers[index:index+MAX_ANSWERS_PER_PACKET]:
answer_record.seek(0)
answer_record.truncate()
# NAME
if not first_packet and first_record:
first_record = False
write_name(answer_record, "_pdl-datastream._tcp.local")
answer_record.write(struct.pack("!B", 0x00))
else:
answer_record.write(struct.pack("!H", 0xc00c)) # Pointer
# TYPE/CLASS
answer_record.write(struct.pack("!HH", QTYPE_PTR, QCLASS_IN))
# TTL
answer_record.write(struct.pack("!I", 0xffff))
rdlength_pos = answer_record.tell()
# RDLENGTH
answer_record.write(struct.pack("!H", 0x0000)) # (adj later)
# RDATA
write_name(answer_record, d)
answer_record.write(struct.pack("!H", 0xc00c)) # Ptr
# RDLENGTH
rdlength = answer_record.tell() - rdlength_pos - 2
answer_record.seek(rdlength_pos)
answer_record.write(struct.pack("!H", rdlength))
answer_record.seek(0)
packet.write(answer_record.read())
packets.append(packet.getvalue())
        index += MAX_ANSWERS_PER_PACKET  # advance by the same batch size sliced above
if first_packet:
num_questions = 0
first_packet = False
return packets
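# Packet layout written above, per RFC 1035 section 4.1.1 (header fields are
# 16 bits each): ID, FLAGS, QDCOUNT, ANCOUNT, NSCOUNT, ARCOUNT, followed by
# one PTR question for _pdl-datastream._tcp.local and up to
# MAX_ANSWERS_PER_PACKET PTR answer records per packet.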
def detectNetworkDevices(ttl=4, timeout=10):
mcast_addr, mcast_port ='224.0.0.251', 5353
found_devices = {}
answers = []
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
x = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
x.connect(('1.2.3.4', 56))
intf = x.getsockname()[0]
x.close()
s.setblocking(0)
ttl = struct.pack('B', ttl)
except socket.error:
log.error("Network error")
return {}
try:
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
except (AttributeError, socket.error):
pass
try:
s.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_TTL, ttl)
s.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(intf) + socket.inet_aton('0.0.0.0'))
s.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP ,1)
except Exception, e:
log.error("Unable to setup multicast socket for mDNS: %s" % e)
return {}
now = time.time()
next = now
last = now + timeout
delay = 1
while True:
now = time.time()
if now > last:
break
if now >= next:
try:
for p in create_outgoing_packets(answers):
log.debug("Outgoing: (%d)" % len(p))
log.log_data(p, width=16)
s.sendto(p, 0, (mcast_addr, mcast_port))
except socket.error, e:
log.error("Unable to send broadcast DNS packet: %s" % e)
next += delay
delay *= 2
update_spinner()
r, w, e = select.select([s], [], [s], 0.5)
if not r:
continue
data, addr = s.recvfrom(16384)
if data:
update_spinner()
y = {'num_devices' : 1, 'num_ports': 1, 'product_id' : '', 'mac': '',
'status_code': 0, 'device2': '0', 'device3': '0', 'note': ''}
log.debug("Incoming: (%d)" % len(data))
log.log_data(data, width=16)
offset = 0
offset, (id, flags, num_questions, num_answers, num_authorities, num_additionals) = \
read_data_unpack(offset, data, "!HHHHHH")
log.debug("Response: ID=%d FLAGS=0x%x Q=%d A=%d AUTH=%d ADD=%d" %
(id, flags, num_questions, num_answers, num_authorities, num_additionals))
for question in range(num_questions):
update_spinner()
offset, name = read_name(offset, data)
offset, (typ, cls) = read_data_unpack(offset, data, "!HH")
log.debug("Q: %s TYPE=%d CLASS=%d" % (name, typ, cls))
fmt = '!HHiH'
for record in range(num_answers + num_authorities + num_additionals):
update_spinner()
offset, name = read_name(offset, data)
offset, info = read_data_unpack(offset, data, "!HHiH")
if info[0] == QTYPE_A: # ipv4 address
offset, result = read_data(offset, data, 4)
ip = '.'.join([str(ord(x)) for x in result])
log.debug("A: %s" % ip)
y['ip'] = ip
elif info[0] == QTYPE_PTR: # PTR
offset, name = read_name(offset, data)
log.debug("PTR: %s" % name)
y['mdns'] = name
answers.append(name.replace("._pdl-datastream._tcp.local.", ""))
elif info[0] == QTYPE_TXT:
offset, name = read_data(offset, data, info[3])
txt, off = {}, 0
while off < len(name):
l = ord(name[off])
off += 1
result = name[off:off+l]
try:
key, value = result.split('=')
txt[key] = value
except ValueError:
pass
off += l
log.debug("TXT: %s" % repr(txt))
try:
y['device1'] = "MFG:Hewlett-Packard;MDL:%s;CLS:PRINTER;" % txt['ty']
except KeyError:
log.debug("NO ty Key in txt: %s" % repr(txt))
if 'note' in txt:
y['note'] = txt['note']
elif info[0] == QTYPE_SRV:
offset, (priority, weight, port) = read_data_unpack(offset, data, "!HHH")
ttl = info[3]
offset, server = read_name(offset, data)
log.debug("SRV: %s TTL=%d PRI=%d WT=%d PORT=%d" % (server, ttl, priority, weight, port))
y['hn'] = server.replace('.local.', '')
elif info[0] == QTYPE_AAAA: # ipv6 address
offset, result = read_data(offset, data, 16)
log.debug("AAAA: %s" % repr(result))
else:
log.error("Unknown DNS record type (%d)." % info[0])
break
found_devices[y['ip']] = y
log.debug("Found %d devices" % len(found_devices))
return found_devices
| Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/hplip/base/mdns.py | Python | gpl-3.0 | 10,078 | 0.003671 |
def say_hello():
print 'Hello'
if __name__ == '__main__':
say_hello()
| MagicForest/Python | src/training/Core2/Chapter14ExecutionEnvironment/hello.py | Python | apache-2.0 | 88 | 0.022727 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from config.template_middleware import TemplateResponse
from gaecookie.decorator import no_csrf
from gaepermission.decorator import login_not_required
@login_not_required
@no_csrf
def index():
return TemplateResponse()
def insertStudent():
return TemplateResponse(template_path="/student/insert_student.html")
def searchStudent():
return TemplateResponse(template_path="/student/search_student.html") | SamaraCardoso27/eMakeup | backend/appengine/routes/home.py | Python | mit | 492 | 0.010163 |
import functools
import registration.views
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.conf import settings
import forms
def register(request):
'''Registration page for SSL auth without CA'''
next = request.GET.get(REDIRECT_FIELD_NAME, settings.LOGIN_REDIRECT_URL)
return registration.views.register(request, success_url=next,
form_class=functools.partial(forms.RegistrationForm,
request=request))
| adieu/authentic2 | authentic2/auth2_auth/auth2_ssl/views.py | Python | agpl-3.0 | 462 | 0.008658 |
# -*- coding: utf-8 -*-
'''Twisted logging to Python loggin bridge.'''
'''
Kontalk Pyserver
Copyright (C) 2011 Kontalk Devteam <devteam@kontalk.org>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from twisted.python import log
LEVEL_DEBUG = 1
LEVEL_INFO = 1 << 1
LEVEL_WARN = 1 << 2
LEVEL_ERROR = 1 << 3
# all levels
LEVEL_ALL = LEVEL_DEBUG | LEVEL_INFO | LEVEL_WARN | LEVEL_ERROR
level = 0
def init(cfg):
'''Initializes logging system.'''
global level
l = cfg['server']['log.levels']
if 'ALL' in l:
level = LEVEL_ALL
else:
if 'DEBUG' in l:
level |= LEVEL_DEBUG
if 'INFO' in l:
level |= LEVEL_INFO
if 'WARN' in l:
level |= LEVEL_WARN
if 'ERROR' in l:
level |= LEVEL_ERROR
def debug(*args, **kwargs):
global level
if level & LEVEL_DEBUG:
log.msg(*args, **kwargs)
def info(*args, **kwargs):
global level
if level & LEVEL_INFO:
log.msg(*args, **kwargs)
def warn(*args, **kwargs):
global level
if level & LEVEL_WARN:
log.msg(*args, **kwargs)
def error(*args, **kwargs):
global level
if level & LEVEL_ERROR:
log.msg(*args, **kwargs)
| cgvarela/pyserverlib | kontalklib/logging.py | Python | gpl-3.0 | 1,793 | 0.003904 |
import unittest
import pickle
import pickletools
import copyreg
from test.support import TestFailed, TESTFN, run_with_locale
from pickle import bytes_types
# Tests that try a number of pickle protocols should have a
# for proto in protocols:
# kind of outer loop.
protocols = range(pickle.HIGHEST_PROTOCOL + 1)
# Return True if opcode code appears in the pickle, else False.
def opcode_in_pickle(code, pickle):
for op, dummy, dummy in pickletools.genops(pickle):
if op.code == code.decode("latin-1"):
return True
return False
# Return the number of times opcode code appears in pickle.
def count_opcode(code, pickle):
n = 0
for op, dummy, dummy in pickletools.genops(pickle):
if op.code == code.decode("latin-1"):
n += 1
return n
# We can't very well test the extension registry without putting known stuff
# in it, but we have to be careful to restore its original state. Code
# should do this:
#
# e = ExtensionSaver(extension_code)
# try:
# fiddle w/ the extension registry's stuff for extension_code
# finally:
# e.restore()
class ExtensionSaver:
# Remember current registration for code (if any), and remove it (if
# there is one).
def __init__(self, code):
self.code = code
if code in copyreg._inverted_registry:
self.pair = copyreg._inverted_registry[code]
copyreg.remove_extension(self.pair[0], self.pair[1], code)
else:
self.pair = None
# Restore previous registration for code.
def restore(self):
code = self.code
curpair = copyreg._inverted_registry.get(code)
if curpair is not None:
copyreg.remove_extension(curpair[0], curpair[1], code)
pair = self.pair
if pair is not None:
copyreg.add_extension(pair[0], pair[1], code)
class C:
def __eq__(self, other):
return self.__dict__ == other.__dict__
import __main__
__main__.C = C
C.__module__ = "__main__"
class myint(int):
def __init__(self, x):
self.str = str(x)
class initarg(C):
def __init__(self, a, b):
self.a = a
self.b = b
def __getinitargs__(self):
return self.a, self.b
class metaclass(type):
pass
class use_metaclass(object, metaclass=metaclass):
pass
# DATA0 .. DATA2 are the pickles we expect under the various protocols, for
# the object returned by create_data().
DATA0 = (
b'(lp0\nL0L\naL1L\naF2.0\nac'
b'builtins\ncomplex\n'
b'p1\n(F3.0\nF0.0\ntp2\nRp'
b'3\naL1L\naL-1L\naL255L\naL-'
b'255L\naL-256L\naL65535L\na'
b'L-65535L\naL-65536L\naL2'
b'147483647L\naL-2147483'
b'647L\naL-2147483648L\na('
b'Vabc\np4\ng4\nccopyreg'
b'\n_reconstructor\np5\n('
b'c__main__\nC\np6\ncbu'
b'iltins\nobject\np7\nNt'
b'p8\nRp9\n(dp10\nVfoo\np1'
b'1\nL1L\nsVbar\np12\nL2L\nsb'
b'g9\ntp13\nag13\naL5L\na.'
)
# Disassembly of DATA0
DATA0_DIS = """\
0: ( MARK
1: l LIST (MARK at 0)
2: p PUT 0
5: L LONG 0
9: a APPEND
10: L LONG 1
14: a APPEND
15: F FLOAT 2.0
20: a APPEND
21: c GLOBAL 'builtins complex'
39: p PUT 1
42: ( MARK
43: F FLOAT 3.0
48: F FLOAT 0.0
53: t TUPLE (MARK at 42)
54: p PUT 2
57: R REDUCE
58: p PUT 3
61: a APPEND
62: L LONG 1
66: a APPEND
67: L LONG -1
72: a APPEND
73: L LONG 255
79: a APPEND
80: L LONG -255
87: a APPEND
88: L LONG -256
95: a APPEND
96: L LONG 65535
104: a APPEND
105: L LONG -65535
114: a APPEND
115: L LONG -65536
124: a APPEND
125: L LONG 2147483647
138: a APPEND
139: L LONG -2147483647
153: a APPEND
154: L LONG -2147483648
168: a APPEND
169: ( MARK
170: V UNICODE 'abc'
175: p PUT 4
178: g GET 4
181: c GLOBAL 'copyreg _reconstructor'
205: p PUT 5
208: ( MARK
209: c GLOBAL '__main__ C'
221: p PUT 6
224: c GLOBAL 'builtins object'
241: p PUT 7
244: N NONE
245: t TUPLE (MARK at 208)
246: p PUT 8
249: R REDUCE
250: p PUT 9
253: ( MARK
254: d DICT (MARK at 253)
255: p PUT 10
259: V UNICODE 'foo'
264: p PUT 11
268: L LONG 1
272: s SETITEM
273: V UNICODE 'bar'
278: p PUT 12
282: L LONG 2
286: s SETITEM
287: b BUILD
288: g GET 9
291: t TUPLE (MARK at 169)
292: p PUT 13
296: a APPEND
297: g GET 13
301: a APPEND
302: L LONG 5
306: a APPEND
307: . STOP
highest protocol among opcodes = 0
"""
DATA1 = (
b']q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c'
b'builtins\ncomplex\nq\x01'
b'(G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00t'
b'q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xffJ'
b'\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff\xff'
b'\xff\x7fJ\x01\x00\x00\x80J\x00\x00\x00\x80(X\x03\x00\x00\x00ab'
b'cq\x04h\x04ccopyreg\n_reco'
b'nstructor\nq\x05(c__main'
b'__\nC\nq\x06cbuiltins\n'
b'object\nq\x07Ntq\x08Rq\t}q\n('
b'X\x03\x00\x00\x00fooq\x0bK\x01X\x03\x00\x00\x00bar'
b'q\x0cK\x02ubh\ttq\rh\rK\x05e.'
)
# Disassembly of DATA1
DATA1_DIS = """\
0: ] EMPTY_LIST
1: q BINPUT 0
3: ( MARK
4: K BININT1 0
6: K BININT1 1
8: G BINFLOAT 2.0
17: c GLOBAL 'builtins complex'
35: q BINPUT 1
37: ( MARK
38: G BINFLOAT 3.0
47: G BINFLOAT 0.0
56: t TUPLE (MARK at 37)
57: q BINPUT 2
59: R REDUCE
60: q BINPUT 3
62: K BININT1 1
64: J BININT -1
69: K BININT1 255
71: J BININT -255
76: J BININT -256
81: M BININT2 65535
84: J BININT -65535
89: J BININT -65536
94: J BININT 2147483647
99: J BININT -2147483647
104: J BININT -2147483648
109: ( MARK
110: X BINUNICODE 'abc'
118: q BINPUT 4
120: h BINGET 4
122: c GLOBAL 'copyreg _reconstructor'
146: q BINPUT 5
148: ( MARK
149: c GLOBAL '__main__ C'
161: q BINPUT 6
163: c GLOBAL 'builtins object'
180: q BINPUT 7
182: N NONE
183: t TUPLE (MARK at 148)
184: q BINPUT 8
186: R REDUCE
187: q BINPUT 9
189: } EMPTY_DICT
190: q BINPUT 10
192: ( MARK
193: X BINUNICODE 'foo'
201: q BINPUT 11
203: K BININT1 1
205: X BINUNICODE 'bar'
213: q BINPUT 12
215: K BININT1 2
217: u SETITEMS (MARK at 192)
218: b BUILD
219: h BINGET 9
221: t TUPLE (MARK at 109)
222: q BINPUT 13
224: h BINGET 13
226: K BININT1 5
228: e APPENDS (MARK at 3)
229: . STOP
highest protocol among opcodes = 1
"""
DATA2 = (
b'\x80\x02]q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c'
b'builtins\ncomplex\n'
b'q\x01G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x86q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xff'
b'J\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff'
b'\xff\xff\x7fJ\x01\x00\x00\x80J\x00\x00\x00\x80(X\x03\x00\x00\x00a'
b'bcq\x04h\x04c__main__\nC\nq\x05'
b')\x81q\x06}q\x07(X\x03\x00\x00\x00fooq\x08K\x01'
b'X\x03\x00\x00\x00barq\tK\x02ubh\x06tq\nh'
b'\nK\x05e.'
)
# Disassembly of DATA2
DATA2_DIS = """\
0: \x80 PROTO 2
2: ] EMPTY_LIST
3: q BINPUT 0
5: ( MARK
6: K BININT1 0
8: K BININT1 1
10: G BINFLOAT 2.0
19: c GLOBAL 'builtins complex'
37: q BINPUT 1
39: G BINFLOAT 3.0
48: G BINFLOAT 0.0
57: \x86 TUPLE2
58: q BINPUT 2
60: R REDUCE
61: q BINPUT 3
63: K BININT1 1
65: J BININT -1
70: K BININT1 255
72: J BININT -255
77: J BININT -256
82: M BININT2 65535
85: J BININT -65535
90: J BININT -65536
95: J BININT 2147483647
100: J BININT -2147483647
105: J BININT -2147483648
110: ( MARK
111: X BINUNICODE 'abc'
119: q BINPUT 4
121: h BINGET 4
123: c GLOBAL '__main__ C'
135: q BINPUT 5
137: ) EMPTY_TUPLE
138: \x81 NEWOBJ
139: q BINPUT 6
141: } EMPTY_DICT
142: q BINPUT 7
144: ( MARK
145: X BINUNICODE 'foo'
153: q BINPUT 8
155: K BININT1 1
157: X BINUNICODE 'bar'
165: q BINPUT 9
167: K BININT1 2
169: u SETITEMS (MARK at 144)
170: b BUILD
171: h BINGET 6
173: t TUPLE (MARK at 110)
174: q BINPUT 10
176: h BINGET 10
178: K BININT1 5
180: e APPENDS (MARK at 5)
181: . STOP
highest protocol among opcodes = 2
"""
def create_data():
c = C()
c.foo = 1
c.bar = 2
x = [0, 1, 2.0, 3.0+0j]
# Append some integer test cases at cPickle.c's internal size
# cutoffs.
uint1max = 0xff
uint2max = 0xffff
int4max = 0x7fffffff
x.extend([1, -1,
uint1max, -uint1max, -uint1max-1,
uint2max, -uint2max, -uint2max-1,
int4max, -int4max, -int4max-1])
y = ('abc', 'abc', c, c)
x.append(y)
x.append(y)
x.append(5)
return x
class AbstractPickleTests(unittest.TestCase):
# Subclass must define self.dumps, self.loads.
_testdata = create_data()
def setUp(self):
pass
def test_misc(self):
# test various datatypes not tested by testdata
for proto in protocols:
x = myint(4)
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
x = (1, ())
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
x = initarg(1, x)
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
# XXX test __reduce__ protocol?
def test_roundtrip_equality(self):
expected = self._testdata
for proto in protocols:
s = self.dumps(expected, proto)
got = self.loads(s)
self.assertEqual(expected, got)
def test_load_from_data0(self):
self.assertEqual(self._testdata, self.loads(DATA0))
def test_load_from_data1(self):
self.assertEqual(self._testdata, self.loads(DATA1))
def test_load_from_data2(self):
self.assertEqual(self._testdata, self.loads(DATA2))
# There are gratuitous differences between pickles produced by
# pickle and cPickle, largely because cPickle starts PUT indices at
# 1 and pickle starts them at 0. See XXX comment in cPickle's put2() --
# there's a comment with an exclamation point there whose meaning
# is a mystery. cPickle also suppresses PUT for objects with a refcount
# of 1.
def dont_test_disassembly(self):
from io import StringIO
from pickletools import dis
for proto, expected in (0, DATA0_DIS), (1, DATA1_DIS):
s = self.dumps(self._testdata, proto)
filelike = StringIO()
dis(s, out=filelike)
got = filelike.getvalue()
self.assertEqual(expected, got)
def test_recursive_list(self):
l = []
l.append(l)
for proto in protocols:
s = self.dumps(l, proto)
x = self.loads(s)
self.assertEqual(len(x), 1)
self.assert_(x is x[0])
def test_recursive_dict(self):
d = {}
d[1] = d
for proto in protocols:
s = self.dumps(d, proto)
x = self.loads(s)
self.assertEqual(list(x.keys()), [1])
self.assert_(x[1] is x)
def test_recursive_inst(self):
i = C()
i.attr = i
for proto in protocols:
s = self.dumps(i, 2)
x = self.loads(s)
self.assertEqual(dir(x), dir(i))
self.assert_(x.attr is x)
def test_recursive_multi(self):
l = []
d = {1:l}
i = C()
i.attr = d
l.append(i)
for proto in protocols:
s = self.dumps(l, proto)
x = self.loads(s)
self.assertEqual(len(x), 1)
self.assertEqual(dir(x[0]), dir(i))
self.assertEqual(list(x[0].attr.keys()), [1])
self.assert_(x[0].attr[1] is x)
def test_get(self):
self.assertRaises(KeyError, self.loads, b'g0\np0')
self.assertEquals(self.loads(b'((Kdtp0\nh\x00l.))'), [(100,), (100,)])
def test_insecure_strings(self):
# XXX Some of these tests are temporarily disabled
insecure = [b"abc", b"2 + 2", # not quoted
## b"'abc' + 'def'", # not a single quoted string
b"'abc", # quote is not closed
b"'abc\"", # open quote and close quote don't match
b"'abc' ?", # junk after close quote
b"'\\'", # trailing backslash
# some tests of the quoting rules
## b"'abc\"\''",
## b"'\\\\a\'\'\'\\\'\\\\\''",
]
for b in insecure:
buf = b"S" + b + b"\012p0\012."
self.assertRaises(ValueError, self.loads, buf)
def test_unicode(self):
endcases = ['', '<\\u>', '<\\\u1234>', '<\n>', '<\\>',
'<\\\U00012345>']
for proto in protocols:
for u in endcases:
p = self.dumps(u, proto)
u2 = self.loads(p)
self.assertEqual(u2, u)
def test_unicode_high_plane(self):
t = '\U00012345'
for proto in protocols:
p = self.dumps(t, proto)
t2 = self.loads(p)
self.assertEqual(t2, t)
def test_bytes(self):
for proto in protocols:
for u in b'', b'xyz', b'xyz'*100:
p = self.dumps(u)
self.assertEqual(self.loads(p), u)
def test_ints(self):
import sys
for proto in protocols:
n = sys.maxsize
while n:
for expected in (-n, n):
s = self.dumps(expected, proto)
n2 = self.loads(s)
self.assertEqual(expected, n2)
n = n >> 1
def test_maxint64(self):
maxint64 = (1 << 63) - 1
data = b'I' + str(maxint64).encode("ascii") + b'\n.'
got = self.loads(data)
self.assertEqual(got, maxint64)
# Try too with a bogus literal.
data = b'I' + str(maxint64).encode("ascii") + b'JUNK\n.'
self.assertRaises(ValueError, self.loads, data)
def test_long(self):
for proto in protocols:
# 256 bytes is where LONG4 begins.
for nbits in 1, 8, 8*254, 8*255, 8*256, 8*257:
nbase = 1 << nbits
for npos in nbase-1, nbase, nbase+1:
for n in npos, -npos:
pickle = self.dumps(n, proto)
got = self.loads(pickle)
self.assertEqual(n, got)
# Try a monster. This is quadratic-time in protos 0 & 1, so don't
# bother with those.
nbase = int("deadbeeffeedface", 16)
nbase += nbase << 1000000
for n in nbase, -nbase:
p = self.dumps(n, 2)
got = self.loads(p)
self.assertEqual(n, got)
def test_float(self):
test_values = [0.0, 4.94e-324, 1e-310, 7e-308, 6.626e-34, 0.1, 0.5,
3.14, 263.44582062374053, 6.022e23, 1e30]
test_values = test_values + [-x for x in test_values]
for proto in protocols:
for value in test_values:
pickle = self.dumps(value, proto)
got = self.loads(pickle)
self.assertEqual(value, got)
@run_with_locale('LC_ALL', 'de_DE', 'fr_FR')
def test_float_format(self):
# make sure that floats are formatted locale independent with proto 0
self.assertEqual(self.dumps(1.2, 0)[0:3], b'F1.')
def test_reduce(self):
pass
def test_getinitargs(self):
pass
def test_metaclass(self):
a = use_metaclass()
for proto in protocols:
s = self.dumps(a, proto)
b = self.loads(s)
self.assertEqual(a.__class__, b.__class__)
def test_structseq(self):
import time
import os
t = time.localtime()
for proto in protocols:
s = self.dumps(t, proto)
u = self.loads(s)
self.assertEqual(t, u)
if hasattr(os, "stat"):
t = os.stat(os.curdir)
s = self.dumps(t, proto)
u = self.loads(s)
self.assertEqual(t, u)
if hasattr(os, "statvfs"):
t = os.statvfs(os.curdir)
s = self.dumps(t, proto)
u = self.loads(s)
self.assertEqual(t, u)
# Tests for protocol 2
def test_proto(self):
build_none = pickle.NONE + pickle.STOP
for proto in protocols:
expected = build_none
if proto >= 2:
expected = pickle.PROTO + bytes([proto]) + expected
p = self.dumps(None, proto)
self.assertEqual(p, expected)
oob = protocols[-1] + 1 # a future protocol
badpickle = pickle.PROTO + bytes([oob]) + build_none
try:
self.loads(badpickle)
except ValueError as detail:
self.failUnless(str(detail).startswith(
"unsupported pickle protocol"))
else:
self.fail("expected bad protocol number to raise ValueError")
def test_long1(self):
x = 12345678910111213141516178920
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
self.assertEqual(opcode_in_pickle(pickle.LONG1, s), proto >= 2)
def test_long4(self):
x = 12345678910111213141516178920 << (256*8)
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
self.assertEqual(opcode_in_pickle(pickle.LONG4, s), proto >= 2)
def test_short_tuples(self):
# Map (proto, len(tuple)) to expected opcode.
expected_opcode = {(0, 0): pickle.TUPLE,
(0, 1): pickle.TUPLE,
(0, 2): pickle.TUPLE,
(0, 3): pickle.TUPLE,
(0, 4): pickle.TUPLE,
(1, 0): pickle.EMPTY_TUPLE,
(1, 1): pickle.TUPLE,
(1, 2): pickle.TUPLE,
(1, 3): pickle.TUPLE,
(1, 4): pickle.TUPLE,
(2, 0): pickle.EMPTY_TUPLE,
(2, 1): pickle.TUPLE1,
(2, 2): pickle.TUPLE2,
(2, 3): pickle.TUPLE3,
(2, 4): pickle.TUPLE,
(3, 0): pickle.EMPTY_TUPLE,
(3, 1): pickle.TUPLE1,
(3, 2): pickle.TUPLE2,
(3, 3): pickle.TUPLE3,
(3, 4): pickle.TUPLE,
}
a = ()
b = (1,)
c = (1, 2)
d = (1, 2, 3)
e = (1, 2, 3, 4)
for proto in protocols:
for x in a, b, c, d, e:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y, (proto, x, s, y))
expected = expected_opcode[proto, len(x)]
self.assertEqual(opcode_in_pickle(expected, s), True)
def test_singletons(self):
# Map (proto, singleton) to expected opcode.
expected_opcode = {(0, None): pickle.NONE,
(1, None): pickle.NONE,
(2, None): pickle.NONE,
(3, None): pickle.NONE,
(0, True): pickle.INT,
(1, True): pickle.INT,
(2, True): pickle.NEWTRUE,
(3, True): pickle.NEWTRUE,
(0, False): pickle.INT,
(1, False): pickle.INT,
(2, False): pickle.NEWFALSE,
(3, False): pickle.NEWFALSE,
}
for proto in protocols:
for x in None, False, True:
s = self.dumps(x, proto)
y = self.loads(s)
self.assert_(x is y, (proto, x, s, y))
expected = expected_opcode[proto, x]
self.assertEqual(opcode_in_pickle(expected, s), True)
def test_newobj_tuple(self):
x = MyTuple([1, 2, 3])
x.foo = 42
x.bar = "hello"
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(tuple(x), tuple(y))
self.assertEqual(x.__dict__, y.__dict__)
def test_newobj_list(self):
x = MyList([1, 2, 3])
x.foo = 42
x.bar = "hello"
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(list(x), list(y))
self.assertEqual(x.__dict__, y.__dict__)
def test_newobj_generic(self):
for proto in protocols:
for C in myclasses:
B = C.__base__
x = C(C.sample)
x.foo = 42
s = self.dumps(x, proto)
y = self.loads(s)
detail = (proto, C, B, x, y, type(y))
self.assertEqual(B(x), B(y), detail)
self.assertEqual(x.__dict__, y.__dict__, detail)
# Register a type with copyreg, with extension code extcode. Pickle
# an object of that type. Check that the resulting pickle uses opcode
# (EXT[124]) under proto 2, and not in proto 1.
def produce_global_ext(self, extcode, opcode):
e = ExtensionSaver(extcode)
try:
copyreg.add_extension(__name__, "MyList", extcode)
x = MyList([1, 2, 3])
x.foo = 42
x.bar = "hello"
# Dump using protocol 1 for comparison.
s1 = self.dumps(x, 1)
self.assert_(__name__.encode("utf-8") in s1)
self.assert_(b"MyList" in s1)
self.assertEqual(opcode_in_pickle(opcode, s1), False)
y = self.loads(s1)
self.assertEqual(list(x), list(y))
self.assertEqual(x.__dict__, y.__dict__)
# Dump using protocol 2 for test.
s2 = self.dumps(x, 2)
self.assert_(__name__.encode("utf-8") not in s2)
self.assert_(b"MyList" not in s2)
self.assertEqual(opcode_in_pickle(opcode, s2), True, repr(s2))
y = self.loads(s2)
self.assertEqual(list(x), list(y))
self.assertEqual(x.__dict__, y.__dict__)
finally:
e.restore()
def test_global_ext1(self):
self.produce_global_ext(0x00000001, pickle.EXT1) # smallest EXT1 code
self.produce_global_ext(0x000000ff, pickle.EXT1) # largest EXT1 code
def test_global_ext2(self):
self.produce_global_ext(0x00000100, pickle.EXT2) # smallest EXT2 code
self.produce_global_ext(0x0000ffff, pickle.EXT2) # largest EXT2 code
self.produce_global_ext(0x0000abcd, pickle.EXT2) # check endianness
def test_global_ext4(self):
self.produce_global_ext(0x00010000, pickle.EXT4) # smallest EXT4 code
self.produce_global_ext(0x7fffffff, pickle.EXT4) # largest EXT4 code
self.produce_global_ext(0x12abcdef, pickle.EXT4) # check endianness
def test_list_chunking(self):
n = 10 # too small to chunk
x = list(range(n))
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
num_appends = count_opcode(pickle.APPENDS, s)
self.assertEqual(num_appends, proto > 0)
n = 2500 # expect at least two chunks when proto > 0
x = list(range(n))
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
num_appends = count_opcode(pickle.APPENDS, s)
if proto == 0:
self.assertEqual(num_appends, 0)
else:
self.failUnless(num_appends >= 2)
def test_dict_chunking(self):
n = 10 # too small to chunk
x = dict.fromkeys(range(n))
for proto in protocols:
s = self.dumps(x, proto)
assert isinstance(s, bytes_types)
y = self.loads(s)
self.assertEqual(x, y)
num_setitems = count_opcode(pickle.SETITEMS, s)
self.assertEqual(num_setitems, proto > 0)
n = 2500 # expect at least two chunks when proto > 0
x = dict.fromkeys(range(n))
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
num_setitems = count_opcode(pickle.SETITEMS, s)
if proto == 0:
self.assertEqual(num_setitems, 0)
else:
self.failUnless(num_setitems >= 2)
def test_simple_newobj(self):
x = object.__new__(SimpleNewObj) # avoid __init__
x.abc = 666
for proto in protocols:
s = self.dumps(x, proto)
self.assertEqual(opcode_in_pickle(pickle.NEWOBJ, s), proto >= 2)
y = self.loads(s) # will raise TypeError if __init__ called
self.assertEqual(y.abc, 666)
self.assertEqual(x.__dict__, y.__dict__)
def test_newobj_list_slots(self):
x = SlotList([1, 2, 3])
x.foo = 42
x.bar = "hello"
s = self.dumps(x, 2)
y = self.loads(s)
self.assertEqual(list(x), list(y))
self.assertEqual(x.__dict__, y.__dict__)
self.assertEqual(x.foo, y.foo)
self.assertEqual(x.bar, y.bar)
def test_reduce_overrides_default_reduce_ex(self):
for proto in 0, 1, 2:
x = REX_one()
self.assertEqual(x._reduce_called, 0)
s = self.dumps(x, proto)
self.assertEqual(x._reduce_called, 1)
y = self.loads(s)
self.assertEqual(y._reduce_called, 0)
def test_reduce_ex_called(self):
for proto in 0, 1, 2:
x = REX_two()
self.assertEqual(x._proto, None)
s = self.dumps(x, proto)
self.assertEqual(x._proto, proto)
y = self.loads(s)
self.assertEqual(y._proto, None)
def test_reduce_ex_overrides_reduce(self):
for proto in 0, 1, 2:
x = REX_three()
self.assertEqual(x._proto, None)
s = self.dumps(x, proto)
self.assertEqual(x._proto, proto)
y = self.loads(s)
self.assertEqual(y._proto, None)
def test_reduce_ex_calls_base(self):
for proto in 0, 1, 2:
x = REX_four()
self.assertEqual(x._proto, None)
s = self.dumps(x, proto)
self.assertEqual(x._proto, proto)
y = self.loads(s)
self.assertEqual(y._proto, proto)
def test_reduce_calls_base(self):
for proto in 0, 1, 2:
x = REX_five()
self.assertEqual(x._reduce_called, 0)
s = self.dumps(x, proto)
self.assertEqual(x._reduce_called, 1)
y = self.loads(s)
self.assertEqual(y._reduce_called, 1)
def test_bad_getattr(self):
x = BadGetattr()
for proto in 0, 1:
self.assertRaises(RuntimeError, self.dumps, x, proto)
        # Protocol 2 doesn't raise a RuntimeError when dumping.
d = self.dumps(x, 2)
self.assertRaises(RuntimeError, self.loads, d)
def test_reduce_bad_iterator(self):
# Issue4176: crash when 4th and 5th items of __reduce__()
# are not iterators
class C(object):
def __reduce__(self):
# 4th item is not an iterator
return list, (), None, [], None
class D(object):
def __reduce__(self):
# 5th item is not an iterator
return dict, (), None, None, []
        # Protocol 0 is less strict and also accepts iterables.
for proto in protocols:
try:
self.dumps(C(), proto)
except (pickle.PickleError):
pass
try:
self.dumps(D(), proto)
except (pickle.PickleError):
pass
# Test classes for reduce_ex
class REX_one(object):
_reduce_called = 0
def __reduce__(self):
self._reduce_called = 1
return REX_one, ()
# No __reduce_ex__ here, but inheriting it from object
class REX_two(object):
_proto = None
def __reduce_ex__(self, proto):
self._proto = proto
return REX_two, ()
# No __reduce__ here, but inheriting it from object
class REX_three(object):
_proto = None
def __reduce_ex__(self, proto):
self._proto = proto
return REX_two, ()
def __reduce__(self):
raise TestFailed("This __reduce__ shouldn't be called")
class REX_four(object):
_proto = None
def __reduce_ex__(self, proto):
self._proto = proto
return object.__reduce_ex__(self, proto)
# Calling base class method should succeed
class REX_five(object):
_reduce_called = 0
def __reduce__(self):
self._reduce_called = 1
return object.__reduce__(self)
# This one used to fail with infinite recursion
# Test classes for newobj
class MyInt(int):
sample = 1
class MyLong(int):
sample = 1
class MyFloat(float):
sample = 1.0
class MyComplex(complex):
sample = 1.0 + 0.0j
class MyStr(str):
sample = "hello"
class MyUnicode(str):
sample = "hello \u1234"
class MyTuple(tuple):
sample = (1, 2, 3)
class MyList(list):
sample = [1, 2, 3]
class MyDict(dict):
sample = {"a": 1, "b": 2}
myclasses = [MyInt, MyLong, MyFloat,
MyComplex,
MyStr, MyUnicode,
MyTuple, MyList, MyDict]
class SlotList(MyList):
__slots__ = ["foo"]
class SimpleNewObj(object):
def __init__(self, a, b, c):
# raise an error, to make sure this isn't called
raise TypeError("SimpleNewObj.__init__() didn't expect to get called")
class BadGetattr:
def __getattr__(self, key):
self.foo
class AbstractPickleModuleTests(unittest.TestCase):
def test_dump_closed_file(self):
import os
f = open(TESTFN, "wb")
try:
f.close()
self.assertRaises(ValueError, pickle.dump, 123, f)
finally:
os.remove(TESTFN)
    def test_load_closed_file(self):
        import os
        f = open(TESTFN, "wb")
        try:
            f.close()
            self.assertRaises(ValueError, pickle.load, f)
        finally:
            os.remove(TESTFN)
def test_highest_protocol(self):
# Of course this needs to be changed when HIGHEST_PROTOCOL changes.
self.assertEqual(pickle.HIGHEST_PROTOCOL, 3)
def test_callapi(self):
from io import BytesIO
f = BytesIO()
# With and without keyword arguments
pickle.dump(123, f, -1)
pickle.dump(123, file=f, protocol=-1)
pickle.dumps(123, -1)
pickle.dumps(123, protocol=-1)
pickle.Pickler(f, -1)
pickle.Pickler(f, protocol=-1)
def test_bad_init(self):
# Test issue3664 (pickle can segfault from a badly initialized Pickler).
from io import BytesIO
# Override initialization without calling __init__() of the superclass.
class BadPickler(pickle.Pickler):
def __init__(self): pass
class BadUnpickler(pickle.Unpickler):
def __init__(self): pass
self.assertRaises(pickle.PicklingError, BadPickler().dump, 0)
self.assertRaises(pickle.UnpicklingError, BadUnpickler().load)
def test_bad_input(self):
# Test issue4298
s = bytes([0x58, 0, 0, 0, 0x54])
self.assertRaises(EOFError, pickle.loads, s)
class AbstractPersistentPicklerTests(unittest.TestCase):
# This class defines persistent_id() and persistent_load()
# functions that should be used by the pickler. All even integers
# are pickled using persistent ids.
def persistent_id(self, object):
if isinstance(object, int) and object % 2 == 0:
self.id_count += 1
return str(object)
else:
return None
def persistent_load(self, oid):
self.load_count += 1
object = int(oid)
assert object % 2 == 0
return object
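    # For example, round-tripping list(range(10)) routes the five even ints
    # through persistent_id()/persistent_load(), as the tests below assert.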
def test_persistence(self):
self.id_count = 0
self.load_count = 0
L = list(range(10))
self.assertEqual(self.loads(self.dumps(L)), L)
self.assertEqual(self.id_count, 5)
self.assertEqual(self.load_count, 5)
def test_bin_persistence(self):
self.id_count = 0
self.load_count = 0
L = list(range(10))
self.assertEqual(self.loads(self.dumps(L, 1)), L)
self.assertEqual(self.id_count, 5)
self.assertEqual(self.load_count, 5)
if __name__ == "__main__":
# Print some stuff that can be used to rewrite DATA{0,1,2}
from pickletools import dis
x = create_data()
for i in range(3):
p = pickle.dumps(x, i)
print("DATA{0} = (".format(i))
for j in range(0, len(p), 20):
b = bytes(p[j:j+20])
print(" {0!r}".format(b))
print(")")
print()
print("# Disassembly of DATA{0}".format(i))
print("DATA{0}_DIS = \"\"\"\\".format(i))
dis(p)
print("\"\"\"")
print()
| mancoast/CPythonPyc_test | fail/301_pickletester.py | Python | gpl-3.0 | 35,665 | 0.001514 |
from django.views.generic import TemplateView
# All todos view
class Home( TemplateView ):
# Set the view template
    template_name = 'index.html'
| pombredanne/todomvc-django | todo/views.py | Python | mit | 148 | 0.033784 |
# -*- coding: utf-8 -*-
# Copyright 2015 Antiun Ingeniería S.L. - Antonio Espinosa
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': "Manage model export profiles",
'category': 'Personalization',
'version': '10.0.1.0.0',
'depends': [
'web',
],
'data': [
'views/assets.xml',
'views/ir_exports.xml',
'views/ir_model.xml',
'views/ir_model_access.xml',
'views/res_groups.xml',
],
'qweb': [
"static/src/xml/base.xml",
],
'author': 'Tecnativa, '
'LasLabs, '
'Ursa Information Systems, '
'Odoo Community Association (OCA)',
'website': 'https://www.tecnativa.com',
'license': 'AGPL-3',
'installable': True,
'application': False,
}
| ovnicraft/server-tools | base_export_manager/__manifest__.py | Python | agpl-3.0 | 808 | 0 |
from __future__ import print_function
import numpy as np
h, l, c = np.loadtxt('data.csv', delimiter=',', usecols=(4, 5, 6), unpack=True)
N = 5
h = h[-N:]
l = l[-N:]
print("len(h)", len(h), "len(l)", len(l))
print("Close", c)
previousclose = c[-N -1: -1]
print("len(previousclose)", len(previousclose))
print("Previous close", previousclose)
# True range is the element-wise maximum of three candidate ranges (Wilder's
# definition uses absolute differences). Note that np.maximum() is binary --
# a third positional argument is treated as the "out" array -- so the
# three-way maximum must be taken pairwise.
truerange = np.maximum(np.maximum(h - l, abs(h - previousclose)), abs(previousclose - l))
print("True range", truerange)
atr = np.zeros(N)
atr[0] = np.mean(truerange)
for i in range(1, N):
atr[i] = (N - 1) * atr[i - 1] + truerange[i]
atr[i] /= N
print("ATR", atr)
| moonbury/notebooks | github/Numpy/Chapter3/atr.py | Python | gpl-3.0 | 599 | 0.008347 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import string
import textwrap
import six
from six.moves.configparser import ConfigParser
from swift.common.utils import (
config_true_value, SWIFT_CONF_FILE, whataremyips, list_from_csv)
from swift.common.ring import Ring, RingData
from swift.common.utils import quorum_size
from swift.common.exceptions import RingValidationError
from pyeclib.ec_iface import ECDriver, ECDriverError, VALID_EC_TYPES
LEGACY_POLICY_NAME = 'Policy-0'
VALID_CHARS = '-' + string.ascii_letters + string.digits
DEFAULT_POLICY_TYPE = REPL_POLICY = 'replication'
EC_POLICY = 'erasure_coding'
DEFAULT_EC_OBJECT_SEGMENT_SIZE = 1048576
class BindPortsCache(object):
def __init__(self, swift_dir, bind_ip):
self.swift_dir = swift_dir
self.mtimes_by_ring_path = {}
self.portsets_by_ring_path = {}
self.my_ips = set(whataremyips(bind_ip))
def all_bind_ports_for_node(self):
"""
Given an iterable of IP addresses identifying a storage backend server,
return a set of all bind ports defined in all rings for this storage
backend server.
The caller is responsible for not calling this method (which performs
at least a stat on all ring files) too frequently.
"""
# NOTE: we don't worry about disappearing rings here because you can't
# ever delete a storage policy.
for policy in POLICIES:
# NOTE: we must NOT use policy.load_ring to load the ring. Users
# of this utility function will not need the actual ring data, just
# the bind ports.
#
# This is duplicated with Ring.__init__ just a bit...
serialized_path = os.path.join(self.swift_dir,
policy.ring_name + '.ring.gz')
try:
new_mtime = os.path.getmtime(serialized_path)
except OSError:
continue
old_mtime = self.mtimes_by_ring_path.get(serialized_path)
if not old_mtime or old_mtime != new_mtime:
self.portsets_by_ring_path[serialized_path] = set(
dev['port']
for dev in RingData.load(serialized_path,
metadata_only=True).devs
if dev and dev['ip'] in self.my_ips)
self.mtimes_by_ring_path[serialized_path] = new_mtime
# No "break" here so that the above line will update the
# mtimes_by_ring_path entry for any ring that changes, not just
# the first one we notice.
# Return the requested set of ports from our (now-freshened) cache
return six.moves.reduce(set.union,
self.portsets_by_ring_path.values(), set())
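# Usage sketch: build one cache per (swift_dir, bind_ip) pair and re-query it
# as needed; repeated calls only re-read ring files whose mtime changed.
#
#   cache = BindPortsCache('/etc/swift', '127.0.0.1')
#   ports = cache.all_bind_ports_for_node()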
class PolicyError(ValueError):
def __init__(self, msg, index=None):
if index is not None:
msg += ', for index %r' % index
super(PolicyError, self).__init__(msg)
def _get_policy_string(base, policy_index):
if policy_index == 0 or policy_index is None:
return_string = base
else:
return_string = base + "-%d" % int(policy_index)
return return_string
def get_policy_string(base, policy_or_index):
"""
Helper function to construct a string from a base and the policy.
Used to encode the policy index into either a file name or a
directory name by various modules.
:param base: the base string
:param policy_or_index: StoragePolicy instance, or an index
(string or int), if None the legacy
storage Policy-0 is assumed.
:returns: base name with policy index added
:raises: PolicyError if no policy exists with the given policy_index
"""
if isinstance(policy_or_index, BaseStoragePolicy):
policy = policy_or_index
else:
policy = POLICIES.get_by_index(policy_or_index)
if policy is None:
raise PolicyError("Unknown policy", index=policy_or_index)
return _get_policy_string(base, int(policy))
def split_policy_string(policy_string):
"""
Helper function to convert a string representing a base and a
policy. Used to decode the policy from either a file name or
a directory name by various modules.
:param policy_string: base name with policy index added
:raises: PolicyError if given index does not map to a valid policy
:returns: a tuple, in the form (base, policy) where base is the base
string and policy is the StoragePolicy instance for the
index encoded in the policy_string.
"""
if '-' in policy_string:
base, policy_index = policy_string.rsplit('-', 1)
else:
base, policy_index = policy_string, None
policy = POLICIES.get_by_index(policy_index)
if get_policy_string(base, policy) != policy_string:
raise PolicyError("Unknown policy", index=policy_index)
return base, policy
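# Round-trip sketch for the two helpers above (assumes a policy with index 1
# is configured): get_policy_string('objects', 1) returns 'objects-1', and
# split_policy_string('objects-1') returns ('objects', POLICIES[1]).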
class BaseStoragePolicy(object):
"""
Represents a storage policy. Not meant to be instantiated directly;
    implement a derived subclass (e.g. StoragePolicy, ECStoragePolicy, etc.)
or use :func:`~swift.common.storage_policy.reload_storage_policies` to
load POLICIES from ``swift.conf``.
The object_ring property is lazy loaded once the service's ``swift_dir``
is known via :meth:`~StoragePolicyCollection.get_object_ring`, but it may
be over-ridden via object_ring kwarg at create time for testing or
actively loaded with :meth:`~StoragePolicy.load_ring`.
"""
policy_type_to_policy_cls = {}
def __init__(self, idx, name='', is_default=False, is_deprecated=False,
object_ring=None, aliases=''):
# do not allow BaseStoragePolicy class to be instantiated directly
if type(self) == BaseStoragePolicy:
raise TypeError("Can't instantiate BaseStoragePolicy directly")
# policy parameter validation
try:
self.idx = int(idx)
except ValueError:
raise PolicyError('Invalid index', idx)
if self.idx < 0:
raise PolicyError('Invalid index', idx)
self.alias_list = []
if not name or not self._validate_policy_name(name):
raise PolicyError('Invalid name %r' % name, idx)
self.alias_list.append(name)
if aliases:
names_list = list_from_csv(aliases)
for alias in names_list:
if alias == name:
continue
self._validate_policy_name(alias)
self.alias_list.append(alias)
self.is_deprecated = config_true_value(is_deprecated)
self.is_default = config_true_value(is_default)
if self.policy_type not in BaseStoragePolicy.policy_type_to_policy_cls:
raise PolicyError('Invalid type', self.policy_type)
if self.is_deprecated and self.is_default:
raise PolicyError('Deprecated policy can not be default. '
'Invalid config', self.idx)
self.ring_name = _get_policy_string('object', self.idx)
self.object_ring = object_ring
@property
def name(self):
return self.alias_list[0]
@name.setter
    def name(self, name):
self._validate_policy_name(name)
self.alias_list[0] = name
@property
def aliases(self):
return ", ".join(self.alias_list)
def __int__(self):
return self.idx
def __cmp__(self, other):
return cmp(self.idx, int(other))
def __repr__(self):
return ("%s(%d, %r, is_default=%s, "
"is_deprecated=%s, policy_type=%r)") % \
(self.__class__.__name__, self.idx, self.alias_list,
self.is_default, self.is_deprecated, self.policy_type)
@classmethod
def register(cls, policy_type):
"""
Decorator for Storage Policy implementations to register
their StoragePolicy class. This will also set the policy_type
attribute on the registered implementation.
"""
def register_wrapper(policy_cls):
if policy_type in cls.policy_type_to_policy_cls:
raise PolicyError(
'%r is already registered for the policy_type %r' % (
cls.policy_type_to_policy_cls[policy_type],
policy_type))
cls.policy_type_to_policy_cls[policy_type] = policy_cls
policy_cls.policy_type = policy_type
return policy_cls
return register_wrapper
@classmethod
def _config_options_map(cls):
"""
Map config option name to StoragePolicy parameter name.
"""
return {
'name': 'name',
'aliases': 'aliases',
'policy_type': 'policy_type',
'default': 'is_default',
'deprecated': 'is_deprecated',
}
@classmethod
def from_config(cls, policy_index, options):
config_to_policy_option_map = cls._config_options_map()
policy_options = {}
for config_option, value in options.items():
try:
policy_option = config_to_policy_option_map[config_option]
except KeyError:
raise PolicyError('Invalid option %r in '
'storage-policy section' % config_option,
index=policy_index)
policy_options[policy_option] = value
return cls(policy_index, **policy_options)
def get_info(self, config=False):
"""
Return the info dict and conf file options for this policy.
:param config: boolean, if True all config options are returned
"""
info = {}
for config_option, policy_attribute in \
self._config_options_map().items():
info[config_option] = getattr(self, policy_attribute)
if not config:
# remove some options for public consumption
if not self.is_default:
info.pop('default')
if not self.is_deprecated:
info.pop('deprecated')
info.pop('policy_type')
return info
def _validate_policy_name(self, name):
"""
Helper function to determine the validity of a policy name. Used
to check policy names before setting them.
:param name: a name string for a single policy name.
:returns: true if the name is valid.
:raises: PolicyError if the policy name is invalid.
"""
# this is defensively restrictive, but could be expanded in the future
if not all(c in VALID_CHARS for c in name):
raise PolicyError('Names are used as HTTP headers, and can not '
'reliably contain any characters not in %r. '
'Invalid name %r' % (VALID_CHARS, name))
if name.upper() == LEGACY_POLICY_NAME.upper() and self.idx != 0:
msg = 'The name %s is reserved for policy index 0. ' \
'Invalid name %r' % (LEGACY_POLICY_NAME, name)
raise PolicyError(msg, self.idx)
if name.upper() in (existing_name.upper() for existing_name
in self.alias_list):
msg = 'The name %s is already assigned to this policy.' % name
raise PolicyError(msg, self.idx)
return True
def add_name(self, name):
"""
Adds an alias name to the storage policy. Shouldn't be called
directly from the storage policy but instead through the
storage policy collection class, so lookups by name resolve
correctly.
:param name: a new alias for the storage policy
"""
if self._validate_policy_name(name):
self.alias_list.append(name)
def remove_name(self, name):
"""
Removes an alias name from the storage policy. Shouldn't be called
directly from the storage policy but instead through the storage
policy collection class, so lookups by name resolve correctly. If
        the name removed is the primary name then the next available alias
will be adopted as the new primary name.
:param name: a name assigned to the storage policy
"""
if name not in self.alias_list:
raise PolicyError("%s is not a name assigned to policy %s"
% (name, self.idx))
if len(self.alias_list) == 1:
raise PolicyError("Cannot remove only name %s from policy %s. "
"Policies must have at least one name."
% (name, self.idx))
else:
self.alias_list.remove(name)
def change_primary_name(self, name):
"""
Changes the primary/default name of the policy to a specified name.
:param name: a string name to replace the current primary name.
"""
if name == self.name:
return
elif name in self.alias_list:
self.remove_name(name)
else:
self._validate_policy_name(name)
self.alias_list.insert(0, name)
def _validate_ring(self):
"""
Hook, called when the ring is loaded. Can be used to
validate the ring against the StoragePolicy configuration.
"""
pass
def load_ring(self, swift_dir):
"""
Load the ring for this policy immediately.
:param swift_dir: path to rings
"""
if self.object_ring:
return
self.object_ring = Ring(swift_dir, ring_name=self.ring_name)
# Validate ring to make sure it conforms to policy requirements
self._validate_ring()
@property
def quorum(self):
"""
Number of successful backend requests needed for the proxy to
consider the client request successful.
"""
raise NotImplementedError()
@BaseStoragePolicy.register(REPL_POLICY)
class StoragePolicy(BaseStoragePolicy):
"""
Represents a storage policy of type 'replication'. Default storage policy
class unless otherwise overridden from swift.conf.
Not meant to be instantiated directly; use
:func:`~swift.common.storage_policy.reload_storage_policies` to load
POLICIES from ``swift.conf``.
"""
@property
def quorum(self):
"""
Quorum concept in the replication case:
floor(number of replica / 2) + 1
"""
if not self.object_ring:
raise PolicyError('Ring is not loaded')
return quorum_size(self.object_ring.replica_count)
@BaseStoragePolicy.register(EC_POLICY)
class ECStoragePolicy(BaseStoragePolicy):
"""
Represents a storage policy of type 'erasure_coding'.
Not meant to be instantiated directly; use
:func:`~swift.common.storage_policy.reload_storage_policies` to load
POLICIES from ``swift.conf``.
"""
def __init__(self, idx, name='', aliases='', is_default=False,
is_deprecated=False, object_ring=None,
ec_segment_size=DEFAULT_EC_OBJECT_SEGMENT_SIZE,
ec_type=None, ec_ndata=None, ec_nparity=None):
super(ECStoragePolicy, self).__init__(
idx=idx, name=name, aliases=aliases, is_default=is_default,
is_deprecated=is_deprecated, object_ring=object_ring)
# Validate erasure_coding policy specific members
# ec_type is one of the EC implementations supported by PyEClib
if ec_type is None:
raise PolicyError('Missing ec_type')
if ec_type not in VALID_EC_TYPES:
raise PolicyError('Wrong ec_type %s for policy %s, should be one'
' of "%s"' % (ec_type, self.name,
', '.join(VALID_EC_TYPES)))
self._ec_type = ec_type
# Define _ec_ndata as the number of EC data fragments
# Accessible as the property "ec_ndata"
try:
value = int(ec_ndata)
if value <= 0:
raise ValueError
self._ec_ndata = value
except (TypeError, ValueError):
raise PolicyError('Invalid ec_num_data_fragments %r' %
ec_ndata, index=self.idx)
# Define _ec_nparity as the number of EC parity fragments
# Accessible as the property "ec_nparity"
try:
value = int(ec_nparity)
if value <= 0:
raise ValueError
self._ec_nparity = value
except (TypeError, ValueError):
raise PolicyError('Invalid ec_num_parity_fragments %r'
% ec_nparity, index=self.idx)
# Define _ec_segment_size as the encode segment unit size
# Accessible as the property "ec_segment_size"
try:
value = int(ec_segment_size)
if value <= 0:
raise ValueError
self._ec_segment_size = value
except (TypeError, ValueError):
raise PolicyError('Invalid ec_object_segment_size %r' %
ec_segment_size, index=self.idx)
# Initialize PyECLib EC backend
try:
self.pyeclib_driver = \
ECDriver(k=self._ec_ndata, m=self._ec_nparity,
ec_type=self._ec_type)
except ECDriverError as e:
raise PolicyError("Error creating EC policy (%s)" % e,
index=self.idx)
# quorum size in the EC case depends on the choice of EC scheme.
self._ec_quorum_size = \
self._ec_ndata + self.pyeclib_driver.min_parity_fragments_needed()
@property
def ec_type(self):
return self._ec_type
@property
def ec_ndata(self):
return self._ec_ndata
@property
def ec_nparity(self):
return self._ec_nparity
@property
def ec_segment_size(self):
return self._ec_segment_size
@property
def fragment_size(self):
"""
Maximum length of a fragment, including header.
NB: a fragment archive is a sequence of 0 or more max-length
fragments followed by one possibly-shorter fragment.
"""
# Technically pyeclib's get_segment_info signature calls for
# (data_len, segment_size) but on a ranged GET we don't know the
# ec-content-length header before we need to compute where in the
# object we should request to align with the fragment size. So we
        # tell pyeclib a lie - from its perspective, as long as data_len >=
# segment_size it'll give us the answer we want. From our
# perspective, because we only use this answer to calculate the
# *minimum* size we should read from an object body even if data_len <
# segment_size we'll still only read *the whole one and only last
        # fragment* and pass that into pyeclib, which will know what to do with
# it just as it always does when the last fragment is < fragment_size.
return self.pyeclib_driver.get_segment_info(
self.ec_segment_size, self.ec_segment_size)['fragment_size']
@property
def ec_scheme_description(self):
"""
This short hand form of the important parts of the ec schema is stored
in Object System Metadata on the EC Fragment Archives for debugging.
"""
return "%s %d+%d" % (self._ec_type, self._ec_ndata, self._ec_nparity)
def __repr__(self):
return ("%s, EC config(ec_type=%s, ec_segment_size=%d, "
"ec_ndata=%d, ec_nparity=%d)") % \
(super(ECStoragePolicy, self).__repr__(), self.ec_type,
self.ec_segment_size, self.ec_ndata, self.ec_nparity)
@classmethod
def _config_options_map(cls):
options = super(ECStoragePolicy, cls)._config_options_map()
options.update({
'ec_type': 'ec_type',
'ec_object_segment_size': 'ec_segment_size',
'ec_num_data_fragments': 'ec_ndata',
'ec_num_parity_fragments': 'ec_nparity',
})
return options
def get_info(self, config=False):
info = super(ECStoragePolicy, self).get_info(config=config)
if not config:
info.pop('ec_object_segment_size')
info.pop('ec_num_data_fragments')
info.pop('ec_num_parity_fragments')
info.pop('ec_type')
return info
def _validate_ring(self):
"""
EC specific validation
Replica count check - we need _at_least_ (#data + #parity) replicas
configured. Also if the replica count is larger than exactly that
number there's a non-zero risk of error for code that is considering
the number of nodes in the primary list from the ring.
"""
if not self.object_ring:
raise PolicyError('Ring is not loaded')
nodes_configured = self.object_ring.replica_count
if nodes_configured != (self.ec_ndata + self.ec_nparity):
raise RingValidationError(
'EC ring for policy %s needs to be configured with '
'exactly %d nodes. Got %d.' % (
self.name, self.ec_ndata + self.ec_nparity,
nodes_configured))
@property
def quorum(self):
"""
Number of successful backend requests needed for the proxy to consider
the client request successful.
The quorum size for EC policies defines the minimum number
of data + parity elements required to be able to guarantee
the desired fault tolerance, which is the number of data
elements supplemented by the minimum number of parity
elements required by the chosen erasure coding scheme.
        For example, for Reed-Solomon, the minimum number of parity
elements required is 1, and thus the quorum_size requirement
is ec_ndata + 1.
Given the number of parity elements required is not the same
for every erasure coding scheme, consult PyECLib for
min_parity_fragments_needed()
"""
return self._ec_quorum_size
class StoragePolicyCollection(object):
"""
This class represents the collection of valid storage policies for the
cluster and is instantiated as :class:`StoragePolicy` objects are added to
the collection when ``swift.conf`` is parsed by
:func:`parse_storage_policies`.
When a StoragePolicyCollection is created, the following validation
is enforced:
* If a policy with index 0 is not declared and no other policies defined,
Swift will create one
* The policy index must be a non-negative integer
* If no policy is declared as the default and no other policies are
defined, the policy with index 0 is set as the default
* Policy indexes must be unique
* Policy names are required
* Policy names are case insensitive
* Policy names must contain only letters, digits or a dash
* Policy names must be unique
* The policy name 'Policy-0' can only be used for the policy with index 0
* If any policies are defined, exactly one policy must be declared default
* Deprecated policies can not be declared the default
"""
def __init__(self, pols):
self.default = []
self.by_name = {}
self.by_index = {}
self._validate_policies(pols)
def _add_policy(self, policy):
"""
Add pre-validated policies to internal indexes.
"""
for name in policy.alias_list:
self.by_name[name.upper()] = policy
self.by_index[int(policy)] = policy
def __repr__(self):
return (textwrap.dedent("""
StoragePolicyCollection([
%s
])
""") % ',\n '.join(repr(p) for p in self)).strip()
def __len__(self):
return len(self.by_index)
def __getitem__(self, key):
return self.by_index[key]
def __iter__(self):
return iter(self.by_index.values())
def _validate_policies(self, policies):
"""
:param policies: list of policies
"""
for policy in policies:
if int(policy) in self.by_index:
raise PolicyError('Duplicate index %s conflicts with %s' % (
policy, self.get_by_index(int(policy))))
for name in policy.alias_list:
if name.upper() in self.by_name:
raise PolicyError('Duplicate name %s conflicts with %s' % (
policy, self.get_by_name(name)))
if policy.is_default:
if not self.default:
self.default = policy
else:
raise PolicyError(
'Duplicate default %s conflicts with %s' % (
policy, self.default))
self._add_policy(policy)
# If a 0 policy wasn't explicitly given, or nothing was
# provided, create the 0 policy now
if 0 not in self.by_index:
if len(self) != 0:
raise PolicyError('You must specify a storage policy '
'section for policy index 0 in order '
'to define multiple policies')
self._add_policy(StoragePolicy(0, name=LEGACY_POLICY_NAME))
# at least one policy must be enabled
enabled_policies = [p for p in self if not p.is_deprecated]
if not enabled_policies:
raise PolicyError("Unable to find policy that's not deprecated!")
# if needed, specify default
if not self.default:
if len(self) > 1:
raise PolicyError("Unable to find default policy")
self.default = self[0]
self.default.is_default = True
def get_by_name(self, name):
"""
Find a storage policy by its name.
:param name: name of the policy
:returns: storage policy, or None
"""
return self.by_name.get(name.upper())
def get_by_index(self, index):
"""
Find a storage policy by its index.
An index of None will be treated as 0.
:param index: numeric index of the storage policy
:returns: storage policy, or None if no such policy
"""
# makes it easier for callers to just pass in a header value
if index in ('', None):
index = 0
else:
try:
index = int(index)
except ValueError:
return None
return self.by_index.get(index)
@property
def legacy(self):
return self.get_by_index(None)
def get_object_ring(self, policy_idx, swift_dir):
"""
Get the ring object to use to handle a request based on its policy.
An index of None will be treated as 0.
:param policy_idx: policy index as defined in swift.conf
:param swift_dir: swift_dir used by the caller
:returns: appropriate ring object
"""
policy = self.get_by_index(policy_idx)
if not policy:
raise PolicyError("No policy with index %s" % policy_idx)
if not policy.object_ring:
policy.load_ring(swift_dir)
return policy.object_ring
def get_policy_info(self):
"""
Build info about policies for the /info endpoint
:returns: list of dicts containing relevant policy information
"""
policy_info = []
for pol in self:
# delete from /info if deprecated
if pol.is_deprecated:
continue
policy_entry = pol.get_info()
policy_info.append(policy_entry)
return policy_info
def add_policy_alias(self, policy_index, *aliases):
"""
Adds a new name or names to a policy
:param policy_index: index of a policy in this policy collection.
:param *aliases: arbitrary number of string policy names to add.
"""
policy = self.get_by_index(policy_index)
for alias in aliases:
if alias.upper() in self.by_name:
raise PolicyError('Duplicate name %s in use '
'by policy %s' % (alias,
self.get_by_name(alias)))
else:
policy.add_name(alias)
self.by_name[alias.upper()] = policy
def remove_policy_alias(self, *aliases):
"""
Removes a name or names from a policy. If the name removed is the
        primary name then the next available alias will be adopted
as the new primary name.
:param *aliases: arbitrary number of existing policy names to remove.
"""
for alias in aliases:
policy = self.get_by_name(alias)
if not policy:
raise PolicyError('No policy with name %s exists.' % alias)
if len(policy.alias_list) == 1:
raise PolicyError('Policy %s with name %s has only one name. '
'Policies must have at least one name.' % (
policy, alias))
else:
policy.remove_name(alias)
del self.by_name[alias.upper()]
def change_policy_primary_name(self, policy_index, new_name):
"""
Changes the primary or default name of a policy. The new primary
name can be an alias that already belongs to the policy or a
completely new name.
:param policy_index: index of a policy in this policy collection.
:param new_name: a string name to set as the new default name.
"""
policy = self.get_by_index(policy_index)
name_taken = self.get_by_name(new_name)
# if the name belongs to some other policy in the collection
if name_taken and name_taken != policy:
raise PolicyError('Other policy %s with name %s exists.' %
(self.get_by_name(new_name).idx, new_name))
else:
policy.change_primary_name(new_name)
self.by_name[new_name.upper()] = policy
def parse_storage_policies(conf):
"""
Parse storage policies in ``swift.conf`` - note that validation
is done when the :class:`StoragePolicyCollection` is instantiated.
:param conf: ConfigParser parser object for swift.conf
"""
policies = []
for section in conf.sections():
if not section.startswith('storage-policy:'):
continue
policy_index = section.split(':', 1)[1]
config_options = dict(conf.items(section))
policy_type = config_options.pop('policy_type', DEFAULT_POLICY_TYPE)
policy_cls = BaseStoragePolicy.policy_type_to_policy_cls[policy_type]
policy = policy_cls.from_config(policy_index, config_options)
policies.append(policy)
return StoragePolicyCollection(policies)
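# Illustrative swift.conf input for the parser above (section and option
# names match this module; the ec_type value depends on the PyECLib build):
#
#   [storage-policy:0]
#   name = gold
#   default = yes
#
#   [storage-policy:1]
#   name = ec10-4
#   policy_type = erasure_coding
#   ec_type = jerasure_rs_vand
#   ec_num_data_fragments = 10
#   ec_num_parity_fragments = 4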
class StoragePolicySingleton(object):
"""
An instance of this class is the primary interface to storage policies
exposed as a module level global named ``POLICIES``. This global
reference wraps ``_POLICIES`` which is normally instantiated by parsing
``swift.conf`` and will result in an instance of
:class:`StoragePolicyCollection`.
You should never patch this instance directly, instead patch the module
level ``_POLICIES`` instance so that swift code which imported
``POLICIES`` directly will reference the patched
:class:`StoragePolicyCollection`.
"""
def __iter__(self):
return iter(_POLICIES)
def __len__(self):
return len(_POLICIES)
def __getitem__(self, key):
return _POLICIES[key]
def __getattribute__(self, name):
return getattr(_POLICIES, name)
def __repr__(self):
return repr(_POLICIES)
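# Test-patching sketch (per the docstring above): patch the module-level
# _POLICIES rather than POLICIES itself, e.g.
#
#   import swift.common.storage_policy as storage_policy
#   storage_policy._POLICIES = StoragePolicyCollection(
#       [StoragePolicy(0, name='fake', is_default=True)])
#
# Code that imported POLICIES then observes the patched collection.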
def reload_storage_policies():
"""
Reload POLICIES from ``swift.conf``.
"""
global _POLICIES
policy_conf = ConfigParser()
policy_conf.read(SWIFT_CONF_FILE)
try:
_POLICIES = parse_storage_policies(policy_conf)
except PolicyError as e:
raise SystemExit('ERROR: Invalid Storage Policy Configuration '
'in %s (%s)' % (SWIFT_CONF_FILE, e))
# parse configuration and setup singleton
_POLICIES = None
reload_storage_policies()
POLICIES = StoragePolicySingleton()
| levythu/swift | swift/common/storage_policy.py | Python | apache-2.0 | 33,131 | 0 |
#!/usr/bin/env python3
###
# (C) Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
###
import sys
if sys.version_info < (3, 2):
raise Exception('Must use Python 3.2 or later')
import hpOneView as hpov
from pprint import pprint
def acceptEULA(con):
# See if we need to accept the EULA before we try to log in
con.get_eula_status()
try:
if con.get_eula_status() is True:
print('EULA display needed')
con.set_eula('no')
except Exception as e:
print('EXCEPTION:')
print(e)
def login(con, credential):
# Login with givin credentials
try:
con.login(credential)
except:
print('Login failed')
def getapi(sts):
version = sts.get_version()
print('currentVersion: ', version['currentVersion'])
print('minimumVersion: ', version['minimumVersion'])
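# Example invocation (hostname and credentials are illustrative):
#   python get-xapi.py -a oneview.example.com -u Administrator -p secret
# which prints the appliance's current and minimum REST API versions.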
def main():
# Arguments supplied from command line
if args:
credential = {'userName': args.user, 'password': args.passwd}
con = hpov.connection(args.host)
sts = hpov.settings(con)
if args.proxy:
con.set_proxy(args.proxy.split(':')[0], args.proxy.split(':')[1])
if args.cert:
con.set_trusted_ssl_bundle(args.cert)
login(con, credential)
acceptEULA(con)
getapi(sts)
else:
credential = "#"
if __name__ == '__main__':
import sys
import argparse
parser = argparse.ArgumentParser(add_help=True, description='Usage')
parser.add_argument('-a', '--appliance', dest='host', required=True,
help='HP OneView Appliance hostname or IP')
parser.add_argument('-u', '--user', dest='user', required=False,
default='Administrator', help='HP OneView Username')
parser.add_argument('-p', '--pass', dest='passwd', required=False,
help='HP OneView Password')
parser.add_argument('-c', '--certificate', dest='cert', required=False,
help='Trusted SSL Certificate Bundle in PEM '
'(Base64 Encoded DER) Format')
parser.add_argument('-r', '--proxy', dest='proxy', required=False,
                        help='Proxy (host:port format)')
args = parser.parse_args()
sys.exit(main())
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| xod442/sample_scripts | get-xapi.py | Python | gpl-2.0 | 3,401 | 0.000588 |
import os
# Application constants
APP_NAME = 'job_offers'
INSTALL_DIR = os.path.dirname(os.path.abspath(__file__))
LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
LOG_NAME = os.path.join(INSTALL_DIR, 'job_offers.log')
# Testing fixtures
JOB_OFFER_FIXTURES = os.path.join(INSTALL_DIR, "fixtures/job_offers.json")
| jvazquez/organization | organization/job_offers/constants.py | Python | unlicense | 334 | 0 |
from django import forms
# NOTE: "open-ehr" (with a hyphen) is not an importable Python package name;
# the project package is assumed to be importable as "open_ehr" here.
from open_ehr.labs.models import PatientInfo
from open_ehr.registration.forms import *
from open_ehr.report_manager.models import *
from django.forms.widgets import CheckboxSelectMultiple, TextInput
class PatientInfoForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.created_by = kwargs.pop('created_by',None)
self.tests_json_list = kwargs.pop('tests_json_list',None)
self.total_test_count = kwargs.pop('total_test_count',None)
self.belongs_to_lab = kwargs.pop('belongs_to_lab',None)
self.status_by_technician_json_list = kwargs.pop('status_by_technician_json_list',None)
self.results_field = kwargs.pop('results_field',None)
self.status_by_doctor_json_list = kwargs.pop('status_by_doctor_json_list',None)
super(PatientInfoForm, self).__init__(*args, **kwargs)
self.fields['gender'] = forms.ChoiceField(choices= (('male',('Male')),('female',('Female'))))
self.fields['patient_first_name'].label="Patient's First Name:"
self.fields['patient_last_name'].label="Patient's Last Name:"
self.fields['patient_dob'].label="Date of Birth:"
self.fields['report_due_on'].label="Report due on:"
self.fields['reference_doctor_name'].label="Reference Doctor Name:"
self.fields['sample_id'].label="Sample Id:"
#report_element_categories = ReportElementCategories.objects.all()
#category_choices = ( (x.id,x.report_element_category_name) for x in report_element_categories)
#self.fields['tests_list'] = forms.MultipleChoiceField(widget=CheckboxSelectMultiple,choices=category_choices,label=("Tests to be done"))#(required=True,widget=CheckboxSelectMultiple, choices=category_choices,label=("Tests to be done"))
self.fields['tests_list-'+str(1)] = forms.CharField(widget=forms.TextInput(),label=("Test Name "+str(1)))
for i in range(2,6):
self.fields['tests_list-'+str(i)] = forms.CharField(widget=forms.TextInput(),label=("Test Name "+str(i)),required=False)
def save(self, commit=True):
instance = super(PatientInfoForm, self).save(commit=False)
if self.created_by:
instance.created_by = self.created_by
if self.tests_json_list:
instance.tests_json_list = self.tests_json_list
if self.total_test_count:
instance.total_test_count = self.total_test_count
if self.belongs_to_lab:
instance.belongs_to_lab = self.belongs_to_lab
if self.status_by_doctor_json_list:
instance.status_by_doctor_json_list = self.status_by_doctor_json_list
if self.status_by_technician_json_list:
instance.status_by_technician_json_list = self.status_by_technician_json_list
if self.results_field:
instance.results_field = self.results_field
        if commit:
            instance.save()
        return instance
class Meta:
model = PatientInfo
exclude =('tests_json_list','created_by','technician_assigned','is_complete_by_technician','is_complete_by_doctor', 'total_test_count','status_by_technician_json_list','status_by_doctor_json_list','is_verified_by_doctor','verified_by_doctor','share_count','shared_with_json_list','results_field')
class PatientResultsForm(PatientInfoForm):
    def __init__(self, *args, **kwargs):
        super(PatientResultsForm, self).__init__(*args, **kwargs)
        # The parent form defines 'tests_list-1' .. 'tests_list-5' (a single
        # 'tests_list' field is commented out above), so remove those fields
        # instead of a nonexistent 'tests_list' key.
        for field_name in [f for f in self.fields if f.startswith('tests_list-')]:
            del self.fields[field_name]
def render_patient_result_form(test_name_fields,report_id):
attrs_dict ={}
attrs_dict["readonly"]=True
attrs_dict["value"] = report_id
attrs_dict["required"] = True
fields={"report_id":forms.CharField(widget=forms.TextInput(attrs=attrs_dict),required=True)}
    for test_id in test_name_fields:
        fields[test_name_fields[test_id]] = forms.CharField(
            widget=TextInput(attrs={"required": True, "class": "test_input_field"}),
            required=True)
return type('PatientResultsForm', (forms.BaseForm,), { 'base_fields': fields })
def render_patient_completed_form(test_name_fields, test_name_values, report_id, test_number):
    report_attrs = {"readonly": True, "value": report_id, "required": True}
    fields = {"report_id": forms.CharField(widget=forms.TextInput(attrs=report_attrs), required=True)}
    for test_id in test_name_fields:
        # Completed results render read-only with their recorded values;
        # test_name_values is assumed to be keyed by the same test_id as
        # test_name_fields.
        value_attrs = {"readonly": True, "required": True,
                       "value": test_name_values.get(test_id, "")}
        fields[test_name_fields[test_id]] = forms.CharField(
            widget=forms.TextInput(attrs=value_attrs), required=True)
    return type('PatientCompletedForm', (forms.BaseForm,), {'base_fields': fields})
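# Usage sketch (hypothetical view code): build a results form class for one
# report and instantiate it like any other Django form.
#
#   FormClass = render_patient_result_form({1: 'Hemoglobin'}, 42)
#   form = FormClass(request.POST or None)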
| saketkc/open-ehr-django | open-ehr-django-src/labs/forms.py | Python | lgpl-3.0 | 4,518 | 0.019478 |
#!/usr/bin/python
#
# Copyright: NFG, Paul Stevens <paul@nfg.nl>, 2005
# License: GPL
#
# $Id: autoreplier.py,v 1.4 2004/12/01 10:15:58 paul Exp $
#
# Reimplementation of the famous vacation tool for dbmail
#
#
import os,sys,string,email,getopt,shelve,time,re,smtplib
from dbmail.lib import DbmailAutoreply
def usage():
print """autoreplier: dbmail autoreply replacement
-u <username> --username=<username> specify recipient
-a <alias> --alias=<alias> specify matching destination address
"""
error='AutoReplyError'
class AutoReplier:
CACHEDIR="/var/cache/dbmail"
TIMEOUT=3600*24*7
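    # i.e. send at most one auto-reply per sender address per week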
OUTHOST="localhost"
OUTPORT=25
_username=None
_messagefile=None
def __init__(self):
self.setMessage(email.message_from_file(sys.stdin))
def setUsername(self,_username): self._username=_username
def getUsername(self): return self._username
def setMessage(self,_message): self._message=_message
def getMessage(self): return self._message
def setReply(self): self._reply=DbmailAutoreply(self.getUsername()).getReply()
def getReply(self): return email.message_from_string(self._reply)
def setAlias(self,_alias): self._alias=_alias
def getAlias(self): return self._alias
def openCache(self):
file=os.path.join(self.CACHEDIR,"%s.db" % self.getUsername())
self.cache=shelve.open(file,writeback=True)
def closeCache(self): self.cache.close()
def checkSender(self,bounce_senders=[]):
for f in ('From',):
if self.getMessage().has_key(f):
header=string.lower(self.getMessage()[f])
for s in bounce_senders:
if string.find(header,s) >= 0:
return True
return False
def checkDestination(self):
for f in ('To','Cc'):
if self.getMessage().has_key(f):
header=string.lower(self.getMessage()[f])
if string.find(header,self.getAlias()) >=0:
return True
return False
def send_message(self,msg):
server=smtplib.SMTP(self.OUTHOST,self.OUTPORT)
server.sendmail(msg['From'],msg['To'],msg.as_string())
server.quit()
def do_reply(self):
m=self.getMessage()
u=self.getUsername()
if m.has_key('Reply-to'): to=m['Reply-to']
elif m.has_key('From'): to=m['From']
else: raise error, "No return address"
if self.checkSender(['daemon','mailer-daemon','postmaster']):
return
if not self.checkDestination():
return
if not self.cache.has_key(u):
self.cache[u]={}
if not self.cache[u].has_key(to) or self.cache[u][to] < int(time.time())-self.TIMEOUT:
replymsg=self.getReply()
print replymsg
replymsg['To']=to
replymsg['From']=self.getAlias()
body=replymsg.get_payload()
body="%s\n---\n\n%s\n" % ( body, self.getAlias() )
replymsg.set_payload(body)
self.send_message(replymsg)
self.cache[u][to]=int(time.time())
def reply(self):
self.openCache()
self.do_reply()
self.closeCache()
if __name__ == '__main__':
try:
opts,args = getopt.getopt(sys.argv[1:],"u:m:a:",
["username=","alias="])
except getopt.GetoptError:
usage()
sys.exit(0)
replier=AutoReplier()
for o,a in opts:
if o in ('-u','--username'):
replier.setUsername(a)
replier.setReply()
if o in ('-a','--alias'):
replier.setAlias(a)
replier.reply()
| moveone/dbmail | python/bin/autoreplier.py | Python | gpl-2.0 | 3,784 | 0.026163 |
from typing import Dict
import random
from . import HardwareAdapter, POWER
from hedgehog.protocol.messages import io
class SimulatedHardwareAdapter(HardwareAdapter):
def __init__(self, *args, simulate_sensors=False, **kwargs):
super().__init__(*args, **kwargs)
self.simulate_sensors = simulate_sensors
self.io_configs: Dict[int, int] = {}
self.emergency: bool = False
async def get_version(self):
return bytes(12), 0, 0
async def emergency_action(self, activate):
self.emergency = activate
async def get_emergency_state(self) -> bool:
return self.emergency
async def set_io_config(self, port, flags):
self.io_configs[port] = flags
async def get_analog(self, port):
if not self.simulate_sensors:
return 0
mu, sigma = {
io.INPUT_FLOATING: (800, 60),
io.INPUT_PULLUP: (4030, 30),
io.INPUT_PULLDOWN: (80, 30),
io.OUTPUT_ON: (4050, 20),
io.OUTPUT_OFF: (50, 20),
}[self.io_configs.get(port, io.INPUT_FLOATING)]
num = int(random.gauss(mu, sigma))
if num < 0:
num = 0
if num >= 4096:
num = 4095
return num
async def get_imu_rate(self):
# TODO get_imu_rate
return 0, 0, 0
async def get_imu_acceleration(self):
# TODO get_imu_acceleration
return 0, 0, 0
async def get_imu_pose(self):
# TODO get_imu_pose
return 0, 0, 0
async def get_digital(self, port):
if not self.simulate_sensors:
return False
value = {
io.INPUT_FLOATING: False,
io.INPUT_PULLUP: True,
io.INPUT_PULLDOWN: False,
io.OUTPUT_ON: True,
io.OUTPUT_OFF: False,
}[self.io_configs.get(port, io.INPUT_FLOATING)]
return value
async def set_motor(self, port, mode, amount=0, reached_state=POWER, relative=None, absolute=None):
# TODO set motor action
pass
async def get_motor(self, port):
return 0, 0
async def set_motor_position(self, port, position):
# TODO set motor position
pass
async def set_motor_config(self, port, config):
# TODO set_motor_config
pass
async def set_servo(self, port, active, position):
# TODO set_servo
pass
async def send_uart(self, data):
# TODO send_uart
pass
async def set_speaker(self, frequency):
# TODO set_speaker
pass
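# Usage sketch (hypothetical; constructor arguments beyond simulate_sensors
# depend on the HardwareAdapter base class):
#
#   adapter = SimulatedHardwareAdapter(simulate_sensors=True)
#   value = await adapter.get_analog(0)   # ~800 for a floating input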
| PRIArobotics/HedgehogServer | hedgehog/server/hardware/simulated.py | Python | agpl-3.0 | 2,565 | 0.00039 |
"""
WSGI config for django_shopfront project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_shopfront.settings")
application = get_wsgi_application()
| rapilabs/django-shopfront | backend/django_shopfront/wsgi.py | Python | mit | 410 | 0 |
#
# Martin Kolman <mkolman@redhat.com>
#
# Copyright 2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
import unittest
from pyanaconda.installation_tasks import Task
from pyanaconda.installation_tasks import TaskQueue
class InstallTasksTestCase(unittest.TestCase):
def setUp(self):
self._test_variable1 = 0
self._test_variable2 = 0
self._test_variable3 = 0
self._test_variable4 = None
self._test_variable5 = None
self._test_variable6 = None
self._task_started_count = 0
self._task_completed_count = 0
self._queue_started_count = 0
self._queue_completed_count = 0
def _increment_var1(self):
self._test_variable1 += 1
def _increment_var2(self):
self._test_variable2 += 1
def _increment_var3(self):
self._test_variable3 += 1
def _set_var_4(self, value):
self._test_variable4 = value
def _set_var_5(self, value):
self._test_variable5 = value
def _set_var_6(self, value):
self._test_variable6 = value
def test_task(self):
"""Check that task works correctly."""
task = Task("foo", self._set_var_5, ("anaconda",))
assert task.name == "foo"
assert task.summary == "Task: foo"
assert task.parent is None
assert task.elapsed_time is None
# check initial state of the testing variables
assert self._test_variable4 is None
assert self._test_variable5 is None
assert self._test_variable6 is None
# check task state
assert not task.done
assert not task.running
# connect callbacks
task.started.connect(self._set_var_4)
task.completed.connect(self._set_var_6)
# check if the task is executed correctly
task.start()
assert task.done
assert not task.running
assert self._test_variable5 == "anaconda"
assert self._test_variable4 is task
assert self._test_variable6 is task
        # calling start() again must be a no-op - a task executes only once
task.start()
assert task.done
assert not task.running
assert task.elapsed_time is not None
assert self._test_variable5 == "anaconda"
assert self._test_variable4 is task
assert self._test_variable6 is task
def test_task_kwargs(self):
"""Check that works correctly with kwargs."""
def custom_function(arg1, foo=None):
self._set_var_5((arg1, foo))
task = Task("foo", custom_function, task_args=("anaconda",), task_kwargs={"foo": "bar"})
assert task.name == "foo"
assert task.summary == "Task: foo"
assert task.parent is None
assert task.elapsed_time is None
# check initial state of the testing variables
assert self._test_variable4 is None
assert self._test_variable5 is None
assert self._test_variable6 is None
# check task state
assert not task.done
assert not task.running
# connect callbacks
task.started.connect(self._set_var_4)
task.completed.connect(self._set_var_6)
# check if the task is executed correctly
task.start()
assert task.done
assert not task.running
assert self._test_variable5 == ("anaconda", "bar")
assert self._test_variable4 is task
assert self._test_variable6 is task
        # calling start() again must be a no-op - a task executes only once
task.start()
assert task.done
assert not task.running
assert task.elapsed_time is not None
assert self._test_variable5 == ("anaconda", "bar")
assert self._test_variable4 is task
assert self._test_variable6 is task
def test_task_no_args(self):
"""Check if task with no arguments works correctly."""
task = Task("foo", self._increment_var1)
assert task.name == "foo"
assert task.summary == "Task: foo"
assert task.parent is None
assert task.elapsed_time is None
# check initial state of the testing variables
assert self._test_variable1 == 0
assert self._test_variable4 is None
assert self._test_variable5 is None
assert self._test_variable6 is None
# check task state
assert not task.done
assert not task.running
# connect callbacks
task.started.connect(self._set_var_4)
task.completed.connect(self._set_var_6)
# check if the task is executed correctly
task.start()
assert task.done
assert not task.running
assert self._test_variable1 == 1
assert self._test_variable4 is task
assert self._test_variable6 is task
        # calling start() again must be a no-op - a task executes only once
task.start()
assert task.done
assert not task.running
assert task.elapsed_time is not None
assert self._test_variable1 == 1
assert self._test_variable4 is task
assert self._test_variable6 is task
def test_task_subclass_light(self):
"""Check if a Task subclass with custom run_task() method works."""
class CustomPayloadTask(Task):
def __init__(self, name):
super(CustomPayloadTask, self).__init__(name, task=None, task_args=[])
self.var1 = 0
self.var2 = None
# We define a custom run_task method and override it with our own "payload",
# as this is more lightweight than overriding the full start() method and
# we get all the locking and signal triggering for free.
def run_task(self):
self.var1 += 1
self.var1 += 1
self.var2 = "anaconda"
task = CustomPayloadTask("custom payload task")
# connect callbacks
task.started.connect(self._set_var_4)
task.completed.connect(self._set_var_6)
# verify initial state
assert task.var1 == 0
assert task.var2 is None
# run the custom task
task.start()
# verify that the custom payload was run
assert task.var1 == 2
assert task.var2 == "anaconda"
# verify that the started/completed signals were triggered
assert self._test_variable4 is task
assert self._test_variable6 is task
def test_task_subclass_heavy(self):
"""Check if a Task subclass with custom start() method works."""
class CustomStartTask(Task):
def __init__(self, name):
super(CustomStartTask, self).__init__(name, task=None, task_args=[])
self.var1 = 0
self.var2 = None
# We define a custom start method and override it with our own "payload".
# This is more "heavy" than overriding just run_task() method and
# we generally need to implement all the locking and signal triggering.
# On the other hand it can potentially provide more fine-grained control
# over how the task is processed.
def start(self):
self.var1 += 1
self.var1 += 1
self.var2 = "anaconda"
task = CustomStartTask("custom payload task")
# connect callbacks
task.started.connect(self._set_var_4)
task.completed.connect(self._set_var_6)
# verify initial state
assert task.var1 == 0
assert task.var2 is None
# run the custom task
task.start()
# verify that the custom payload was run
assert task.var1 == 2
assert task.var2 == "anaconda"
# verify that the started/completed signals were *not* triggered
# (as they are not called by the reimplemented start() method)
assert self._test_variable4 is None
assert self._test_variable6 is None
def test_task_subclass_kwargs(self):
"""Check if kwarg passing works for Task subclasses."""
class TestTask(Task):
def __init__(self, name, task, task_args, custom_option="foo"):
super(TestTask, self).__init__(name, task, task_args)
self._custom_option = custom_option
@property
def custom_option(self):
return self._custom_option
# check that the kwarg has been propagated correctly
task = TestTask("foo", self._set_var_5, ("anaconda",))
assert task.custom_option == "foo"
# also check that the task still works as expected
task.start()
assert self._test_variable5 == "anaconda"
def test_empty_task_queue(self):
"""Check that an empty task queue works correctly."""
# first check if empty task queue works correctly
task_queue = TaskQueue("foo", status_message="foo status message")
assert task_queue.name == "foo"
assert task_queue.status_message == "foo status message"
assert task_queue.task_count == 0
assert task_queue.queue_count == 0
assert task_queue.current_task_number is None
assert task_queue.current_queue_number is None
assert task_queue.progress == 0.0
assert not task_queue.running
assert not task_queue.done
assert len(task_queue.summary) > 0
# connect started/completed callbacks
# these should be triggered
task_queue.started.connect(self._set_var_4)
task_queue.completed.connect(self._set_var_5)
# these should not
should_not_run = lambda x: self._set_var_6("anaconda")
task_queue.task_started.connect(should_not_run)
task_queue.task_completed.connect(should_not_run)
task_queue.queue_started.connect(should_not_run)
task_queue.queue_completed.connect(should_not_run)
# it should be possible to start an empty task queue
task_queue.start()
# check state after the run
assert not task_queue.running
assert task_queue.done
assert task_queue.current_queue_number is None
assert task_queue.current_task_number is None
assert task_queue.task_count == 0
assert task_queue.queue_count == 0
# started/completed signals should still be triggered, even
# if the queue is empty
assert self._test_variable4 is task_queue
assert self._test_variable5 is task_queue
# the nested queue/task signals should not be triggered if
# the queue is empty
assert self._test_variable6 is None
def test_task_queue_processing(self):
"""Check that task queue processing works correctly."""
# callback counting functions
def task_started_cb(*args):
self._task_started_count += 1
def task_completed_cb(*args):
self._task_completed_count += 1
def queue_started_cb(*args):
self._queue_started_count += 1
def queue_completed_cb(*args):
self._queue_completed_count += 1
# verify initial content of callback counters
assert self._task_started_count == 0
assert self._task_completed_count == 0
assert self._queue_started_count == 0
assert self._queue_completed_count == 0
# create some groups
group1 = TaskQueue(name="group1", status_message="processing group1")
group1.append(Task("increment var 1", self._increment_var1))
group2 = TaskQueue(name="group2", status_message="processing group2")
group2.append(Task("increment var 2", self._increment_var2))
group2.append(Task("increment var 2", self._increment_var2))
group3 = TaskQueue(name="group3", status_message="processing group3 (empty)")
# create the top level queue
queue1 = TaskQueue(name="queue1")
# connect to it's top-level callbacks
queue1.task_started.connect(task_started_cb)
queue1.task_completed.connect(task_completed_cb)
queue1.queue_started.connect(queue_started_cb)
queue1.queue_completed.connect(queue_completed_cb)
# add the nested queues
queue1.append(group1)
queue1.append(group2)
queue1.append(group3) # an empty group should be also processed correctly
# and one top-level task
queue1.append(Task("increment var 1", self._increment_var1))
# check that the groups have been added correctly
assert len(queue1) == 4
assert queue1[0].name == "group1"
assert len(queue1[0]) == 1
assert queue1[1].name == "group2"
assert len(queue1[1]) == 2
assert queue1[2].name == "group3"
assert len(queue1[2]) == 0
assert queue1.queue_count == 3
assert queue1.task_count == 4
# summary is generated recursively
assert bool(queue1.summary)
# start the queue
queue1.start()
# check if the tasks were correctly executed
assert self._test_variable1 == 2
assert self._test_variable2 == 2
assert self._test_variable3 == 0
# check that the task & queue signals were triggered correctly
assert self._task_started_count == 4
assert self._task_completed_count == 4
assert self._queue_started_count == 3
assert self._queue_completed_count == 3
# check queue state after execution
assert not queue1.running
assert queue1.done
assert queue1.current_task_number is None
assert queue1.current_queue_number is None
# create another queue and add some task groups and tasks to it
group4 = TaskQueue(name="group 4", status_message="processing group4")
group4.append(Task("increment var 1", self._increment_var1))
group5 = TaskQueue(name="group 5", status_message="processing group5")
group5.append(Task("increment var 3", self._increment_var3))
queue2 = TaskQueue(name="queue2")
queue2.append(group4)
queue2.append(group5)
# start the second queue
queue2.start()
# check the tasks also properly executed
assert self._test_variable1 == 3
assert self._test_variable2 == 2
assert self._test_variable3 == 1
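if __name__ == "__main__":
    # Convenience entry point for running this module directly; the project's
    # own test runner does not require it.
    unittest.main()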
| M4rtinK/anaconda | tests/unit_tests/pyanaconda_tests/test_installation_tasks.py | Python | gpl-2.0 | 15,098 | 0.000795 |
# Copyright (c) 2012-2022, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
from . import AWSObject, AWSProperty, PropsDictType, Tags
from .validators import boolean
class DataResource(AWSProperty):
"""
`DataResource <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudtrail-trail-dataresource.html>`__
"""
props: PropsDictType = {
"Type": (str, True),
"Values": ([str], False),
}
class EventSelector(AWSProperty):
"""
`EventSelector <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudtrail-trail-eventselector.html>`__
"""
props: PropsDictType = {
"DataResources": ([DataResource], False),
"ExcludeManagementEventSources": ([str], False),
"IncludeManagementEvents": (boolean, False),
"ReadWriteType": (str, False),
}
class InsightSelector(AWSProperty):
"""
`InsightSelector <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudtrail-trail-insightselector.html>`__
"""
props: PropsDictType = {
"InsightType": (str, False),
}
class Trail(AWSObject):
"""
`Trail <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudtrail-trail.html>`__
"""
resource_type = "AWS::CloudTrail::Trail"
props: PropsDictType = {
"CloudWatchLogsLogGroupArn": (str, False),
"CloudWatchLogsRoleArn": (str, False),
"EnableLogFileValidation": (boolean, False),
"EventSelectors": ([EventSelector], False),
"IncludeGlobalServiceEvents": (boolean, False),
"InsightSelectors": ([InsightSelector], False),
"IsLogging": (boolean, True),
"IsMultiRegionTrail": (boolean, False),
"IsOrganizationTrail": (boolean, False),
"KMSKeyId": (str, False),
"S3BucketName": (str, True),
"S3KeyPrefix": (str, False),
"SnsTopicName": (str, False),
"Tags": (Tags, False),
"TrailName": (str, False),
}
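# --- Illustrative usage sketch (not part of the autogenerated module); the
# logical name and bucket name below are assumptions for demonstration only.
if __name__ == "__main__":
    example_trail = Trail(
        "ExampleTrail",
        IsLogging=True,
        S3BucketName="example-trail-logs",
        IsMultiRegionTrail=True,
        EventSelectors=[
            EventSelector(IncludeManagementEvents=True, ReadWriteType="All")
        ],
    )
    # to_dict() renders the resource as a plain CloudFormation dict.
    print(example_trail.to_dict())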
| cloudtools/troposphere | troposphere/cloudtrail.py | Python | bsd-2-clause | 2,138 | 0.001871 |
"""
components/tools/OmeroPy/scripts/omero/import_scripts/Populate_ROI.py
Uses the omero.util.populate_roi functionality to parse all the
measurement files attached to a plate, and generate server-side
rois.
params:
Plate_ID: id of the plate which should be parsed.
Copyright 2009 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import omero.scripts as scripts
from omero.util.populate_roi import *
client = scripts.client('Populate_ROI.py',
scripts.Long("Plate_ID", optional = False,
description = "ID of a valid plate with attached results files"),
version = "4.2.0",
contact = "ome-users@lists.openmicroscopy.org.uk",
description = """Generates regions of interest from the measurement files associated with a plate
This script is executed by the server on initial import, and should typically not need
to be run by users.""")
factory = PlateAnalysisCtxFactory(client.getSession())
analysis_ctx = factory.get_analysis_ctx(client.getInput("Plate_ID").val)
n_measurements = analysis_ctx.get_measurement_count()
for i in range(n_measurements):
measurement_ctx = analysis_ctx.get_measurement_ctx(i)
measurement_ctx.parse_and_populate()
| joshmoore/openmicroscopy | components/tools/OmeroPy/scripts/omero/import_scripts/Populate_ROI.py | Python | gpl-2.0 | 1,244 | 0.012862 |
#coding=utf-8
"""
parser.py
This contains the main Parser class that can be instantiated to create rules.
"""
from .rules import RuleMap
from .compat import basestring
class Parser(object):
"""
    Parser exposes a couple of methods for reading in strings.
    Currently only parse_file is working.
"""
def __init__(self):
"""Initalizer"""
self.debug = False
self._parse_rule_map = RuleMap(list)
self._find_rule_map = RuleMap(list)
def on_parse(self, eventname):
"""
        Decorator for rules. Registers a function to be called when the rule
        matches during parsing.
"""
def parse_decorator(func):
"""Event decorator closure thing"""
self._parse_rule_map.add_rule(eventname, func)
return func
return parse_decorator
def on_find(self, eventname):
"""
        Decorator for rules. Registers a function to be called when the rule
        matches during a find.
"""
def find_decorator(func):
"""Event decorator closure thing"""
self._find_rule_map.add_rule(eventname, func)
return func
return find_decorator
def parse_file(self, file):
"""Parses through a file"""
with open(file, 'r') as f:
for line in f:
self._parse_rule_map.query_parse(line)
def iter_parse(self, iterable):
"""Parses an interator/generator"""
for item in iterable:
self._parse_rule_map.query_parse(item)
def parse_string(self, string):
"""Parses and int or string"""
return self._parse_rule_map.query_parse(string)
def parse(self, item):
"""Magical method that automatically chooses parse string or iter parse"""
if isinstance(item, basestring):
return self.parse_string(item)
else:
self.iter_parse(item)
def find_string(self, string):
"""finds an int or string based on input pattern"""
return self._find_rule_map.query_find(string)
def iter_find(self, iterable):
"""Finds an string based on an input pattern and interable/generator"""
for item in iterable:
self._find_rule_map.query_find(item)
def find_file(self, file):
"""find a string based on an input pattern from a file"""
with open(file, 'r') as f:
for line in f:
self._parse_rule_map.query_parse(line)
def find(self, item):
"""Magical method that chooses between iter_find and find_string"""
if isinstance(item, basestring):
return self.find_string(item)
else:
self.iter_find(item)
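# Minimal usage sketch (the rule pattern syntax depends on the underlying
# RuleMap implementation; the rule and handler below are illustrative only):
#
#   parser = Parser()
#
#   @parser.on_parse('Hello {name}')
#   def greet(name):
#       print('Greeted:', name)
#
#   parser.parse('Hello World')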
| erinxocon/Text-Parsing-Function-Dispatcher | tpfd/parser.py | Python | mit | 2,830 | 0.00424 |
from config import config, ConfigSlider, ConfigSubsection, ConfigYesNo, ConfigText, ConfigInteger
from SystemInfo import SystemInfo
from fcntl import ioctl
import os
import struct
# asm-generic/ioctl.h
IOC_NRBITS = 8L
IOC_TYPEBITS = 8L
IOC_SIZEBITS = 13L
IOC_DIRBITS = 3L
IOC_NRSHIFT = 0L
IOC_TYPESHIFT = IOC_NRSHIFT+IOC_NRBITS
IOC_SIZESHIFT = IOC_TYPESHIFT+IOC_TYPEBITS
IOC_DIRSHIFT = IOC_SIZESHIFT+IOC_SIZEBITS
IOC_READ = 2L
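# Builds the EVIOCGNAME ioctl request number (read a device's name into a
# buffer of `length` bytes), mirroring the kernel's _IOC() macro fields above.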
def EVIOCGNAME(length):
return (IOC_READ<<IOC_DIRSHIFT)|(length<<IOC_SIZESHIFT)|(0x45<<IOC_TYPESHIFT)|(0x06<<IOC_NRSHIFT)
class inputDevices:
def __init__(self):
self.Devices = {}
self.currentDevice = ""
self.getInputDevices()
def getInputDevices(self):
devices = os.listdir("/dev/input/")
for evdev in devices:
try:
buffer = "\0"*512
self.fd = os.open("/dev/input/" + evdev, os.O_RDWR | os.O_NONBLOCK)
self.name = ioctl(self.fd, EVIOCGNAME(256), buffer)
self.name = self.name[:self.name.find("\0")]
os.close(self.fd)
except (IOError,OSError), err:
print '[iInputDevices] getInputDevices ' + evdev + ' <ERROR: ioctl(EVIOCGNAME): ' + str(err) + ' >'
self.name = None
if self.name:
self.Devices[evdev] = {'name': self.name, 'type': self.getInputDeviceType(self.name),'enabled': False, 'configuredName': None }
def getInputDeviceType(self,name):
if "remote control" in name:
return "remote"
elif "keyboard" in name:
return "keyboard"
elif "mouse" in name:
return "mouse"
else:
print "Unknown device type:",name
return None
def getDeviceName(self, x):
if x in self.Devices.keys():
return self.Devices[x].get("name", x)
else:
return "Unknown device name"
def getDeviceList(self):
return sorted(self.Devices.iterkeys())
def setDeviceAttribute(self, device, attribute, value):
#print "[iInputDevices] setting for device", device, "attribute", attribute, " to value", value
if device in self.Devices:
self.Devices[device][attribute] = value
def getDeviceAttribute(self, device, attribute):
if device in self.Devices:
if attribute in self.Devices[device]:
return self.Devices[device][attribute]
return None
def setEnabled(self, device, value):
oldval = self.getDeviceAttribute(device, 'enabled')
#print "[iInputDevices] setEnabled for device %s to %s from %s" % (device,value,oldval)
self.setDeviceAttribute(device, 'enabled', value)
if oldval is True and value is False:
self.setDefaults(device)
def setName(self, device, value):
#print "[iInputDevices] setName for device %s to %s" % (device,value)
self.setDeviceAttribute(device, 'configuredName', value)
#struct input_event {
# struct timeval time; -> ignored
# __u16 type; -> EV_REP (0x14)
# __u16 code; -> REP_DELAY (0x00) or REP_PERIOD (0x01)
# __s32 value; -> DEFAULTS: 700(REP_DELAY) or 100(REP_PERIOD)
#}; -> size = 16
def setDefaults(self, device):
print "[iInputDevices] setDefaults for device %s" % device
self.setDeviceAttribute(device, 'configuredName', None)
event_repeat = struct.pack('iihhi', 0, 0, 0x14, 0x01, 100)
event_delay = struct.pack('iihhi', 0, 0, 0x14, 0x00, 700)
fd = os.open("/dev/input/" + device, os.O_RDWR)
os.write(fd, event_repeat)
os.write(fd, event_delay)
os.close(fd)
def setRepeat(self, device, value): #REP_PERIOD
if self.getDeviceAttribute(device, 'enabled'):
print "[iInputDevices] setRepeat for device %s to %d ms" % (device,value)
event = struct.pack('iihhi', 0, 0, 0x14, 0x01, int(value))
fd = os.open("/dev/input/" + device, os.O_RDWR)
os.write(fd, event)
os.close(fd)
def setDelay(self, device, value): #REP_DELAY
if self.getDeviceAttribute(device, 'enabled'):
print "[iInputDevices] setDelay for device %s to %d ms" % (device,value)
event = struct.pack('iihhi', 0, 0, 0x14, 0x00, int(value))
fd = os.open("/dev/input/" + device, os.O_RDWR)
os.write(fd, event)
os.close(fd)
class InitInputDevices:
def __init__(self):
self.currentDevice = ""
self.createConfig()
def createConfig(self, *args):
config.inputDevices = ConfigSubsection()
for device in sorted(iInputDevices.Devices.iterkeys()):
self.currentDevice = device
#print "[InitInputDevices] -> creating config entry for device: %s -> %s " % (self.currentDevice, iInputDevices.Devices[device]["name"])
self.setupConfigEntries(self.currentDevice)
self.currentDevice = ""
def inputDevicesEnabledChanged(self,configElement):
if self.currentDevice != "" and iInputDevices.currentDevice == "":
iInputDevices.setEnabled(self.currentDevice, configElement.value)
elif iInputDevices.currentDevice != "":
iInputDevices.setEnabled(iInputDevices.currentDevice, configElement.value)
def inputDevicesNameChanged(self,configElement):
if self.currentDevice != "" and iInputDevices.currentDevice == "":
iInputDevices.setName(self.currentDevice, configElement.value)
if configElement.value != "":
devname = iInputDevices.getDeviceAttribute(self.currentDevice, 'name')
if devname != configElement.value:
cmd = "config.inputDevices." + self.currentDevice + ".enabled.value = False"
exec cmd
cmd = "config.inputDevices." + self.currentDevice + ".enabled.save()"
exec cmd
elif iInputDevices.currentDevice != "":
iInputDevices.setName(iInputDevices.currentDevice, configElement.value)
def inputDevicesRepeatChanged(self,configElement):
if self.currentDevice != "" and iInputDevices.currentDevice == "":
iInputDevices.setRepeat(self.currentDevice, configElement.value)
elif iInputDevices.currentDevice != "":
iInputDevices.setRepeat(iInputDevices.currentDevice, configElement.value)
def inputDevicesDelayChanged(self,configElement):
if self.currentDevice != "" and iInputDevices.currentDevice == "":
iInputDevices.setDelay(self.currentDevice, configElement.value)
elif iInputDevices.currentDevice != "":
iInputDevices.setDelay(iInputDevices.currentDevice, configElement.value)
def setupConfigEntries(self,device):
cmd = "config.inputDevices." + device + " = ConfigSubsection()"
exec cmd
cmd = "config.inputDevices." + device + ".enabled = ConfigYesNo(default = False)"
exec cmd
cmd = "config.inputDevices." + device + ".enabled.addNotifier(self.inputDevicesEnabledChanged,config.inputDevices." + device + ".enabled)"
exec cmd
cmd = "config.inputDevices." + device + '.name = ConfigText(default="")'
exec cmd
cmd = "config.inputDevices." + device + ".name.addNotifier(self.inputDevicesNameChanged,config.inputDevices." + device + ".name)"
exec cmd
cmd = "config.inputDevices." + device + ".repeat = ConfigSlider(default=100, increment = 10, limits=(0, 500))"
exec cmd
cmd = "config.inputDevices." + device + ".repeat.addNotifier(self.inputDevicesRepeatChanged,config.inputDevices." + device + ".repeat)"
exec cmd
cmd = "config.inputDevices." + device + ".delay = ConfigSlider(default=700, increment = 100, limits=(0, 5000))"
exec cmd
cmd = "config.inputDevices." + device + ".delay.addNotifier(self.inputDevicesDelayChanged,config.inputDevices." + device + ".delay)"
exec cmd
iInputDevices = inputDevices()
config.plugins.remotecontroltype = ConfigSubsection()
config.plugins.remotecontroltype.rctype = ConfigInteger(default = 0)
class RcTypeControl():
def __init__(self):
if SystemInfo["RcTypeChangable"] and os.path.exists('/proc/stb/info/boxtype'):
self.isSupported = True
self.boxType = open('/proc/stb/info/boxtype', 'r').read().strip()
if config.plugins.remotecontroltype.rctype.value != 0:
self.writeRcType(config.plugins.remotecontroltype.rctype.value)
else:
self.isSupported = False
def multipleRcSupported(self):
return self.isSupported
def getBoxType(self):
return self.boxType
def writeRcType(self, rctype):
if self.isSupported and rctype > 0:
open('/proc/stb/ir/rc/type', 'w').write('%d' % rctype)
def readRcType(self):
rc = 0
if self.isSupported:
rc = open('/proc/stb/ir/rc/type', 'r').read().strip()
return int(rc)
iRcTypeControl = RcTypeControl()
| ACJTeam/enigma2 | lib/python/Components/InputDevice.py | Python | gpl-2.0 | 8,051 | 0.026581 |
# Copyright 2015 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from touchdown.core.resource import Resource
class AliasTarget(Resource):
resource_name = "alias_target"
| yaybu/touchdown | touchdown/aws/route53/alias_target.py | Python | apache-2.0 | 689 | 0 |
# Copyright (C) 2011 Lars Wirzenius
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
__version__ = '1.20130808'
from fmt import TextFormat
from settings import (Settings, log_group_name, config_group_name,
perf_group_name)
from runcmd import runcmd, runcmd_unchecked, shell_quote, ssh_runcmd
from app import Application, AppException
# The plugin system
from hook import Hook, FilterHook
from hookmgr import HookManager
from plugin import Plugin
from pluginmgr import PluginManager
__all__ = locals()
| perryl/morph | cliapp/__init__.py | Python | gpl-2.0 | 1,188 | 0 |
import argparse
import logging
from typing import List, Optional
from redis import StrictRedis
from minique.compat import sentry_sdk
from minique.work.worker import Worker
def get_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser()
parser.add_argument("-u", "--redis-url", required=True)
parser.add_argument("-q", "--queues", nargs="+", required=True)
parser.add_argument("--allow-callable", nargs="+", required=True)
parser.add_argument("--single-tick", action="store_true")
return parser
def main(argv: Optional[List[str]] = None) -> None:
parser = get_parser()
args = parser.parse_args(argv)
logging.basicConfig(datefmt="%Y-%m-%d %H:%M:%S", level=logging.INFO)
redis = StrictRedis.from_url(args.redis_url)
worker = Worker.for_queue_names(redis=redis, queue_names=args.queues)
worker.allowed_callable_patterns = set(args.allow_callable)
worker.log.info("Worker initialized")
if sentry_sdk:
try:
sentry_sdk.init()
except Exception as exc:
worker.log.warning("Failed to initialize Sentry: %s", exc, exc_info=True)
else:
hub = sentry_sdk.hub.Hub.current
if hub and hub.client and hub.client.dsn:
worker.log.info("Sentry configured with a valid DSN")
if args.single_tick:
worker.tick()
else:
worker.loop()
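# Example invocation (assumes this module is exposed as a console entry point;
# the Redis URL, queue name and callable pattern are illustrative):
#   minique -u redis://localhost:6379/0 -q work --allow-callable 'myapp.jobs.*'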
| valohai/minique | minique/cli.py | Python | mit | 1,399 | 0.000715 |
from __future__ import print_function, unicode_literals
import os
import shutil
import zipfile
import datetime
import tempfile
import subprocess
from copy import deepcopy
import pytest
import numpy as np
from numpy.testing import assert_almost_equal
from sklearn.dummy import DummyClassifier
from destimator import DescribedEstimator, utils
@pytest.fixture
def features():
return np.zeros([10, 3])
@pytest.fixture
def labels():
labels = np.zeros(10)
labels[5:] = 1.0
return labels
@pytest.fixture
def clf(features, labels):
clf = DummyClassifier(strategy='constant', constant=0.0)
clf.fit(features, labels)
return clf
@pytest.fixture
def clf_described(clf, features, labels, feature_names):
return DescribedEstimator(clf, features, labels, features, labels, feature_names)
@pytest.fixture
def feature_names():
return ['one', 'two', 'three']
@pytest.fixture
def metadata_v1():
return {
'metadata_version': 1,
'created_at': '2016-01-01-00-00-00',
'feature_names': ['f0', 'f1', 'f2'],
'vcs_hash': 'deadbeef',
'distribution_info': {
'python': 3.5,
'packages': [],
},
}
@pytest.fixture
def metadata_v2():
return {
'metadata_version': 2,
'created_at': '2016-02-01-00-00-00',
'feature_names': ['f0', 'f1', 'f2'],
'vcs_hash': 'deadbeef',
'distribution_info': {
'python': 3.5,
'packages': [],
},
'performance_scores': {
'precision': [0.7],
'recall': [0.8],
'fscore': [0.9],
'support': [100],
'roc_auc': 0.6,
'log_loss': 0.5,
}
}
class TestDescribedEstimator(object):
def test_init(self, clf_described):
assert clf_described.n_training_samples_ == 10
assert clf_described.n_features_ == 3
def test_init_error(self, clf, features, labels, feature_names):
with pytest.raises(ValueError):
wrong_labels = np.zeros([9, 1])
DescribedEstimator(clf, features, wrong_labels, features, labels, feature_names)
with pytest.raises(ValueError):
wrong_feature_names = ['']
DescribedEstimator(clf, features, labels, features, labels, wrong_feature_names)
def test_eq(self, clf, features, labels, feature_names, metadata_v1, metadata_v2):
d1 = DescribedEstimator(clf, features, labels, features, labels, compute_metadata=False, metadata=metadata_v1)
d1b = DescribedEstimator(clf, features, labels, features, labels, compute_metadata=False, metadata=metadata_v1)
assert d1 == d1b
d2 = DescribedEstimator(clf, features, labels, features, labels, compute_metadata=False, metadata=metadata_v2)
assert d1 != d2
metadata_v1a = dict(metadata_v1)
metadata_v1a['metadata_version'] = 3
d1a = DescribedEstimator(clf, features, labels, features, labels, compute_metadata=False, metadata=metadata_v1a)
assert d1 != d1a
def test_from_file(self, clf_described):
save_dir = tempfile.mkdtemp()
try:
file_path = clf_described.save(save_dir)
destimator = DescribedEstimator.from_file(file_path)
assert destimator == clf_described
finally:
shutil.rmtree(save_dir)
def test_is_compatible(self, clf, clf_described, features, labels):
compatible = DescribedEstimator(clf, features, labels, features, labels, ['one', 'two', 'three'])
assert clf_described.is_compatible(compatible)
incompatible = DescribedEstimator(clf, features, labels, features, labels, ['one', 'two', 'boom'])
assert not clf_described.is_compatible(incompatible)
def test_metadata(self, clf, features, labels, feature_names):
clf_described = DescribedEstimator(clf, features, labels, features, labels, feature_names)
d = clf_described.metadata
assert d['feature_names'] == feature_names
# assert type(d['metadata_version']) == str
assert type(datetime.datetime.strptime(d['created_at'], '%Y-%m-%d-%H-%M-%S')) == datetime.datetime
# assert type(d['vcs_hash']) == str
assert type(d['distribution_info']) == dict
# assert type(d['distribution_info']['python']) == str
assert type(d['distribution_info']['packages']) == list
assert type(d['performance_scores']['precision']) == list
assert type(d['performance_scores']['precision'][0]) == float
assert type(d['performance_scores']['recall']) == list
assert type(d['performance_scores']['recall'][0]) == float
assert type(d['performance_scores']['fscore']) == list
assert type(d['performance_scores']['fscore'][0]) == float
assert type(d['performance_scores']['support']) == list
assert type(d['performance_scores']['support'][0]) == int
assert type(d['performance_scores']['roc_auc']) == float
assert type(d['performance_scores']['log_loss']) == float
def test_get_metric(self, clf_described):
assert clf_described.recall == [1.0, 0.0]
assert clf_described.roc_auc == 0.5
        # log_loss uses epsilon 1e-15, so -log(1e-15) / 2 is approximately 17.27
assert_almost_equal(clf_described.log_loss, 17.269, decimal=3)
def test_save_classifier(self, clf_described):
save_dir = tempfile.mkdtemp()
try:
saved_name = clf_described.save(save_dir)
assert os.path.dirname(saved_name) == save_dir
assert os.path.isfile(saved_name)
assert saved_name.endswith('.zip')
zf = zipfile.ZipFile(saved_name)
files_present = zf.namelist()
expected_files = [
'model.bin', 'features_train.bin', 'labels_train.bin',
'features_test.bin', 'labels_test.bin', 'metadata.json',
]
# could use a set, but this way errors are easier to read
for f in expected_files:
assert f in files_present
finally:
shutil.rmtree(save_dir)
def test_save_classifier_with_filename(self, clf_described):
save_dir = tempfile.mkdtemp()
try:
saved_name = clf_described.save(save_dir, filename='boom.pkl')
assert os.path.basename(saved_name) == 'boom.pkl.zip'
assert os.path.isfile(saved_name)
finally:
shutil.rmtree(save_dir)
def test_save_classifier_nonexistent_path(self, clf_described):
save_dir = tempfile.mkdtemp()
try:
saved_name = clf_described.save(os.path.join(save_dir, 'nope'))
os.path.dirname(saved_name) == save_dir
assert os.path.isfile(saved_name)
finally:
shutil.rmtree(save_dir)
class TestGetCurrentGitHash(object):
def test_get_current_vcs_hash(self, monkeypatch):
def fake_check_output(*args, **kwargs):
return b'thisisagithash'
monkeypatch.setattr(subprocess, 'check_output', fake_check_output)
assert utils.get_current_vcs_hash() == 'thisisagithash'
def test_get_current_vcs_hash_no_git(self, monkeypatch):
def fake_check_output(*args, **kwargs):
raise OSError()
monkeypatch.setattr(subprocess, 'check_output', fake_check_output)
assert utils.get_current_vcs_hash() == ''
def test_get_current_vcs_hash_git_error(self, monkeypatch):
def fake_check_output(*args, **kwargs):
raise subprocess.CalledProcessError(0, '', '')
monkeypatch.setattr(subprocess, 'check_output', fake_check_output)
assert utils.get_current_vcs_hash() == ''
| rainforestapp/destimator | destimator/tests/test.py | Python | mit | 7,733 | 0.001681 |
#! /usr/bin/env python
import requests
import sys
import urllib
from requests.auth import HTTPBasicAuth
if len(sys.argv) != 5:
print "usage: verify-topo-links onos-node cluster-id first-index last-index"
sys.exit(1)
node = sys.argv[1]
cluster = sys.argv[2]
first = int(sys.argv[3])
last = int(sys.argv[4])
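# Example (illustrative values; cluster ids are assigned by ONOS):
#   verify-topo-devices.py 127.0.0.1 0 1 25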
found = 0
topoRequest = requests.get('http://' + node + ':8181/onos/v1/topology/clusters/'
+ cluster
+ "/devices",
auth=HTTPBasicAuth('onos', 'rocks'))
if topoRequest.status_code != 200:
print topoRequest.text
sys.exit(1)
topoJson = topoRequest.json()
for deviceIndex in range(first, last+1):
lookingFor = "of:" + format(deviceIndex, '016x')
print lookingFor
for arrayIndex in range(0, len(topoJson["devices"])):
device = topoJson["devices"][arrayIndex]
if device == lookingFor:
found = found + 1
print "Match found for " + device
break
if found == last - first + 1:
    sys.exit(0)
print "Found " + str(found) + " matches, need " + str(last - first + 1)
sys.exit(2)
| planoAccess/clonedONOS | tools/test/scenarios/bin/verify-topo-devices.py | Python | apache-2.0 | 1,143 | 0.002625 |
#############################
##
## The Squire module
##
## (C) Christopher Woods
##
import Sire.MM
import Sire.System
from Sire.Squire._Squire import *
| michellab/Sire | wrapper/Squire/__init__.py | Python | gpl-2.0 | 155 | 0.012903 |
"""
Телефонен указател
Задачата е да се напишат функции, които работят като телефонен указател.
Телефонният указател трябва да се съхранява във файл.
Телефоните се представят като речник с две полете:
- `name` - име на човек
- `phone` - телефоне номер
Например:
{
'name': 'Ivan',
'phone': '0884-35-45-55'
}
Телефонният указател представлява списък от записи:
[
{
'name': 'Ivan',
'phone': '0884-35-45-55'
},
{
'name': 'Pesho',
'phone': '0887-33-44-55'
}
]
Телефонният указател да се съхранява във файл в JSON формат.
"""
import json
import os.path
import sys
def load_contacts_from_file(filename):
if os.path.exists(filename):
f = open(filename)
contacts = json.loads(f.read())
f.close()
else:
contacts = []
return contacts
def save_contacts_to_file(filename, contacts):
f = open(filename, 'w')
f.write(json.dumps(contacts))
f.close()
def set_phone(filename, name, phone):
"""
    Sets the phone number `phone` for the person named `name`.
    If a person named `name` exists, updates their phone number.
    If not, adds a new entry to the phonebook.
    The function must:
    - read the phonebook from the file named `filename`
    - modify the phonebook
    - write the phonebook back to the file named `filename`
    Example:
    >>> set_phone('phonebook.json', 'Ivan', '0895-11-11-11') # doctest: +SKIP
"""
contacts = load_contacts_from_file(filename)
index = find_contact_index(contacts, name)
if index != -1:
contacts[index]['phone'] = phone
else:
insert_contact(contacts, name, phone)
save_contacts_to_file(filename, contacts)
def remove_phone(filename, name):
"""
    Removes the entry (if it exists) for the person named `name` from the phonebook.
    The function must:
    - read the phonebook from the file named `filename`
    - modify the phonebook
    - write the phonebook back to the file named `filename`
    Example:
    >>> remove_phone('phonebook.json', 'Ivan') # doctest: +SKIP
"""
contacts = load_contacts_from_file(filename)
index = find_contact_index(contacts, name)
if index != -1:
contacts.pop(index)
save_contacts_to_file(filename, contacts)
def find_phone(filename, name):
"""
    Finds the phone number of the person named `name`.
    Returns the phone number, or `None` if it cannot be found.
    The function must:
    - read the phonebook from the file named `filename`
    - look up and return the phone number
    Example:
    >>> find_phone('phonebook.json', 'Ivan') # doctest: +SKIP
    '0895-11-11-11'
"""
contacts = load_contacts_from_file(filename)
index = find_contact_index(contacts, name)
if index != -1:
return contacts[index]['phone']
def find_contact_index(contacts, name):
"""
    A function that finds the position of a contact in the list (by name).
    Returns the position, or `-1` if no contact with that name is found.
>>> contacts = [{'name': 'Pesho', 'phone': 1}, {'name': 'Gosho', 'phone': 2}]
>>> find_contact_index(contacts, 'Gosho')
1
>>> find_contact_index(contacts, 'Ivan')
-1
"""
    for index, contact in enumerate(contacts):
        if contact['name'] == name:
            return index
    return -1
def insert_contact(contacts, name, phone):
"""
    A function that inserts a new contact into the contact list,
    keeping the list sorted by name.
>>> contacts = []
>>> insert_contact(contacts, 'Pesho', 1)
>>> insert_contact(contacts, 'Gosho', 2)
>>> contacts[0]['name']
'Gosho'
>>> contacts[1]['name']
'Pesho'
>>> insert_contact(contacts, 'Boby', 3)
>>> contacts[0]['name']
'Boby'
>>> contacts[1]['name']
'Gosho'
>>> contacts[2]['name']
'Pesho'
>>> insert_contact(contacts, 'Tosho', 4)
>>> contacts[3]['name']
'Tosho'
"""
    new_contact = {'name': name, 'phone': phone}
    position = len(contacts)
    for index, contact in enumerate(contacts):
        if contact['name'] > name:
            position = index
            break
    contacts.insert(position, new_contact)
def do_command(command, *args):
result = {
'set': set_phone,
'remove': remove_phone,
'find': find_phone
}[command](*args)
if result:
print(result)
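# Example command-line usage (file name is illustrative):
#   python phonebook.py set phonebook.json Ivan 0895-11-11-11
#   python phonebook.py find phonebook.json Ivan
#   python phonebook.py remove phonebook.json Ivan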
if __name__ == '__main__':
command = sys.argv[1]
if command == 'test':
import doctest
doctest.testmod()
else:
do_command(*sys.argv[1:])
| YAtOff/python0-reloaded | projects/hard/phonebook/phonebook.py | Python | mit | 5,283 | 0.000934 |
from cStringIO import StringIO
from twisted.internet.defer import fail, succeed
from twisted.web.error import Error
from juju.errors import FileNotFound, ProviderError, ProviderInteractionError
from juju.lib.testing import TestCase
from juju.providers.orchestra import MachineProvider
from .test_digestauth import GetPageAuthTestCase
class SomeError(Exception):
pass
def get_file_storage(custom_config=None):
config = {"orchestra-server": "somewhereel.se",
"orchestra-user": "fallback-user",
"orchestra-pass": "fallback-pass",
"acquired-mgmt-class": "acquired",
"available-mgmt-class": "available"}
if custom_config is None:
config["storage-url"] = "http://somewhe.re"
config["storage-user"] = "user"
config["storage-pass"] = "pass"
else:
config.update(custom_config)
provider = MachineProvider("blah", config)
return provider.get_file_storage()
class FileStorageGetTest(TestCase):
def setUp(self):
self.uuid4_m = self.mocker.replace("uuid.uuid4")
self.getPage = self.mocker.replace("twisted.web.client.getPage")
def test_get_url(self):
self.mocker.replay()
fs = get_file_storage()
self.assertEquals(fs.get_url("angry/birds"),
"http://somewhe.re/angry/birds")
def test_get_url_fallback(self):
self.mocker.replay()
fs = get_file_storage({})
self.assertEquals(fs.get_url("angry/birds"),
"http://somewhereel.se/webdav/angry/birds")
def test_get(self):
self.getPage("http://somewhe.re/rubber/chicken")
self.mocker.result(succeed("pulley"))
self.mocker.replay()
fs = get_file_storage()
d = fs.get("rubber/chicken")
def verify(result):
self.assertEquals(result.read(), "pulley")
d.addCallback(verify)
return d
def check_get_error(self, result, err_type, err_message):
self.getPage("http://somewhe.re/rubber/chicken")
self.mocker.result(result)
self.mocker.replay()
fs = get_file_storage()
d = fs.get("rubber/chicken")
self.assertFailure(d, err_type)
def verify(error):
self.assertEquals(str(error), err_message)
d.addCallback(verify)
return d
def test_get_error(self):
return self.check_get_error(
fail(SomeError("pow!")),
ProviderInteractionError,
"Unexpected SomeError interacting with provider: pow!")
def test_get_404(self):
return self.check_get_error(
fail(Error("404")),
FileNotFound,
"File was not found: 'http://somewhe.re/rubber/chicken'")
def test_get_bad_code(self):
return self.check_get_error(
fail(Error("999")),
ProviderError,
"Unexpected HTTP 999 trying to GET "
"http://somewhe.re/rubber/chicken")
class FileStoragePutTest(GetPageAuthTestCase):
def setup_mock(self):
self.uuid4_m = self.mocker.replace("uuid.uuid4")
def get_file_storage(self, with_user=True):
storage_url = self.get_base_url()
custom_config = {"storage-url": storage_url}
if with_user:
custom_config["storage-user"] = "user"
custom_config["storage-pass"] = "pass"
return get_file_storage(custom_config)
def test_no_auth_error(self):
self.add_plain("peregrine", "PUT", "", "croissant", 999)
fs = self.get_file_storage()
d = fs.put("peregrine", StringIO("croissant"))
self.assertFailure(d, ProviderError)
def verify(error):
self.assertIn("Unexpected HTTP 999 trying to PUT ", str(error))
d.addCallback(verify)
return d
def test_no_auth_201(self):
self.add_plain("peregrine", "PUT", "", "croissant", 201)
fs = self.get_file_storage()
d = fs.put("peregrine", StringIO("croissant"))
d.addCallback(self.assertEquals, True)
return d
def test_no_auth_204(self):
self.add_plain("peregrine", "PUT", "", "croissant", 204)
fs = self.get_file_storage()
d = fs.put("peregrine", StringIO("croissant"))
d.addCallback(self.assertEquals, True)
return d
def auth_common(self, username, status, with_user=True):
self.setup_mock()
self.uuid4_m()
self.mocker.result("dinner")
self.mocker.replay()
url = self.get_url("possum")
def check(response):
self.assertTrue(response.startswith(
'Digest username="%s", realm="sparta", nonce="meh", uri="%s"'
% (username, url)))
self.assertIn(
'qop="auth", nc="00000001", cnonce="dinner"', response)
self.add_auth(
"possum", "PUT", "", "Digest realm=sparta, nonce=meh, qop=auth",
check, expect_content="canabalt", status=status)
fs = self.get_file_storage(with_user)
return fs.put("possum", StringIO("canabalt"))
def test_auth_error(self):
d = self.auth_common("user", 808)
self.assertFailure(d, ProviderError)
def verify(error):
self.assertIn("Unexpected HTTP 808 trying to PUT", str(error))
d.addCallback(verify)
return d
def test_auth_bad_credentials(self):
d = self.auth_common("user", 401)
self.assertFailure(d, ProviderError)
def verify(error):
self.assertEquals(
str(error),
"The supplied storage credentials were not accepted by the "
"server")
d.addCallback(verify)
return d
def test_auth_201(self):
d = self.auth_common("user", 201)
d.addCallback(self.assertEquals, True)
return d
def test_auth_204(self):
d = self.auth_common("user", 204)
d.addCallback(self.assertEquals, True)
return d
def test_auth_fallback_error(self):
d = self.auth_common("fallback-user", 747, False)
self.assertFailure(d, ProviderError)
def verify(error):
self.assertIn("Unexpected HTTP 747 trying to PUT", str(error))
d.addCallback(verify)
return d
def test_auth_fallback_201(self):
d = self.auth_common("fallback-user", 201, False)
d.addCallback(self.assertEquals, True)
return d
def test_auth_fallback_204(self):
d = self.auth_common("fallback-user", 204, False)
d.addCallback(self.assertEquals, True)
return d
| anbangr/trusted-juju | juju/providers/orchestra/tests/test_files.py | Python | agpl-3.0 | 6,631 | 0 |
#!/usr/bin/python -u
from __future__ import absolute_import
from __future__ import print_function
#
# droiddemo.py, Copyright 2010-2013, The Beanstalks Project ehf.
# http://beanstalks-project.net/
#
# This is a proof-of-concept PageKite enabled HTTP server for Android.
# It has been developed and tested in the SL4A Python environment.
#
DOMAIN='phone.bre.pagekite.me'
SECRET='ba4e5430'
SOURCE='/sdcard/sl4a/scripts/droiddemo.py'
#
#############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
#
import six
from six.moves.urllib.parse import unquote, urlparse
import android
import pagekite
import os
class UiRequestHandler(pagekite.UiRequestHandler):
CAMERA_PATH = '/sdcard/dcim/.thumbnails'
HOME = ('<html><head>\n'
'<script type=text/javascript>'
'lastImage = "";'
'function getImage() {'
'xhr = new XMLHttpRequest();'
'xhr.open("GET", "/latest-image.txt", true);'
'xhr.onreadystatechange = function() {'
'if (xhr.readyState == 4) {'
'if (xhr.responseText && xhr.responseText != lastImage) {'
'document.getElementById("i").src = lastImage = xhr.responseText;'
'}'
'setTimeout("getImage()", 2000);'
'}'
'};'
'xhr.send(null);'
'}'
'</script>\n'
'</head><body onLoad="getImage();" style="text-align: center;">\n'
'<h1>Android photos!</h1>\n'
'<img id=i height=80% src="http://www.android.com/images/opensourceproject.gif">\n'
'<br><a href="/droiddemo.py">source code</a>'
'| <a href="/status.html">kite status</a>\n'
'</body></head>')
def listFiles(self):
mtimes = {}
for item in os.listdir(self.CAMERA_PATH):
iname = '%s/%s' % (self.CAMERA_PATH, item)
if iname.endswith('.jpg'):
mtimes[iname] = os.path.getmtime(iname)
files = list(six.iterkeys(mtimes))
files.sort(key=lambda iname: mtimes[iname])
return files
def do_GET(self):
(scheme, netloc, path, params, query, frag) = urlparse(self.path)
p = unquote(path)
if p.endswith('.jpg') and p.startswith(self.CAMERA_PATH) and ('..' not in p):
try:
jpgfile = open(p)
self.send_response(200)
self.send_header('Content-Type', 'image/jpeg')
self.send_header('Content-Length', '%s' % os.path.getsize(p))
self.send_header('Cache-Control', 'max-age: 36000')
self.send_header('Expires', 'Sat, 1 Jan 2011 12:00:00 GMT')
self.send_header('Last-Modified', 'Wed, 1 Sep 2011 12:00:00 GMT')
self.end_headers()
data = jpgfile.read()
while data:
try:
sent = self.wfile.write(data[0:15000])
data = data[15000:]
except Exception:
pass
return
except Exception as e:
print('%s' % e)
pass
if path == '/latest-image.txt':
flist = self.listFiles()
self.begin_headers(200, 'text/plain')
self.end_headers()
self.wfile.write(flist[-1])
return
elif path == '/droiddemo.py':
try:
pyfile = open(SOURCE)
self.begin_headers(200, 'text/plain')
self.end_headers()
self.wfile.write(pyfile.read().replace(SECRET, 'mysecret'))
except IOError as e:
self.begin_headers(404, 'text/plain')
self.end_headers()
self.wfile.write('Could not read %s: %s' % (SOURCE, e))
return
elif path == '/':
self.begin_headers(200, 'text/html')
self.end_headers()
self.wfile.write(self.HOME)
return
return pagekite.UiRequestHandler.do_GET(self)
class DroidKite(pagekite.PageKite):
def __init__(self, droid):
pagekite.PageKite.__init__(self)
self.droid = droid
self.ui_request_handler = UiRequestHandler
def Start(host, secret):
ds = DroidKite(android.Android())
ds.Configure(['--defaults',
'--httpd=localhost:9999',
'--backend=http:%s:localhost:9999:%s' % (host, secret)])
ds.Start()
Start(DOMAIN, SECRET)
| pagekite/PyPagekite | droiddemo.py | Python | agpl-3.0 | 4,921 | 0.009348 |
import py
try:
from pypy.rpython.test.test_llinterp import interpret
except ImportError:
py.test.skip('Needs PyPy to be on the PYTHONPATH')
from rply import ParserGenerator, Token
from rply.errors import ParserGeneratorWarning
from .base import BaseTests
from .utils import FakeLexer, BoxInt, ParserState
class TestTranslation(BaseTests):
def run(self, func, args):
return interpret(func, args)
def test_basic(self):
pg = ParserGenerator(["NUMBER", "PLUS"])
@pg.production("main : expr")
def main(p):
return p[0]
@pg.production("expr : expr PLUS expr")
def expr_op(p):
return BoxInt(p[0].getint() + p[2].getint())
@pg.production("expr : NUMBER")
def expr_num(p):
return BoxInt(int(p[0].getstr()))
with self.assert_warns(ParserGeneratorWarning, "1 shift/reduce conflict"):
parser = pg.build()
def f(n):
return parser.parse(FakeLexer([
Token("NUMBER", str(n)),
Token("PLUS", "+"),
Token("NUMBER", str(n))
])).getint()
assert self.run(f, [12]) == 24
def test_state(self):
pg = ParserGenerator(["NUMBER", "PLUS"], precedence=[
("left", ["PLUS"]),
])
@pg.production("main : expression")
def main(state, p):
state.count += 1
return p[0]
@pg.production("expression : expression PLUS expression")
def expression_plus(state, p):
state.count += 1
return BoxInt(p[0].getint() + p[2].getint())
@pg.production("expression : NUMBER")
def expression_number(state, p):
state.count += 1
return BoxInt(int(p[0].getstr()))
parser = pg.build()
def f():
state = ParserState()
return parser.parse(FakeLexer([
Token("NUMBER", "10"),
Token("PLUS", "+"),
Token("NUMBER", "12"),
Token("PLUS", "+"),
Token("NUMBER", "-2"),
]), state=state).getint() + state.count
assert self.run(f, []) == 26
| solanolabs/rply | tests/test_ztranslation.py | Python | bsd-3-clause | 2,200 | 0.000455 |
# -*- coding: utf-8 -*-
"""
Enchants
- CurrencyTypes.dbc
"""
from .. import *
class Currency(Model):
pass
class CurrencyTooltip(Tooltip):
def tooltip(self):
self.append("name", self.obj.getName())
self.append("description", self.obj.getDescription(), color=YELLOW)
return self.flush()
Currency.Tooltip = CurrencyTooltip
class CurrencyProxy(object):
"""
WDBC proxy for currencies
"""
def __init__(self, cls):
from pywow import wdbc
self.__file = wdbc.get("CurrencyTypes.dbc", build=-1)
def get(self, id):
return self.__file[id]
def getDescription(self, row):
return row.description_enus
def getName(self, row):
return row.name_enus
Currency.initProxy(CurrencyProxy)
| jleclanche/pywow | game/currencies/__init__.py | Python | cc0-1.0 | 702 | 0.029915 |
import unittest
from unittest2 import skip
from numpy import alltrue, arange
from enable.compiled_path import CompiledPath
# Chaco imports
from chaco.api import (ArrayDataSource, ColormappedScatterPlot, DataRange1D,
LinearMapper, PlotGraphicsContext, jet)
class TestColormappedScatterplot(unittest.TestCase):
def setUp(self):
self.index = ArrayDataSource(arange(10))
self.value = ArrayDataSource(arange(10))
self.color_data = ArrayDataSource(arange(10))
self.size_data = arange(10)
self.index_range = DataRange1D()
self.index_range.add(self.index)
self.index_mapper = LinearMapper(range=self.index_range)
self.value_range = DataRange1D()
self.value_range.add(self.value)
self.value_mapper = LinearMapper(range=self.value_range)
self.color_range = DataRange1D()
self.color_range.add(self.color_data)
self.color_mapper = jet(self.color_range)
self.scatterplot = ColormappedScatterPlot(
index=self.index,
value=self.value,
index_mapper=self.index_mapper,
value_mapper=self.value_mapper,
color_data=self.color_data,
marker_size=self.size_data,
color_mapper=self.color_mapper,
)
self.scatterplot.outer_bounds = [50, 50]
self.gc = PlotGraphicsContext((50, 50))
def test_scatter_render(self):
""" Coverage test to check basic case works """
self.gc.render_component(self.scatterplot)
actual = self.gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
def test_scatter_circle(self):
""" Coverage test to check circles work """
self.scatterplot.marker = 'circle'
self.gc.render_component(self.scatterplot)
actual = self.gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
    @skip("custom markers are currently broken; see #232")
def test_scatter_custom(self):
""" Coverage test to check custom markers work...
XXX ...which apparently they currently don't. See #232.
"""
# build path
path = CompiledPath()
path.begin_path()
path.move_to(-5, -5)
path.line_to(-5, 5)
path.line_to(5, 5)
path.line_to(5, -5)
path.line_to(-5, -5)
self.scatterplot.marker = 'custom'
self.scatterplot.custom_symbol = path
self.gc.render_component(self.scatterplot)
actual = self.gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
def test_colormap_updated(self):
""" If colormapper updated then we need to redraw """
self.color_mapper.updated = True
self.assertFalse(self.scatterplot.draw_valid)
if __name__ == "__main__":
unittest.main()
| tommy-u/chaco | chaco/tests/test_colormapped_scatterplot.py | Python | bsd-3-clause | 2,802 | 0 |
# Copyright 2017 by Notmail Bot contributors. All rights reserved.
#
# This file is part of Notmail Bot.
#
# Notmail Bot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Notmail Bot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Notmail Bot. If not, see <http:#www.gnu.org/licenses/>.
import os
from tinydb import TinyDB, Query
from repository.email_server import EmailServer
from repository.user import User
from repository.account import parse_accounts_to_json, parse_json_to_accounts
db = None
def get_dbc():
return db
def set_dbc(dbc):
global db
db = dbc
class DBC:
def __init__(self, path=None):
if path is None:
self.db = TinyDB(os.path.join('config', 'tmail-bot.json'))
else:
self.db = TinyDB(path)
def get_table(self, table_name):
return self.db.table(table_name)
def purge(self):
self.db.purge_tables()
def insert_email_server(self, name, host, port, protocol):
email_servers = self.db.table('EmailServers')
email_servers.insert({'name': name, 'host': host, 'port': port, 'protocol': protocol})
return EmailServer(name, host, port, protocol)
def search_email_server(self, name, protocol):
email_servers = self.db.table('EmailServers')
query = Query()
search = email_servers.search(query.name == name and query.protocol == protocol)
result = eval(str(search))[0] # We suppose that names + protocol will be unique
email_server = EmailServer(name, result['host'], result['port'], result['protocol'])
return email_server
def update_email_server(self, email_server):
email_servers = self.db.table('EmailServers')
query = Query()
email_servers.update({'host': email_server.host, 'port': email_server.port},
query.name == email_server.name and query.protocol == email_server.protocol)
def remove_email_server(self, name, protocol):
email_servers = self.db.table('EmailServers')
query = Query()
email_servers.remove(query.name == name and query.protocol == protocol)
def insert_user(self, id, accounts):
users = self.db.table('Users')
users.insert({'id': id, 'accounts': parse_accounts_to_json(accounts)})
return User(id, accounts)
def search_user(self, id):
users = self.db.table('Users')
query = Query()
search = users.search(query.id == id)
result = eval(str(search))[0]
user = User(id, parse_json_to_accounts(result['accounts']))
return user
def update_user(self, user):
users = self.db.table('Users')
query = Query()
users.update({'id': user.id, 'accounts': parse_accounts_to_json(user.accounts)},
query.id == user.id)
def get_all_users(self):
users = self.db.table('Users')
res = []
for a in users.all():
res.append(User(a['id'], parse_json_to_accounts(a['accounts'])))
return res
def remove_user(self, user_id):
users = self.db.table('Users')
query = Query()
users.remove(query.id == user_id)
def get_accounts_of_user(self, user):
user = self.search_user(user.id)
return user.accounts
def get_account_of_user(self, user, username):
user = self.search_user(user.id)
result = None
for account in user.accounts:
if account.username == username:
result = account
break
return result
def add_account_to_user(self, user, account):
user = self.search_user(user.id)
user.add_account(account)
self.update_user(user)
def update_account_of_user(self, user, account):
user = self.search_user(user.id)
user.update_account(account)
self.update_user(user)
def remove_account_of_user(self, user, account):
user = self.search_user(user.id)
user.remove_account(account)
self.update_user(user)
def get_email_server_of_account(self, account, protocol):
email_server = self.search_email_server(account.name, protocol)
return email_server
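
# A minimal usage sketch of DBC (hypothetical values; assumes the given
# database path is writable):
#
#     dbc = DBC('/tmp/tmail-bot-test.json')
#     dbc.insert_email_server('gmail', 'imap.gmail.com', 993, 'IMAP')
#     server = dbc.search_email_server('gmail', 'IMAP')
#     dbc.remove_email_server('gmail', 'IMAP')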
| ManuelLR/Notmail_Bot | repository/repository.py | Python | gpl-3.0 | 4,697 | 0.00149 |
#! usr/bin/python
from __future__ import print_function, division, absolute_import
from os import remove
from os.path import join, abspath
from sys import stdout, exit
from time import time
import multiprocessing as mp
from Queue import Empty  # exception raised by empty multiprocessing queues (Python 2 stdlib; 'queue' on Python 3)
from argparse import ArgumentParser
import logging
import numpy as np
from disvis import DisVis, PDB, Volume
from disvis.rotations import proportional_orientations, quat_to_rotmat
from disvis.helpers import mkdir_p
def parse_args():
"""Parse the command-line arguments."""
p = ArgumentParser()
p.add_argument('receptor', type=file,
help='PDB-file containing fixed chain.')
p.add_argument('ligand', type=file,
help='PDB-file containing scanning chain.')
p.add_argument('restraints', type=file,
help='File containing the distance restraints')
p.add_argument('-a', '--angle', dest='angle', type=float, default=15, metavar='<float>',
help='Rotational sampling density in degrees. Default is 15 degrees.')
p.add_argument('-vs', '--voxelspacing', dest='voxelspacing', metavar='<float>',
type=float, default=1,
help='Voxel spacing of search grid in angstrom. Default is 1A.')
p.add_argument('-ir', '--interaction-radius',
dest='interaction_radius', type=float, default=3.0, metavar='<float>',
help='Radius of the interaction space for each atom in angstrom. '
'Atoms are thus considered interacting if the distance is '
                   'larger than the vdW radius and shorter than or equal to '
'vdW + interaction_radius. Default is 3A.')
p.add_argument('-cv', '--max-clash',
dest='max_clash', type=float, default=200, metavar='<float>',
help='Maximum allowed volume of clashes. Increasing this '
'number results in more allowed complexes. '
'Default is 200 A^3.')
p.add_argument('-iv', '--min-interaction',
dest='min_interaction', type=float, default=300, metavar='<float>',
help='Minimal required interaction volume for a '
'conformation to be considered a '
'complex. Increasing this number results in a '
'stricter counting of complexes. '
'Default is 300 A^3.')
p.add_argument('-d', '--directory', dest='directory', metavar='<dir>',
type=abspath, default='.',
help='Directory where results are written to. '
'Default is current directory.')
p.add_argument('-p', '--nproc', dest='nproc', type=int, default=1, metavar='<int>',
help='Number of processors used during search.')
p.add_argument('-g', '--gpu', dest='gpu', action='store_true',
help='Use GPU-acceleration for search. If not available '
'the CPU-version will be used with the given number '
'of processors.')
help_msg = ("File containing residue number for which interactions will be counted. "
"The first line holds the receptor residue, "
"and the second line the ligand residue numbers.")
p.add_argument('-is', '--interaction-selection', metavar='<file>',
dest='interaction_selection', type=file, default=None,
help=help_msg)
help_msg = ("Number of minimal consistent restraints for which an interaction "
"or occupancy analysis will be performed. "
"Default is number of restraints minus 1.")
p.add_argument('-ic', '--interaction-restraints-cutoff', metavar='<int>',
dest='interaction_restraints_cutoff', type=int, default=None,
help=help_msg)
p.add_argument('-oa', '--occupancy-analysis', dest='occupancy_analysis',
action='store_true',
help=("Perform an occupancy analysis, ultimately providing "
"a volume where each grid point represents the "
"normalized probability of that spot being occupied by the ligand."
)
)
return p.parse_args()
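
# A typical invocation might look like this (hypothetical file names):
#
#     disvis receptor.pdb ligand.pdb restraints.dat -a 10 -vs 2 -p 4 -d results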
def parse_interaction_selection(fid, pdb1, pdb2):
"""Parse the interaction selection file, i.e. all residues for which an
interaction analysis is performed."""
resi1 = [int(x) for x in fid.readline().split()]
resi2 = [int(x) for x in fid.readline().split()]
pdb1_sel = pdb1.select('name', ('CA', "O3'")).select('resi', resi1)
pdb2_sel = pdb2.select('name', ('CA', "O3'")).select('resi', resi2)
if (len(resi1) != pdb1_sel.natoms) or (len(resi2) != pdb2_sel.natoms):
msg = ("Some selected interaction residues where either missing in the PDB file "
"or had alternate conformers. Please check your input residues and remove alternate conformers.")
raise ValueError(msg)
return pdb1_sel, pdb2_sel
def parse_restraints(fid, pdb1, pdb2):
"""Parse the restraints file."""
dist_restraints = []
for line in fid:
# ignore comments and empty lines
line = line.strip()
if line.startswith('#') or not line:
continue
chain1, resi1, name1, chain2, resi2, name2, mindis, maxdis = line.split()
pdb1_sel = pdb1.select('chain', chain1).select('resi',
int(resi1)).select('name', name1).duplicate()
pdb2_sel = pdb2.select('chain', chain2).select('resi',
int(resi2)).select('name', name2).duplicate()
if pdb1_sel.natoms == 0 or pdb2_sel.natoms == 0:
raise ValueError("A restraint selection was not found in line:\n{:s}".format(str(line)))
dist_restraints.append([pdb1_sel, pdb2_sel, float(mindis), float(maxdis)])
fid.close()
return dist_restraints
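
# An example restraints file accepted by parse_restraints (hypothetical
# values; the columns are chain, residue number and atom name for the
# receptor, the same three for the ligand, then the minimum and maximum
# allowed distance in angstrom):
#
#     # receptor      ligand       min  max
#     A 145 CA        B 72 CA      1.5  25.0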
class Joiner(object):
def __init__(self, directory):
self.directory = directory
def __call__(self, fname):
"""Join fname with set directory."""
return join(self.directory, fname)
class Results(object):
"""Simple container"""
pass
def run_disvis_instance(queue, receptor, ligand, distance_restraints, rotmat,
weights, n, pdb1_sel, pdb2_sel, args):
"""Run a single DisVis instance."""
dv = DisVis()
dv.receptor = receptor
dv.ligand = ligand
dv.distance_restraints = distance_restraints
dv.rotations = rotmat
dv.weights = weights
dv.voxelspacing = args.voxelspacing
dv.interaction_radius = args.interaction_radius
dv.max_clash = args.max_clash
dv.min_interaction = args.min_interaction
dv.interaction_restraints_cutoff = args.interaction_restraints_cutoff
if args.interaction_selection is not None:
dv.receptor_interaction_selection = pdb1_sel
dv.ligand_interaction_selection = pdb2_sel
dv.occupancy_analysis = args.occupancy_analysis
dv.search()
# Save results to file, to be combined later
joiner = Joiner(args.directory)
fname = joiner('accessible_interaction_space_{:d}.mrc').format(n)
dv.accessible_interaction_space.tofile(fname)
fname = joiner('violations_{:d}.npy').format(n)
np.save(fname, dv.violations)
if dv.interaction_matrix is not None:
fname = joiner('interaction_matrix_{:d}.npy'.format(n))
np.save(fname, dv.interaction_matrix)
if dv.occupancy_analysis:
for key, value in dv.occupancy_grids.iteritems():
fname = joiner('occupancy_{:d}_{:d}.mrc'.format(key, n))
value.tofile(fname)
queue.put(dv.accessible_complexes)
def mp_cpu_disvis(receptor, ligand, rotmat, weights, distance_restraints,
pdb1_sel, pdb2_sel, args):
"""Run several DisVis instances, each with a subset of all rotations."""
# multi-threaded CPU version
try:
max_cpu = mp.cpu_count()
jobs = min(max_cpu, args.nproc)
except NotImplementedError:
jobs = args.nproc
# in case more processes are requested than the number
# of rotations sampled
nrot = rotmat.shape[0]
if jobs > nrot:
jobs = nrot
nrot_per_job = nrot//jobs
write('Number of processors used: {:d}'.format(jobs))
write('Number of rotations per job: {:d}'.format(nrot_per_job))
write('Creating jobs')
queue = mp.Queue()
processes = []
for n in xrange(jobs):
# Determine the rotations that each job needs to sample
init_rot = n * nrot_per_job
end_rot = (n + 1) * nrot_per_job
if n == (jobs - 1):
end_rot = None
sub_rotmat = rotmat[init_rot: end_rot]
sub_weights = weights[init_rot: end_rot]
disvis_args = (queue, receptor, ligand, distance_restraints,
sub_rotmat, sub_weights, n, pdb1_sel, pdb2_sel, args)
process = mp.Process(target=run_disvis_instance, args=disvis_args)
processes.append(process)
write('Starting jobs')
for p in processes:
p.start()
write('Waiting for jobs to finish')
for p in processes:
p.join()
    # An empty queue indicates that the multi-processor run failed.
    if queue.empty():
        raise Empty
write('Searching done. Combining results')
# Create dummy class with similar results attributes as DisVis class
results = Results()
joiner = Joiner(args.directory)
fname_interspace = joiner('accessible_interaction_space_{:d}.mrc')
fname_violations = joiner('violations_{:d}.npy')
fname_intermat = joiner('interaction_matrix_{:d}.npy')
accessible_complexes = np.asarray(queue.get(), dtype=np.float64)
accessible_interaction_space = Volume.fromfile(fname_interspace.format(0))
violations = np.load(fname_violations.format(0))
for n in xrange(1, jobs):
accessible_complexes += np.asarray(queue.get(), dtype=np.float64)
np.maximum(accessible_interaction_space.array,
Volume.fromfile(fname_interspace.format(n)).array,
accessible_interaction_space.array)
violations += np.load(fname_violations.format(n))
# Combine the occupancy grids
occupancy = None
if args.occupancy_analysis:
fname_occupancy = joiner('occupancy_{:d}_{:d}.mrc')
occupancy = {}
for consistent_restraints in xrange(args.interaction_restraints_cutoff,
len(distance_restraints) + 1):
occupancy[consistent_restraints] = Volume.fromfile(
fname_occupancy.format(consistent_restraints, 0))
for n in range(1, jobs):
occupancy[consistent_restraints]._array += (
Volume.fromfile(fname_occupancy.format(consistent_restraints, n))._array
)
# Combine the interaction analysis
results.interaction_matrix = None
if args.interaction_selection is not None:
interaction_matrix = np.load(fname_intermat.format(0))
for n in range(1, jobs):
interaction_matrix += np.load(fname_intermat.format(n))
results.interaction_matrix = interaction_matrix
# Remove the intermediate files
write('Cleaning')
for n in xrange(jobs):
remove(fname_interspace.format(n))
remove(fname_violations.format(n))
if args.interaction_selection is not None:
remove(fname_intermat.format(n))
if args.occupancy_analysis:
for consistent_restraints in xrange(
args.interaction_restraints_cutoff, len(distance_restraints) + 1):
remove(fname_occupancy.format(consistent_restraints, n))
results.accessible_interaction_space = accessible_interaction_space
results.accessible_complexes = accessible_complexes
results.violations = violations
results.occupancy_grids = occupancy
return results
def run_disvis(queue, receptor, ligand, rotmat, weights, distance_restraints,
pdb1_sel, pdb2_sel, args):
dv = DisVis()
dv.receptor = receptor
dv.ligand = ligand
dv.distance_restraints = distance_restraints
dv.rotations = rotmat
dv.weights = weights
dv.voxelspacing = args.voxelspacing
dv.interaction_radius = args.interaction_radius
dv.max_clash = args.max_clash
dv.min_interaction = args.min_interaction
dv.queue = queue
dv.occupancy_analysis = args.occupancy_analysis
dv.interaction_restraints_cutoff = args.interaction_restraints_cutoff
if not any([x is None for x in (pdb1_sel, pdb2_sel)]):
dv.receptor_interaction_selection = pdb1_sel
dv.ligand_interaction_selection = pdb2_sel
dv.search()
return dv
def write(line):
if stdout.isatty():
print(line)
logging.info(line)
def main():
args = parse_args()
mkdir_p(args.directory)
joiner = Joiner(args.directory)
logging.basicConfig(filename=joiner('disvis.log'),
level=logging.INFO, format='%(asctime)s %(message)s')
time0 = time()
queue = None
if args.gpu:
from disvis.helpers import get_queue
queue = get_queue()
if queue is None:
raise ValueError("No GPU queue was found.")
write('Reading fixed model from: {:s}'.format(args.receptor.name))
receptor = PDB.fromfile(args.receptor)
write('Reading scanning model from: {:s}'.format(args.ligand.name))
ligand = PDB.fromfile(args.ligand)
write('Reading in rotations.')
q, weights, a = proportional_orientations(args.angle)
rotmat = quat_to_rotmat(q)
write('Requested rotational sampling density: {:.2f}'.format(args.angle))
write('Real rotational sampling density: {:.2f}'.format(a))
write('Number of rotations: {:d}'.format(rotmat.shape[0]))
write('Reading in restraints from file: {:s}'.format(args.restraints.name))
distance_restraints = parse_restraints(args.restraints, receptor, ligand)
write('Number of distance restraints: {:d}'.format(len(distance_restraints)))
    # If the interaction restraints cutoff is not specified, calculate the
    # interaction and occupancy grids only for complexes consistent with at
    # least (number of restraints - 3) restraints, with a lower bound of 1.
if args.interaction_restraints_cutoff is None:
args.interaction_restraints_cutoff = max(len(distance_restraints) - 3, 1)
pdb1_sel = pdb2_sel = None
if args.interaction_selection is not None:
write('Reading in interaction selection from file: {:s}'
.format(args.interaction_selection.name))
pdb1_sel, pdb2_sel = parse_interaction_selection(
args.interaction_selection, receptor, ligand)
write('Number of receptor residues: {:d}'.format(pdb1_sel.natoms))
write('Number of ligand residues: {:d}'.format(pdb2_sel.natoms))
write('Voxel spacing set to: {:.2f}'.format(args.voxelspacing))
write('Interaction radius set to: {:.2f}'.format(args.interaction_radius))
write('Minimum required interaction volume: {:.2f}'.format(args.min_interaction))
write('Maximum allowed volume of clashes: {:.2f}'.format(args.max_clash))
if args.occupancy_analysis:
write('Performing occupancy analysis')
if queue is None:
# CPU-version
if args.nproc > 1:
try:
dv = mp_cpu_disvis(receptor, ligand, rotmat, weights,
distance_restraints, pdb1_sel, pdb2_sel, args)
            except Empty:
                msg = ('ERROR: result queue was empty while processing jobs, '
                       'stopping execution ...')
write(msg)
exit(-1)
else:
dv = run_disvis(queue, receptor, ligand, rotmat, weights,
distance_restraints, pdb1_sel, pdb2_sel, args)
else:
# GPU-version
write('Using GPU accelerated search.')
dv = run_disvis(queue, receptor, ligand, rotmat, weights,
distance_restraints, pdb1_sel, pdb2_sel, args)
# write out accessible interaction space
fname = joiner('accessible_interaction_space.mrc')
write('Writing accessible interaction space to: {:s}'.format(fname))
dv.accessible_interaction_space.tofile(fname)
# write out accessible complexes
accessible_complexes = dv.accessible_complexes
norm = sum(accessible_complexes)
digits = len(str(int(norm))) + 1
cum_complexes = np.cumsum(np.asarray(accessible_complexes)[::-1])[::-1]
with open(joiner('accessible_complexes.out'), 'w') as f_accessible_complexes:
write('Writing number of accessible complexes to: {:s}'.format(f_accessible_complexes.name))
header = '# consistent restraints | accessible complexes |' +\
'relative | cumulative accessible complexes | relative\n'
f_accessible_complexes.write(header)
for n, acc in enumerate(accessible_complexes):
f_accessible_complexes.write('{0:3d} {2:{1}d} {3:8.6f} {4:{1}d} {5:8.6f}\n'\
.format(n, digits, int(acc), acc/norm,
int(cum_complexes[n]), cum_complexes[n]/norm))
# writing out violations
violations = dv.violations
cum_violations = violations[::-1].cumsum(axis=0)[::-1]
with open(joiner('violations.out'), 'w') as f_viol:
write('Writing violations to file: {:s}'.format(f_viol.name))
num_violations = violations.sum(axis=1)
nrestraints = num_violations.shape[0]
header = ('# row represents the number of consistent restraints\n'
'# column represents how often that restraint is violated\n')
f_viol.write(header)
header = (' ' + '{:8d}'*nrestraints + '\n').format(*range(1, nrestraints + 1))
f_viol.write(header)
for n, line in enumerate(cum_violations):
f_viol.write('{:<2d} '.format(n+1))
for word in line:
if num_violations[n] > 0:
percentage_violated = word/cum_complexes[n+1]
else:
percentage_violated = 0
f_viol.write('{:8.4f}'.format(percentage_violated))
f_viol.write('\n')
# Give user indication for false positives.
# Determine minimum number of false positives.
nrestraints = len(distance_restraints)
n = 1
while accessible_complexes[-n] == 0:
n += 1
if n > 1:
msg = ('Not all restraints are consistent. '
'Number of false-positive restraints present '
'is at least: {:d}'.format(n - 1))
write(msg)
# next give possible false-positives based on the percentage of violations
# and their associated Z-score
if n == 1:
n = None
else:
n = -n + 1
percentage_violated = cum_violations[:n]/np.asarray(cum_complexes[1:n]).reshape(-1, 1)
average_restraint_violation = percentage_violated.mean(axis=0)
std_restraint_violation = percentage_violated.std(axis=0)
zscore_violations = ((average_restraint_violation - average_restraint_violation.mean())
/ average_restraint_violation.std())
ind_false_positives = np.flatnonzero(zscore_violations >= 1.0)
nfalse_positives = ind_false_positives.size
if nfalse_positives > 0:
ind_false_positives += 1
write(('Possible false-positive restraints (z-score > 1.0):' +\
' {:d}'*nfalse_positives).format(*tuple(ind_false_positives)))
with open(joiner('z-score.out'), 'w') as f:
write('Writing z-score of each restraint to {:s}'.format(f.name))
for n in xrange(zscore_violations.shape[0]):
f.write('{:2d} {:6.3f} {:6.3f} {:6.3f}\n'.format(n+1,
average_restraint_violation[n], std_restraint_violation[n],
zscore_violations[n]))
# Write all occupancy grids to MRC-files if requested
if args.occupancy_analysis:
for n, vol in dv.occupancy_grids.iteritems():
# Normalize the occupancy grid
if cum_complexes[n] > 0:
vol._array /= cum_complexes[n]
vol.tofile(joiner('occupancy_{:d}.mrc'.format(n)))
# Write out interaction analysis
# the interaction_matrix gives the number of interactions between each
# residue of the receptor and ligand for complexes consistent with exactly
# N restraints.
interaction_matrix = dv.interaction_matrix
if interaction_matrix is not None:
## Save interaction matrix
#f = joiner('interaction_matrix.npy')
#write('Writing interaction-matrix to: {:s}'.format(f))
#np.save(f, interaction_matrix)
# Save contacted receptor and ligand residue interaction for each analyzed number
# of consistent restraints
write('Writing contacted receptor residue interactions to files.')
# Take the cumsum in order to give the number of interactions for complexes
# with at least N restraints.
receptor_cum_interactions = interaction_matrix.sum(axis=1)[::-1].cumsum(axis=0)[::-1]
ligand_cum_interactions = interaction_matrix.sum(axis=2)[::-1].cumsum(axis=0)[::-1]
fname = joiner('receptor_interactions.txt')
with open(fname, 'w') as f:
# Write header
f.write('#resi')
for consistent_restraints in xrange(args.interaction_restraints_cutoff, nrestraints + 1):
f.write(' {:>6d}'.format(consistent_restraints))
f.write('\n')
for n, resi in enumerate(pdb1_sel.data['resi']):
f.write('{:<5d}'.format(resi))
for consistent_restraints in xrange(args.interaction_restraints_cutoff, nrestraints + 1):
index = consistent_restraints - args.interaction_restraints_cutoff
interactions = receptor_cum_interactions[index, n]
cum_complex = cum_complexes[consistent_restraints]
if cum_complex > 0:
relative_interactions = interactions / cum_complex
else:
relative_interactions = 0
f.write(' {:6.3f}'.format(relative_interactions))
f.write('\n')
fname = joiner('ligand_interactions.txt')
with open(fname, 'w') as f:
# Write header
f.write('#resi')
for consistent_restraints in xrange(args.interaction_restraints_cutoff, nrestraints + 1):
f.write(' {:>6d}'.format(consistent_restraints))
f.write('\n')
for n, resi in enumerate(pdb2_sel.data['resi']):
f.write('{:<5d}'.format(resi))
for consistent_restraints in xrange(args.interaction_restraints_cutoff, nrestraints + 1):
index = consistent_restraints - args.interaction_restraints_cutoff
interactions = ligand_cum_interactions[index, n]
cum_complex = cum_complexes[consistent_restraints]
if cum_complex > 0:
relative_interactions = interactions / cum_complex
else:
relative_interactions = 0
f.write(' {:6.3f}'.format(relative_interactions))
f.write('\n')
# time indication
seconds = int(round(time() - time0))
m, s = divmod(seconds, 60)
write('Total time passed: {:d}m {:2d}s'.format(m, s))
if __name__=='__main__':
main()
| haddocking/disvis | disvis/main.py | Python | apache-2.0 | 23,344 | 0.004112 |
# -*- coding: utf-8 -*-
'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This class holds the example code from the getting_started web documentation.
'''
from __future__ import print_function, division, absolute_import, unicode_literals
# Getting started
#To start your experience with SPOT you need to have SPOT installed. Please see the [Installation chapter](index.md) for further details.
#To use SPOT we have to import it and use one of the pre-built examples:
import spotpy # Load the SPOT package into your working storage
from spotpy.examples.spot_setup_rosenbrock import spot_setup # Import the two dimensional Rosenbrock example
#The example comes along with parameter boundaries, the Rosenbrock function, the optimal value of the function and RMSE as a likelihood.
#So we can directly start to analyse the Rosenbrock function with one of the algorithms. We start with a simple Monte Carlo sampling:
if __name__ == '__main__':
    # Give the Monte Carlo algorithm the example setup; results are stored
    # under the name 'RosenMC' (kept in RAM here; dbformat='csv' would write a RosenMC.csv file instead)
#spot_setup.slow = True
sampler = spotpy.algorithms.mc(spot_setup(), dbname='RosenMC', dbformat='ram')
    #Now we can sample with the implemented Monte Carlo algorithm:
    sampler.sample(10000)                # Sample 10,000 parameter combinations
results=sampler.getdata()
    #Now we want to have a look at the results. First we want to know what the algorithm has done during the 10,000 iterations:
#spot.analyser.plot_parametertrace(results) # Use the analyser to show the parameter trace
spotpy.analyser.plot_parameterInteraction(results)
posterior=spotpy.analyser.get_posterior(results)
spotpy.analyser.plot_parameterInteraction(posterior)
#spotpy.analyser.plot_posterior_parametertrace(results, threshold=0.9)
print(spotpy.analyser.get_best_parameterset(results)) | thouska/spotpy | spotpy/examples/getting_started.py | Python | mit | 1,975 | 0.013671 |
#!/usr/bin/env python
#
# Copyright 2013 Tim O'Shea
#
# This file is part of PyBOMBS
#
# PyBOMBS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# PyBOMBS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyBOMBS; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from PyQt4.QtCore import Qt;
from PyQt4 import QtCore
import PyQt4.QtGui as QtGui
import sys
import os.path
from mod_pybombs import *;
recipe_loader.load_all();
class AppList(QtGui.QWidget):
def __init__(self, parent, name):
super(AppList, self).__init__()
self.parent = parent;
self.lay = QtGui.QGridLayout();
self.setLayout(self.lay);
self.width = 8;
self.idx = 0;
self.cbd = {};
def cb(self):
self._cb();
def addButton(self, name, callback):
self._cb = callback;
pkgimg = "img/" + name + ".png";
if os.path.exists(pkgimg):
pixmap = QtGui.QPixmap(pkgimg);
else:
defaultimg = "img/unknown.png";
pixmap = QtGui.QPixmap(defaultimg);
icon = QtGui.QIcon(pixmap);
button = QtGui.QToolButton();
action = QtGui.QAction( icon, str(name), self );
action.setStatusTip('Install App')
button.setDefaultAction(action);
button.setToolButtonStyle(Qt.ToolButtonTextUnderIcon);
button.setIconSize(QtCore.QSize(100,100));
button.setAutoRaise(True);
self.connect(action, QtCore.SIGNAL("triggered()"), callback);
self.lay.addWidget(button, self.idx/self.width, self.idx%self.width);
self.idx = self.idx + 1;
class Installer:
def __init__(self, parent, name):
self.parent = parent;
self.name = name;
def cb(self):
print "installing "+ self.name;
install(self.name);
self.parent.refresh();
class Remover:
def __init__(self, parent, name):
self.parent = parent;
self.name = name;
def cb(self):
print "removing "+ self.name;
remove(self.name);
self.parent.refresh();
class ASMain(QtGui.QWidget):
#class ASMain(QtGui.QMainWindow):
def __init__(self):
super(ASMain, self).__init__()
self.setWindowTitle("Python Build Overlay Managed Bundle System - APP STORE GUI");
self.layout = QtGui.QVBoxLayout(self);
self.setLayout(self.layout);
self.menu = QtGui.QMenuBar(self);
pixmap = QtGui.QPixmap("img/logo.png")
lbl = QtGui.QLabel(self)
lbl.setPixmap(pixmap)
l2 = QtGui.QHBoxLayout();
l2.addWidget(QtGui.QLabel(" "));
l2.addWidget(lbl);
l2.addWidget(QtGui.QLabel(" "));
self.tw = QtGui.QTabWidget(self);
self.layout.setMargin(0);
self.layout.addWidget(self.menu);
self.layout.addLayout(l2);
self.layout.addWidget(self.tw);
# Populate Apps
self.populate_tabs();
# Populate the menu
exitAction = QtGui.QAction(QtGui.QIcon('exit.png'), '&Exit', self)
exitAction.triggered.connect(QtGui.qApp.quit)
fileMenu = self.menu.addMenu('&File');
fileMenu.addAction(exitAction);
reloadAction = QtGui.QAction('&Refresh State', self)
reloadAction.triggered.connect(self.reload_op)
toolsMenu = self.menu.addMenu('&Tools');
toolsMenu.addAction(reloadAction);
self.show();
def reload_op(self):
inv.loadc();
recipe_loader.load_all();
self.refresh();
def refresh(self):
self.populate_tabs();
def populate_tabs(self):
self.tw.clear();
#categories = ["baseline", "common"]
categories = ["common"]
cbs = {};
pages = [];
for c in categories:
pages.append( "Available %s Apps"%(c) );
pages.append( "Installed %s Apps"%(c) );
#pages = ["Available Apps", "Installed Apps"];
tabw = [];
for p in pages:
pp = AppList(self, p);
tabw.append(pp);
self.tw.addTab(pp, p);
catpkg = get_catpkgs()
for c in categories:
cbs[c] = {};
cidx = categories.index(c);
pkgs = catpkg[c];
pkgs.sort();
for p in pkgs:
installed = global_recipes[p].satisfy();
if(installed):
cbs[c][p] = Remover(self, p);
pcidx = 2*cidx+1;
else:
cbs[c][p] = Installer(self, p);
pcidx = 2*cidx;
tabw[pcidx].addButton(p, cbs[c][p].cb);
self.cbs = cbs;
app = QtGui.QApplication(sys.argv)
mw = ASMain();
sys.exit(app.exec_());
| scalable-networks/ext | pybombs/app_store.py | Python | gpl-2.0 | 5,226 | 0.020666 |
from .fetch import FetchParser
from .json_ld import JsonLdParser
from .lom import LomParser
from .lrmi import LrmiParser
from .nsdl_dc import NsdlDcParser
__all__ = [
'FetchParser',
'JsonLdParser',
'LomParser',
'LrmiParser',
'NsdlDcParser',
]
| navnorth/LR-Data | src/payload_schema/__init__.py | Python | apache-2.0 | 265 | 0 |
# coding=utf-8
import json
import re
import responses
import pytest
import mapbox
def test_geocoder_default_name():
"""Default name is set"""
geocoder = mapbox.Geocoder()
assert geocoder.name == 'mapbox.places'
def test_geocoder_name():
"""Named dataset name is set"""
geocoder = mapbox.Geocoder('mapbox.places-permanent')
assert geocoder.name == 'mapbox.places-permanent'
def _check_coordinate_precision(coord, precision):
"""Coordinate precision is <= specified number of digits"""
if '.' not in coord:
return True
else:
return len(coord.split('.')[-1]) <= precision
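
# For instance, _check_coordinate_precision('0.123', 3) is True, while
# _check_coordinate_precision('0.1234', 3) is False.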
@responses.activate
def test_geocoder_forward():
"""Forward geocoding works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(access_token='pk.test').forward('1600 pennsylvania ave nw')
assert response.status_code == 200
assert response.json()['query'] == ["1600", "pennsylvania", "ave", "nw"]
@responses.activate
def test_geocoder_forward_geojson():
"""Forward geocoding .geojson method works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(access_token='pk.test').forward('1600 pennsylvania ave nw')
assert response.status_code == 200
assert response.geojson() == response.json()
@responses.activate
def test_geocoder_reverse():
"""Reverse geocoding works"""
lon, lat = -77.4371, 37.5227
body = json.dumps({"query": [lon, lat]})
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/{0},{1}.json?access_token=pk.test'.format(lon, lat),
match_querystring=True,
body=body,
status=200,
content_type='application/json')
response = mapbox.Geocoder(access_token='pk.test').reverse(lon=lon, lat=lat)
assert response.status_code == 200
assert response.json()['query'] == [lon, lat]
@responses.activate
def test_geocoder_reverse_geojson():
"""Reverse geocoding geojson works"""
lon, lat = -77.4371, 37.5227
body = json.dumps({"query": [lon, lat]})
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/{0},{1}.json?access_token=pk.test'.format(lon, lat),
match_querystring=True,
body=body,
status=200,
content_type='application/json')
response = mapbox.Geocoder(access_token='pk.test').reverse(lon=lon, lat=lat)
assert response.status_code == 200
assert response.geojson() == response.json()
def test_geocoder_place_types():
"""Place types are enumerated"""
assert sorted(mapbox.Geocoder().place_types.items()) == [
('address', "A street address with house number. Examples: 1600 Pennsylvania Ave NW, 1051 Market St, Oberbaumstrasse 7."),
('country', "Sovereign states and other political entities. Examples: United States, France, China, Russia."),
('district', "Second order administrative division. Only used when necessary. Examples: Tianjin, Beijing"),
('locality', "A smaller area within a place that possesses official status and boundaries. Examples: Oakleigh (Melbourne)"),
('neighborhood', 'A smaller area within a place, often without formal boundaries. Examples: Montparnasse, Downtown, Haight-Ashbury.'),
('place', "City, town, village or other municipality relevant to a country's address or postal system. Examples: Cleveland, Saratoga Springs, Berlin, Paris."),
('poi', "Places of interest including commercial venues, major landmarks, parks, and other features. Examples: Subway Restaurant, Yosemite National Park, Statue of Liberty."),
('poi.landmark', "Places of interest that are particularly notable or long-lived like parks, places of worship and museums. A strict subset of the poi place type. Examples: Yosemite National Park, Statue of Liberty."),
('postcode', "Postal code, varies by a country's postal system. Examples: 20009, CR0 3RL."),
('region', "First order administrative divisions within a country, usually provinces or states. Examples: California, Ontario, Essonne.")]
def test_validate_country_codes_err():
try:
mapbox.Geocoder()._validate_country_codes(('us', 'bogus'))
except mapbox.InvalidCountryCodeError as err:
assert str(err) == "bogus"
def test_validate_country():
assert mapbox.Geocoder()._validate_country_codes(
('us', 'br')) == {'country': 'us,br'}
def test_validate_place_types_err():
try:
mapbox.Geocoder()._validate_place_types(('address', 'bogus'))
except mapbox.InvalidPlaceTypeError as err:
assert str(err) == "bogus"
def test_validate_place_types():
assert mapbox.Geocoder()._validate_place_types(
('address', 'poi')) == {'types': 'address,poi'}
@responses.activate
def test_geocoder_forward_types():
"""Type filtering of forward geocoding works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?types=address,country,place,poi.landmark,postcode,region&access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward(
'1600 pennsylvania ave nw',
types=('address', 'country', 'place', 'poi.landmark', 'postcode', 'region'))
assert response.status_code == 200
assert response.json()['query'] == ["1600", "pennsylvania", "ave", "nw"]
@responses.activate
def test_geocoder_reverse_types():
"""Type filtering of reverse geocoding works"""
lon, lat = -77.4371, 37.5227
body = json.dumps({"query": [lon, lat]})
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/{0},{1}.json?types=address,country,place,poi.landmark,postcode,region&access_token=pk.test'.format(lon, lat),
match_querystring=True,
body=body,
status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').reverse(
lon=lon, lat=lat,
types=('address', 'country', 'place', 'poi.landmark', 'postcode', 'region'))
assert response.status_code == 200
assert response.json()['query'] == [lon, lat]
@responses.activate
def test_geocoder_forward_proximity():
"""Proximity parameter works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?proximity=0.0,0.0&access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward(
'1600 pennsylvania ave nw', lon=0, lat=0)
assert response.status_code == 200
assert response.json()['query'] == ["1600", "pennsylvania", "ave", "nw"]
@responses.activate
def test_geocoder_proximity_rounding():
"""Proximity parameter is rounded to 3 decimal places"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json',
match_querystring=False,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward(
'1600 pennsylvania ave nw', lon=0.123456, lat=0.987654)
# check coordinate precision for proximity flag
match = re.search(r'[&\?]proximity=([^&$]+)', response.url)
assert match is not None
for coord in re.split(r'(%2C|,)', match.group(1)):
assert _check_coordinate_precision(coord, 3)
@responses.activate
def test_geocoder_forward_bbox():
"""Bbox parameter works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/washington.json?bbox=-78.3284%2C38.6039%2C-78.0428%2C38.7841&access_token=pk.test',
match_querystring=True,
body='{"query": ["washington"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward(
'washington', bbox=(-78.3284,38.6039,-78.0428,38.7841))
assert response.status_code == 200
assert response.json()['query'] == ["washington"]
@responses.activate
def test_geocoder_forward_limit():
"""Limit parameter works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/washington.json?limit=3&access_token=pk.test',
match_querystring=True,
body='{"query": ["washington"], "features": [1, 2, 3]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward(
'washington', limit=3)
assert response.status_code == 200
assert len(response.json()['features']) == 3
@responses.activate
def test_geocoder_reverse_limit():
"""Limit parameter works"""
lon, lat = -77.4371, 37.5227
body = json.dumps({"query": [lon, lat],
"features": [{'name': 'place'}]})
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/{0},{1}.json?access_token=pk.test&limit=1&types=place'.format(lon, lat),
match_querystring=True,
body=body,
status=200,
content_type='application/json')
service = mapbox.Geocoder(access_token='pk.test')
response = service.reverse(lon=lon, lat=lat, limit=1, types=['place'])
assert response.status_code == 200
assert len(response.json()['features']) == 1
@responses.activate
def test_geocoder_reverse_limit_requires_onetype():
"""Limit requires a single type"""
lon, lat = -77.123456789, 37.987654321
service = mapbox.Geocoder(access_token='pk.test')
with pytest.raises(mapbox.InvalidPlaceTypeError):
service.reverse(lon=lon, lat=lat, limit=1)
with pytest.raises(mapbox.InvalidPlaceTypeError):
service.reverse(lon=lon, lat=lat, limit=1, types=['places', 'country'])
@responses.activate
def test_geocoder_reverse_rounding():
"""Reverse geocoding parameters are rounded to 5 decimal places"""
lon, lat = -77.123456789, 37.987654321
body = json.dumps({"query": [lon, lat]})
responses.add(
responses.GET,
re.compile('https:\/\/api\.mapbox\.com\/geocoding\/v5\/mapbox\.places\/.+\.json'),
match_querystring=False,
body=body,
status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').reverse(
lon=lon, lat=lat)
# check coordinate precision for reverse geocoding coordinates
match = re.search(r'\/([\-\d\.\,]+)\.json', response.url)
assert match is not None
for coord in re.split(r'(%2C|,)', match.group(1)):
assert _check_coordinate_precision(coord, 5)
@responses.activate
def test_geocoder_unicode():
"""Forward geocoding works with non-ascii inputs
Specifically, the URITemplate needs to utf-8 encode all inputs
"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/Florian%C3%B3polis%2C%20Brazil.json?access_token=pk.test',
match_querystring=True,
body='{}', status=200,
content_type='application/json')
query = "Florianópolis, Brazil"
try:
query = query.decode('utf-8') # Python 2
except:
pass # Python 3
response = mapbox.Geocoder(access_token='pk.test').forward(query)
assert response.status_code == 200
@responses.activate
def test_geocoder_forward_country():
"""Country parameter of forward geocoding works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?country=us&access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward('1600 pennsylvania ave nw', country=['us'])
assert response.status_code == 200
| ravik/mapbox-baseSDK | tests/test_geocoder.py | Python | mit | 12,916 | 0.004026 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Projet : Editeur, Compilateur et Micro-Ordinateur pour
un langage assembleur.
Nom du fichier : 04-02-CPU.py
Identification : 04-02-CPU
Titre : CPU
Auteurs : Francis Emond, Malek Khattech,
Mamadou Dia, Marc-André Jean
Date : 16-04-2017
Description : CPU du Micro-Ordinateur.
Le module ``CPU``
================================
Ce module contient la classe CPU qui est la représentation du CPU du
micro-ordinateur. C'est cette classe qui gère les calculs, le transfert
de la mémoire et l'exécution des instructions d'un programme.
"""
__author__ = "Francis Emond, Malek Khattech, Mamadou Dia, Marc-Andre Jean"
__version__ = "1.0"
__status__ = "Production"
# Import the required modules.
try:
modEnum = __import__("05-Enum")
modBus = __import__("04-01-Bus")
modALU = __import__("04-03-ALU")
except ImportError:
import importlib
modEnum = importlib.import_module("Modules.05-Enum")
modBus = importlib.import_module("Modules.04-01-Bus")
modALU = importlib.import_module("Modules.04-03-ALU")
# Aliases.
OPCODE = modEnum.OPCODE
MODE = modEnum.MODE
REGISTRE = modEnum.REGISTRE
ADRESSAGE = modEnum.ADRESSAGE
# Starting from the least significant bit:
# - Parity
# - Sign
# - Carry (overflow)
# - Zero
# - CND
# Enumeration for the STATUS register.
STATUS = modEnum.enum(PARITY=0x0001, # 0b 0000 0000 0000 0001
SIGN=0x0002, # 0b 0000 0000 0000 0010
CARRY=0x0004, # 0b 0000 0000 0000 0100
ZERO=0x0008, # 0b 0000 0000 0000 1000
CND=0x0010) # 0b 0000 0000 0001 0000
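
# A short illustration of how these flags are meant to be set and tested
# (sketch only; regS below stands for the CPU status register):
#
#     regS = 0x0000
#     regS |= STATUS.ZERO          # set the Zero flag
#     if regS & STATUS.ZERO:       # test the Zero flag
#         pass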
class CPU:
"""
class CPU
========================
Cette classe contient la classe cpu. Elle représente le CPU
telle que présentée dans le document de
spécification.
:example:
>>> test = CPU(modBus.Bus())
"""
def __init__(self, bus):
"""
Constructeur de la classe CPU.
Le constructeur initialise les composants du CPU dont l'ALU.
Elle s'occupe aussi de lier le CPU avec le bus en entrée.
:example:
>>> test = CPU(modBus.Bus())
:param bus: Le bus du MicroOrdinateur.
:type bus: Bus
"""
self.event = False
# Connexion avec le bus.
self.bus = bus
self.bus.register(self)
# Création de l'ALU.
self.alu = modALU.ALU()
# Création des registres.
self.regP = 0x0000 # Program counter.
self.regI = 0x0000 # Instruction register.
self.regS = 0x0000 # Status Register.
# Registres A, B, C, D
self.regA = 0x0000
self.regB = 0x0000
self.regC = 0x0000
self.regD = 0x0000
# Fin.
return
def _getReg(self, registre):
"""
Lit le registre en argument avec la valeur.
Cette fonction lit et retourne la valeur du registre en argument.
:param registre: Registre à lire.
:type registre: int (16 bits)
:rtype: Valeur dudit registre.
:rtype: int (16 bits)
"""
if registre == REGISTRE.A:
return self.regA
elif registre == REGISTRE.B:
return self.regB
elif registre == REGISTRE.C:
return self.regC
elif registre == REGISTRE.D:
return self.regD
else:
raise Exception()
        # Unreachable.
return
def _setReg(self, registre, valeur):
"""
Modifie le registre en argument avec la valeur.
Cette fonction modifie la valeur du registre en argument avec
la valeur en argument.
:param registre: Registre à modifier.
:type registre: int (16 bits)
:param valeur: Valeur à assigner.
:type valeur: int (16 bits)
"""
if registre == REGISTRE.A:
self.regA = valeur
elif registre == REGISTRE.B:
self.regB = valeur
elif registre == REGISTRE.C:
self.regC = valeur
elif registre == REGISTRE.D:
self.regD = valeur
else:
return None
return
def clock(self):
"""
Récepteur pour le signal clock.
Cette fonction est appelé lorsqu'un coup d'horloge est émit
sur le bus. Elle gère la réinitialisation du CPU si le bus est
en mode RESET. Sinon le CPU fetch la prochaine instruction.
:example:
>>> bus = modBus.Bus()
>>> test = CPU(bus)
>>> test.clock()
>>> bus.clock()
>>> bus.event()
"""
# On réinitialise le CPU si le bus est en mode reset.
if self.bus.mode == MODE.RESET:
# Registres program.
self.regP = 0x0000 # Program counter.
self.regI = 0x0000 # Instruction register.
self.regS = 0x0000 # Status Register.
# Registres A, B, C, D
self.regA = 0x0000
self.regB = 0x0000
self.regC = 0x0000
self.regD = 0x0000
# On fetch la prochaine instruction si le bus est en mode INERTE.
elif self.bus.mode == MODE.INERTE:
self._fetch()
self._decode()
self._execute()
# Fin de la fonction.
return
def _readAddress(self):
"""
Cette fonction fetch une valeur d'une adresse.
Cette fonction va chercher la valeur à une adresse selon le
mode d'adressage.
"""
adressage = self.regI & 0x00F0
self.bus.mode = MODE.READ
# 1. L'argument est l'adresse d'un registre.
if adressage == ADRESSAGE.ADDR_OF_REG:
self.bus.data = self._getReg(self.bus.data)
return
# 2. L'argument est l'adresse d'un registre qui pointe vers une
# adresse.
elif adressage == ADRESSAGE.ADDR_FROM_REG:
# On fetch l'adresse indiquer dans ce registre.
self.bus.address = self._getReg(self.bus.data)
# 3. L'argument est une adresse.
elif adressage == ADRESSAGE.ADDR:
# On retourne l'adresse.
self.bus.address = self.bus.data
# 4. L'argument est une adresse qui pointe vers une adresse.
elif adressage == ADRESSAGE.ADDR_FROM_ADDR:
# Double-fetch.
self.bus.address = self.bus.data
self.bus.event()
self.bus.mode = MODE.READ
self.bus.address = self.bus.data
# Fetch la valeur à cette adresse.
self.bus.event()
return
def _fetch(self):
"""
Cette fonction fetch la prochaine instruction à exécuter.
Cette fonction permet prépare le bus pour que la mémoire lit
la prochaine instruction à exécuter.
"""
# On place le bus en mode lecture pour la prochaine adresse.
self.bus.mode = MODE.READ
self.bus.address = self.regP
# On envoie le signal au bus.
self.bus.event()
# On lit l'instruction dans le bus.
self.regI = self.bus.data
# Fin.
return
def _decode(self):
"""
Cette fonction décode l'instruction courante.
Cette fonction refait un fetch pour les commandes néccessitant
l'argument de droite (16 bits), sinon elle peut exécuter
celle-ci.
"""
# On vérifie si l'opération n'a pas besoin d'argument droit:
if {OPCODE.NOT: True,
OPCODE.EZ: True,
OPCODE.NZ: True,
OPCODE.NOP: True,
OPCODE.HLT: True}.get(self.regI, False):
return # On quitte pour aller à l'étape d'exécution.
# Sinon on fetch l'argument de droit:
else:
# On place le bus en mode lecture.
self.bus.mode = MODE.READ
self.bus.address = self.regP + 1
# On envoie le signal au bus.
self.bus.event()
# Fin.
return
def _execute(self):
"""
Cette fonction exécute l'instruction courante.
Cette fonction exécute l'instruction courante et retourne
les résultats appropriés dans le bus ou dans les registres.
"""
# On extrait les données pour travailler.
opcode = self.regI & 0xFF00
adressage = self.regI & 0x00F0
regG = self.regI & 0x000F
valD = self.bus.data
result = 0x0000
# NOP
if opcode == OPCODE.NOP:
pass
# ADD, SUB, MUL, DIV
# OR, AND, XOR, NOT
elif opcode & 0xF000 == 0x1000 \
or opcode & 0xF000 == 0x2000:
# Reset regS 0x00FF to 0x0000.
self.regS = 0x0000
# Execute
# ADD, SUB, MUL, DIV
# Si le OPCODE est ADD:
if opcode == OPCODE.ADD:
result = self.alu.fonctionADD(self._getReg(regG),
self._getReg(valD))
# Si le OPCODE est SUB:
elif opcode == OPCODE.SUB:
result = self.alu.fonctionSUB(self._getReg(regG),
self._getReg(valD))
# Si le OPCODE est MUL:
elif opcode == OPCODE.MUL:
result = self.alu.fonctionMUL(self._getReg(regG),
self._getReg(valD))
# Si le OPCODE est DIV:
elif opcode == OPCODE.DIV:
result = self.alu.fonctionDIV(self._getReg(regG),
self._getReg(valD))
# Execute
# OR, AND, XOR, NOT
# Si le OPCODE est OR:
elif opcode == OPCODE.OR:
result = self.alu.fonctionOR(self._getReg(regG),
self._getReg(valD))
# Si le OPCODE est AND:
elif opcode == OPCODE.AND:
result = self.alu.fonctionAND(self._getReg(regG),
self._getReg(valD))
# Si le OPCODE est XOR:
elif opcode == OPCODE.XOR:
result = self.alu.fonctionXOR(self._getReg(regG),
self._getReg(valD))
# Si le OPCODE est NOT:
elif opcode == OPCODE.NOT:
result = self.alu.fonctionNOT(self._getReg(regG))
# Vérification pour registre STATUS.
# --Parité.
self.regS |= (result & STATUS.PARITY)
# --Sign
if result < 0:
# Si le résultat est négatif, Sign = True
self.regS |= STATUS.SIGN
result = abs(result)
# --Carry
if result > 0xFFFF:
# Si on a un overflow, carry = True
self.regS |= STATUS.CARRY
result &= 0xFFFF
# --Zero
if result == 0x0000:
# Si le résultat est égal à zéro, Zero = True
self.regS |= STATUS.ZERO
# On mets le résultat dans le registre A-D.
self._setReg(regG, result)
# LT, GT, LE, GE, EQ, EZ
elif opcode & 0xF000 == 0x3000:
# Reset regS 0x00FF to 0x0000.
self.regS = 0x0000
# Execute
# Si le OPCODE est LT:
if opcode == OPCODE.LT:
result = self.alu.fonctionLT(self._getReg(regG),
self._getReg(valD))
# Si le OPCODE est GT:
elif opcode == OPCODE.GT:
result = self.alu.fonctionGT(self._getReg(regG),
self._getReg(valD))
# Si le OPCODE est LE:
elif opcode == OPCODE.LE:
result = self.alu.fonctionLE(self._getReg(regG),
self._getReg(valD))
# Si le OPCODE est GE:
elif opcode == OPCODE.GE:
result = self.alu.fonctionGE(self._getReg(regG),
self._getReg(valD))
# Si le OPCODE est EQ:
elif opcode == OPCODE.EQ:
result = self.alu.fonctionEQ(self._getReg(regG),
self._getReg(valD))
# Si le OPCODE est EZ:
elif opcode == OPCODE.EZ:
result = self.alu.fonctionEZ(self._getReg(regG))
# Si le OPCODE est NZ:
elif opcode == OPCODE.NZ:
result = self.alu.fonctionNZ(self._getReg(regG))
# On applique le résultat.
if result:
self.regS |= STATUS.CND
# JMP, JMZ, JMO, JMC, HLT, SET, LD, ST, MV
elif opcode & 0xF000 == 0x0000:
# Execute : JM*
# Si le OPCODE est JMP (résoudre l'adresse)
if opcode == OPCODE.JMP:
self._readAddress()
self.regP = self.bus.address
self.bus.mode = MODE.END
return
# Si le OPCODE est JMZ et flag ZERO ON (résoudre l'adresse)
elif opcode == OPCODE.JMZ \
and self.regS & STATUS.ZERO == STATUS.ZERO:
self._readAddress()
self.regP = self.bus.address
self.bus.mode = MODE.END
return
# Si le OPCODE est JMO et flag CARRY ON (résoudre l'adresse)
elif opcode == OPCODE.JMO \
and self.regS & STATUS.CARRY == STATUS.CARRY:
self._readAddress()
self.regP = self.bus.address
self.bus.mode = MODE.END
return
# Si le OPCODE est JMC et flag CND ON (résoudre l'adresse)
elif opcode == OPCODE.JMC \
and self.regS & STATUS.CND == STATUS.CND:
self._readAddress()
self.regP = self.bus.address
self.bus.mode = MODE.END
return
# Execute : ***
# Si le OPCODE est SET
if opcode == OPCODE.SET:
self._setReg(regG, valD)
# Si le OPCODE est LD (résoudre l'adresse)
elif opcode == OPCODE.LD:
self._readAddress()
self.bus.mode = MODE.READ
self.bus.event()
self._setReg(regG, self.bus.data)
# Si le OPCODE est ST (résoudre l'adresse)
elif opcode == OPCODE.ST:
self._readAddress()
self.bus.data = self._getReg(regG)
self.bus.mode = MODE.WRITE
self.bus.event()
# Si le OPCODE est MV
elif opcode == OPCODE.MV:
self._setReg(regG, self._getReg(valD))
# Si le OPCODE est HALT
elif opcode == OPCODE.HLT:
# On empêche toute prochaine exécution
self.bus.mode = MODE.HALT
return
# On incrémente le Program Counter.
self.regP += 2
# On place le bus en MODE END (sauf pour les OPCODE's qui
# requisent de résoudre une adresse).
self.bus.mode = MODE.END
# On empêche toute prochaine exécution si le pc est illégal.
if self.regP > 0xFFFF:
self.bus.mode = MODE.HALT
# Fin.
return
# Enable doctests when this module is run directly.
if __name__ == "__main__":
import doctest
doctest.testmod()
| MarcAndreJean/PCONC | Modules/04-02-CPU.py | Python | mit | 15,906 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-07 02:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('inventory', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='produce',
name='plu',
field=models.IntegerField(unique=True),
),
]
| kkoci/orthosie | inventory/migrations/0002_auto_20151206_2111.py | Python | gpl-3.0 | 438 | 0 |
# Copyright (c) 2012 The Khronos Group Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and /or associated documentation files (the "Materials "), to deal in the Materials without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Materials, and to permit persons to whom the Materials are furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Materials.
# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
# See Core.Logic.FJudgementContext for the information
# of the 'context' parameter.
# This sample judging object does the following:
#
# JudgeBaseline: just verifies that the standard steps did not crash.
# JudgeSuperior: also verifies that the validation steps are not in error.
# JudgeExemplary: same as intermediate badge.
# We import an assistant script that includes the common verification
# methods. The assistant buffers its checks, so that running them again
# does not incur an unnecessary performance hit.
from StandardDataSets.scripts import JudgeAssistant
# Please feed your node list here:
tagLst = []
attrName = ''
attrVal = ''
dataToCheck = ''
class SimpleJudgingObject:
def __init__(self, _tagLst, _attrName, _attrVal, _data):
self.tagList = _tagLst
self.attrName = _attrName
self.attrVal = _attrVal
self.dataToCheck = _data
self.status_baseline = False
self.status_superior = False
self.status_exemplary = False
self.__assistant = JudgeAssistant.JudgeAssistant()
def JudgeBaseline(self, context):
# No step should not crash
self.__assistant.CheckCrashes(context)
# Import/export/validate must exist and pass, while Render must only exist.
self.__assistant.CheckSteps(context, ["Import", "Export", "Validate"], ["Render"])
if (self.__assistant.GetResults() == False):
self.status_baseline = False
return False
# Compare the rendered images between import and export
# Then compare images against reference test to check for non-equivalence
self.__assistant.CompareRenderedImages(context)
self.status_baseline = self.__assistant.DeferJudgement(context)
return self.status_baseline
    # To pass the Superior badge you need to pass Baseline; this object
    # could also include additional tests specific to the Superior badge.
def JudgeSuperior(self, context):
self.status_superior = self.status_baseline
return self.status_superior
    # To pass the Exemplary badge you need to pass Superior; this object
    # could also include additional tests specific to the Exemplary badge.
def JudgeExemplary(self, context):
self.status_exemplary = self.status_superior
return self.status_exemplary
# This is where all the work occurs: "judgingObject" is an absolutely necessary token.
# The dynamic loader looks very specifically for a class instance named "judgingObject".
#
judgingObject = SimpleJudgingObject(tagLst, attrName, attrVal, dataToCheck);
| KhronosGroup/COLLADA-CTS | StandardDataSets/collada/library_visual_scenes/visual_scene/node/_reference/_reference_node_translate_xyz_cube/_reference_node_translate_xyz_cube.py | Python | mit | 3,826 | 0.007057 |
"""
This package contains tests for the Multichain community in Tribler.
"""
| MaxVanDeursen/tribler | Tribler/Test/Community/Multichain/__init__.py | Python | lgpl-3.0 | 77 | 0 |
#!/usr/bin/env python3
# -*- coding: utf8 -*-
import tweepy
consumer_key = ""
consumer_secret = ""
access_token = ""
access_token_secret = ""
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
def tdata():
userid = str(input("Please input id who you want fav attack\n"))
count = input("input number you want to fav!\n")
return userid, count
def main():
t = tdata()
tl = []
fav = api.user_timeline(id = t[0], count = t[1])
for status in fav:
tl.append(status.id_str)
try:
pool.map(api.create_favorite, tl)
except tweepy.error.TweepError as e:
if e.args[0][0]['code'] == 139:
print("You have already favorited this status! \n")
else:
print(e.reason)
finally:
print("Done!")
if __name__ == "__main__":
main()
| zhangyubaka/tweepy_favbot | bot.py | Python | mit | 923 | 0.006501 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ModifyEntryOverview
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-datacatalog
# [START datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_sync]
from google.cloud import datacatalog_v1
def sample_modify_entry_overview():
# Create a client
client = datacatalog_v1.DataCatalogClient()
# Initialize request argument(s)
request = datacatalog_v1.ModifyEntryOverviewRequest(
name="name_value",
)
# Make the request
response = client.modify_entry_overview(request=request)
# Handle the response
print(response)
# [END datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_sync]
| googleapis/python-datacatalog | samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py | Python | apache-2.0 | 1,496 | 0.000668 |
from django.http import HttpResponseBadRequest, HttpResponse
"""
Build custom decorators for your views if you find that you are repeating
the same checks in multiple views.
"""
def ajax_required(f):
def wrap(request, *args, **kwargs):
if not request.is_ajax():
#return HttpResponse("hi")
return HttpResponseBadRequest()
return f(request, *args, **kwargs)
wrap.__doc__ = f.__doc__
wrap.__name__ = f.__name__
return wrap
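
# A minimal usage sketch (hypothetical view; the decorator rejects
# non-AJAX requests with HTTP 400):
#
#     from django.http import JsonResponse
#
#     @ajax_required
#     def like(request):
#         return JsonResponse({'status': 'ok'})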
| hiteshgarg14/Django-Social-Website | bookmarks/common/decorators.py | Python | mit | 478 | 0.004184 |
# Copyright 2012 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Most of this code originated in sphinx.domains.python and
# sphinx.ext.autodoc and has been only slightly adapted for use in
# subclasses here.
# :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
# :license: BSD, see LICENSE for details.
import re
from sphinx import addnodes
from sphinx.domains.python import _pseudo_parse_arglist
from sphinx.domains.python import PyModulelevel
from sphinx.ext.autodoc import Documenter
from sphinx.ext.autodoc import FunctionDocumenter
from sphinx.locale import _
yaml_sig_re = re.compile(r'yaml:\s*(.*)')
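# Functions documented by this extension begin their docstring with a
# "yaml:" signature line that yaml_sig_re extracts; an illustrative
# (hypothetical) example:
#
#     def example_builder(registry, xml_parent, data):
#         """yaml: example
#         Short description rendered as the component's documentation.
#         """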
class PyYAMLFunction(PyModulelevel):
def handle_signature(self, sig, signode):
"""Transform a Python signature into RST nodes.
Return (fully qualified name of the thing, classname if any).
If inside a class, the current class name is handled intelligently:
* it is stripped from the displayed name if present
* it is added to the full name (return value) if not present
"""
name_prefix = None
name = sig
arglist = None
retann = None
# determine module and class name (if applicable), as well as full name
modname = self.options.get(
'module', self.env.temp_data.get('py:module'))
classname = self.env.temp_data.get('py:class')
fullname = name
signode['module'] = modname
signode['class'] = classname
signode['fullname'] = fullname
sig_prefix = self.get_signature_prefix(sig)
if sig_prefix:
signode += addnodes.desc_annotation(sig_prefix, sig_prefix)
if name_prefix:
signode += addnodes.desc_addname(name_prefix, name_prefix)
anno = self.options.get('annotation')
signode += addnodes.desc_name(name, name)
if not arglist:
if self.needs_arglist():
# for callables, add an empty parameter list
signode += addnodes.desc_parameterlist()
if retann:
signode += addnodes.desc_returns(retann, retann)
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
return fullname, name_prefix
_pseudo_parse_arglist(signode, arglist)
if retann:
signode += addnodes.desc_returns(retann, retann)
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
return fullname, name_prefix
def get_index_text(self, modname, name_cls):
return _('%s (in module %s)') % (name_cls[0], modname)
class YAMLFunctionDocumenter(FunctionDocumenter):
priority = FunctionDocumenter.priority + 10
objtype = 'yamlfunction'
directivetype = 'yamlfunction'
@classmethod
def can_document_member(cls, member, membername, isattr, parent):
if not FunctionDocumenter.can_document_member(member, membername,
isattr, parent):
return False
if member.__doc__ is not None and yaml_sig_re.match(member.__doc__):
return True
return False
def _find_signature(self, encoding=None):
docstrings = Documenter.get_doc(self, encoding, 2)
if len(docstrings) != 1:
return
doclines = docstrings[0]
setattr(self, '__new_doclines', doclines)
if not doclines:
return
# match first line of docstring against signature RE
match = yaml_sig_re.match(doclines[0])
if not match:
return
name = match.group(1)
# ok, now jump over remaining empty lines and set the remaining
# lines as the new doclines
i = 1
while i < len(doclines) and not doclines[i].strip():
i += 1
setattr(self, '__new_doclines', doclines[i:])
return name
def get_doc(self, encoding=None, ignore=1):
lines = getattr(self, '__new_doclines', None)
if lines is not None:
return [lines]
return Documenter.get_doc(self, encoding, ignore)
def format_signature(self):
result = self._find_signature()
self._name = result
return ''
def format_name(self):
return self._name
def setup(app):
app.add_autodocumenter(YAMLFunctionDocumenter)
app.add_directive_to_domain('py', 'yamlfunction', PyYAMLFunction)
| gforcada/jenkins-job-builder | jenkins_jobs/sphinx/yaml.py | Python | apache-2.0 | 4,991 | 0 |
#!/usr/bin/env python
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This is generated, do not edit. Update BuildConfigGenerator.groovy and
# 3ppFetch.template instead.
from __future__ import print_function
import argparse
import json
import os
_FILE_URL = 'https://dl.google.com/dl/android/maven2/com/google/firebase/firebase-messaging/21.0.1/firebase-messaging-21.0.1.aar'
_FILE_NAME = 'firebase-messaging-21.0.1.aar'
_FILE_VERSION = '21.0.1'
def do_latest():
print(_FILE_VERSION)
def get_download_url(version):
if _FILE_URL.endswith('.jar'):
ext = '.jar'
elif _FILE_URL.endswith('.aar'):
ext = '.aar'
else:
raise Exception('Unsupported extension for %s' % _FILE_URL)
partial_manifest = {
'url': [_FILE_URL],
'name': [_FILE_NAME],
'ext': ext,
}
print(json.dumps(partial_manifest))
def main():
ap = argparse.ArgumentParser()
sub = ap.add_subparsers()
latest = sub.add_parser("latest")
latest.set_defaults(func=lambda _opts: do_latest())
download = sub.add_parser("get_url")
download.set_defaults(
func=lambda _opts: get_download_url(os.environ['_3PP_VERSION']))
opts = ap.parse_args()
opts.func(opts)
if __name__ == '__main__':
main()
| nwjs/chromium.src | third_party/android_deps/libs/com_google_firebase_firebase_messaging/3pp/fetch.py | Python | bsd-3-clause | 1,389 | 0.00072 |
import pwny
def setup():
pwny.target.assume(pwny.Target(arch=pwny.Target.Arch.x86))
| sigma-random/pwnypack | tests/__init__.py | Python | mit | 90 | 0 |
class Class(object):
pass
def func():
return 3.14
CONSTANT = 42
| retoo/pystructure | tests/python/typeinference/import_star_definitions.py | Python | lgpl-2.1 | 74 | 0.027027 |
#!/usr/bin/env python3
from heapq import heapify, heappop, heappush
with open('NUOC.INP') as f:
m, n = map(int, f.readline().split())
height = [[int(i) for i in line.split()] for line in f]
queue = ([(h, 0, i) for i, h in enumerate(height[0])]
+ [(h, m - 1, i) for i, h in enumerate(height[-1])]
+ [(height[i][0], i, 0) for i in range(m)]
+ [(height[i][-1], i, n - 1) for i in range(m)])
heapify(queue)
visited = ([[True] * n]
+ [[True] + [False] * (n - 2) + [True] for _ in range(m - 2)]
+ [[True] * n])
result = 0
while queue:
h, i, j = heappop(queue)
for x, y in (i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1):
if 0 <= x < m and 0 <= y < n and not visited[x][y]:
result += max(0, h - height[x][y])
heappush(queue, (max(height[x][y], h), x, y))
visited[x][y] = True
with open('NUOC.OUT', 'w') as f: print(result, file=f)
| McSinyx/hsg | others/other/nuoc.py | Python | gpl-3.0 | 941 | 0.002125 |
import requests
from tel_types import User, Message, Update, UserProfilePhotos
import time
base_url = 'https://api.telegram.org/bot'
class Telegram:
def __init__(self, token):
self.call_url = base_url + token + '/'
self.token = token
self.req_timeout = 5
self.text_limit = 4096
self.last_error = ''
self.me = self.getMe()
def __method_create__(self, method_name, files = None, data = None):
url = self.call_url + method_name
try:
if files is not None:
ret = requests.post(url, files = files, data = data, timeout = self.req_timeout)
else :
ret = requests.post(url, data = data, timeout = self.req_timeout)
except requests.exceptions.ConnectionError:
self.last_error = 'Error: Network Issue'
ret = None
except requests.exceptions.Timeout:
self.last_error = 'Error: Timeout Occured'
ret = None
except Exception:
self.last_error = 'Unknown Error'
ret = None
return ret
def __method_create_json__(self, method_name, files = None, data = None):
tmp = self.__method_create__(method_name, files = files, data = data)
if tmp == None:
ret = None
else:
try:
ret = tmp.json()
except ValueError:
self.last_error = "Error: Request Failed (JSON object not returned)"
ret = None
return ret
def getMe(self):
tmp = self.__method_create_json__('getMe')
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return User(tmp['result'])
def getUpdates(self, offset = None, limit = 100, timeout = 0):
data = {
'offset':offset,
'limit':limit,
'timeout':timeout
}
tmp = self.__method_create_json__('getUpdates', data = data)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return [Update(x) for x in tmp['result']]
def sendMessage(self, *args, **data):
if data == {}:
if len(args) != 1 or type(args[0]) != dict:
return None
data = args[0]
if 'reply_markup' in data:
data['reply_markup'] = data['reply_markup'].json_str
tmp = self.__method_create_json__('sendMessage', data = data)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return Message(tmp['result'])
def sendLargeMessage(self, **data):
if 'text' not in data:
return None
text = data['text']
while len(text) > self.text_limit:
send = self.split(text)
text = text[len(send):]
data['text'] = send
if self.sendMessage(data) is None:
return None
data['text'] = text
return self.sendMessage(data)
def forwardMessage(self, chat_id, from_chat_id, message_id):
data = {
'chat_id' : chat_id,
'from_chat_id' : from_chat_id,
'message_id' : message_id
}
tmp = self.__method_create_json__('forwardMessage', data = data)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return Message(tmp['result'])
def sendFiles(self, chat_id, input_file, file_type, caption = None, \
reply_to_message_id = None, reply_markup = None):
if input_file.file_id is input_file.file_o is None:
self.last_error = 'Error: No File Specified'
return None
data = {
'chat_id':chat_id,
'reply_to_message_id' : reply_to_message_id,
'reply_markup' : None if reply_markup is None else reply_markup.json_str
}
if caption is not None:
data['caption'] = caption
if input_file.file_id is not None:
files = {file_type:(None, input_file.file_id)}
else :
files = {file_type: input_file.file_o}
method_name = 'send' + file_type.title()
tmp = self.__method_create_json__(method_name, data = data, files = files)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return Message(tmp['result'])
def sendPhoto(self, chat_id, photo, caption = None, \
reply_to_message_id = None, reply_markup = None):
return self.sendFiles(chat_id, photo, 'photo', caption = caption, \
reply_to_message_id = reply_to_message_id, reply_markup = reply_markup)
def sendVideo(self, chat_id, photo, reply_to_message_id = None, reply_markup = None):
return self.sendFiles(chat_id, photo, 'video', \
reply_to_message_id = reply_to_message_id, reply_markup = reply_markup)
def sendAudio(self, chat_id, photo, reply_to_message_id = None, reply_markup = None):
return self.sendFiles(chat_id, photo, 'audio', \
reply_to_message_id = reply_to_message_id, reply_markup = reply_markup)
def sendDocument(self, chat_id, photo, reply_to_message_id = None, reply_markup = None):
return self.sendFiles(chat_id, photo, 'document',\
reply_to_message_id = reply_to_message_id, reply_markup = reply_markup)
def sendSticker(self, chat_id, photo, reply_to_message_id = None, reply_markup = None):
return self.sendFiles(chat_id, photo, 'sticker',\
reply_to_message_id = reply_to_message_id, reply_markup = reply_markup)
def sendLocation(self, chat_id, latitude, longitude, reply_to_message_id = None, \
reply_markup = None):
data = {
'chat_id': chat_id,
'latitude' : latitude,
'longitude' : longitude,
'reply_to_message_id' : reply_to_message_id,
'reply_markup' : None if reply_markup is None else reply_markup.json_str
}
tmp = self.__method_create_json__('sendLocation', data = data)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return Message(tmp['result'])
def sendChatAction(self, chat_id, action):
data = {
'chat_id' : chat_id,
'action' : action
}
tmp = self.__method_create_json__('sendChatAction', data = data)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return tmp['result']
def getUserProfilePhotos(self, user_id, offset = 0, limit = 100):
data = {
'user_id' : user_id,
'offset' : offset,
'limit' : limit
}
        tmp = self.__method_create_json__('getUserProfilePhotos', data = data)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return UserProfilePhotos(tmp['result'])
def split(self, text):
prev = ''
new = ''
for x in text.splitlines():
prev = new
new = new + '\n' + x
if len(new) > self.text_limit:
break
return prev
def setDefaultTimeout(self, timeout):
self.req_timeout = timeout
class TelegramEventLoop(Telegram):
def __init__(self, token, confile = 'telegram.conf'):
super().__init__(token)
self.handlers = []
self.exit = False
self.nonText = None
self.confile = confile
def addHandler(self, check_msg, *funcs):
for x in funcs:
self.handlers.append((check_msg, x))
return check_msg
def mainLoop(self):
try:
f = open(self.confile, 'r')
last_update = int(f.read())
f.close()
except FileNotFoundError:
last_update = 0
if self.checkNetworkConnection() is False:
print('No Connection')
self.waitForNetworkConnection()
print('Connection Back')
while self.exit is False:
update = self.getUpdates(offset = last_update + 1)
if update == None:
update = []
print(self.last_error)
if self.checkNetworkConnection() is False:
print('No Connection')
self.waitForNetworkConnection()
print('Connection Back')
elif update != []:
last_update = update[0].update_id
for x in update:
last_update = max(last_update, x.update_id)
for (key,foo) in self.handlers:
if key(x) == True:
foo(x)
if update != []:
f = open(self.confile, 'w')
f.write(str(last_update))
f.close()
print('Exiting')
return
def setNonTextHandler(self, func):
self.nonText = func
def handleNonText(self, x):
print("Non-Text Message Arrived\n" + x.msg_type + "\nCalling default Handler")
if self.nonText is not None:
return self.nonText(x)
return
def doExit(self, *arg):
self.exit = True
def checkNetworkConnection(self):
try:
requests.get('https://www.example.com')
except requests.exceptions.ConnectionError:
return False
return True
def waitForNetworkConnection(self):
while self.checkNetworkConnection() is False:
time.sleep(1)
| srijanrodo/telegram-bots-python-api | telegram.py | Python | gpl-2.0 | 8,175 | 0.050765 |
from indivo.models import Record, Demographics
from base import *
class TestRecord(TestModel):
model_fields = ['label', 'demographics', 'owner', 'external_id']
model_class = Record
def _setupargs(self, label, demographics=None, owner=None, external_id=None, extid_principal_key=None):
self.label = label
self.demographics = demographics
self.owner = owner
self.local_external_id = external_id
if extid_principal_key:
self.external_id = Record.prepare_external_id(external_id, extid_principal_key.to.raw_data['account_id'])
else:
self.external_id = None
_TEST_RECORDS = [
{'label':'testing_record_label',
'demographics':ForeignKey('demographics', 'TEST_DEMOGRAPHICS', 0),
'owner':ForeignKey('account', 'TEST_ACCOUNTS', 0),
},
{'label':'test_record_label2',
'demographics':ForeignKey('demographics', 'TEST_DEMOGRAPHICS', 1),
'owner':ForeignKey('account', 'TEST_ACCOUNTS', 0),
},
{'label':'empty_record',
},
{'label':'bob',
'owner':ForeignKey('account', 'TEST_ACCOUNTS', 0),
},
{'label':'jane',
'owner':ForeignKey('account', 'TEST_ACCOUNTS', 0),
},
{'label':'test_record_extid',
'demographics':ForeignKey('demographics', 'TEST_DEMOGRAPHICS', 2),
'owner':ForeignKey('account', 'TEST_ACCOUNTS', 0),
'external_id':'RECORD5_EXTID',
'extid_principal_key':ForeignKey('account', 'TEST_ACCOUNTS', 4),
},
]
TEST_RECORDS = scope(_TEST_RECORDS, TestRecord)
| sayan801/indivo_server | indivo/tests/data/record.py | Python | gpl-3.0 | 1,535 | 0.013029 |
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import json
import os
from sheetsite.site_queue import app
import smtplib
@app.task
def notify_one(email, subject, page, text):
print("send [%s] / %s / %s" % (email, subject, page))
server_ssl = smtplib.SMTP_SSL("smtp.gmail.com", 465)
server_ssl.ehlo() # optional, called by login()
me = os.environ['GMAIL_USERNAME']
server_ssl.login(me, os.environ['GMAIL_PASSWORD'])
msg = MIMEMultipart('alternative')
msg['Subject'] = subject
msg['From'] = me
msg['To'] = email
# Record the MIME types of both parts - text/plain and text/html.
part1 = MIMEText(text, 'plain')
part2 = MIMEText(page, 'html')
msg.attach(part1)
msg.attach(part2)
server_ssl.sendmail(me, email, msg.as_string())
server_ssl.close()
return True
@app.task
def notify_all(name, site_params, diff_html, diff_text):
print("NOTIFY_spreadsheet", site_params, name)
import daff
import jinja2
import premailer
root = os.environ['SHEETSITE_CACHE']
path = os.path.join(root, name)
print("Should look in", path)
notifications = None
for fname in ['private.json', 'public.json']:
full_fname = os.path.join(path, fname)
print("Look in", full_fname)
book = json.loads(open(full_fname).read())
if 'notifications' in book['tables']:
notifications = book['tables']['notifications']
break
if notifications is None:
print("No notifications requested")
return True
print("Notifications", notifications)
# make a html report
css = daff.DiffRender().sampleCss()
site_params = dict(site_params)
site_params['css'] = css
site_params['diff'] = diff_html
env = jinja2.Environment(loader=jinja2.PackageLoader('sheetsite', 'templates'))
template = env.get_template('update.html')
page = template.render(site_params)
page = premailer.transform(page)
site_params['diff'] = diff_text
template = env.get_template('update.txt')
page_text = template.render(site_params)
for target in notifications['rows']:
email = target.get('EMAIL', None)
if email is None:
email = target.get('email', None)
if email is not None:
if site_params['no_notify']:
print("skip email to {}".format(email))
else:
notify_one.delay(email=email,
subject="update to {}".format(site_params.get('name',
'directory')),
page=page,
text=page_text)
return True
| paulfitz/sheetsite | sheetsite/tasks/notify.py | Python | mit | 2,767 | 0.001084 |
############################ Copyrights and license ############################
# #
# Copyright 2020 Raju Subramanian <coder@mahesh.net> #
# #
# This file is part of PyGithub. #
# http://pygithub.readthedocs.io/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
import github.GithubObject
import github.NamedUser
class GithubApp(github.GithubObject.CompletableGithubObject):
"""
This class represents github apps. The reference can be found here https://docs.github.com/en/rest/reference/apps
"""
def __repr__(self):
return self.get__repr__({"id": self._id.value, "url": self._url.value})
@property
def created_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._created_at)
return self._created_at.value
@property
def description(self):
"""
:type: string
"""
self._completeIfNotSet(self._description)
return self._description.value
@property
def events(self):
"""
:type: list of string
"""
self._completeIfNotSet(self._events)
return self._events.value
@property
def external_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._external_url)
return self._external_url.value
@property
def html_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._html_url)
return self._html_url.value
@property
def id(self):
"""
:type: int
"""
self._completeIfNotSet(self._id)
return self._id.value
@property
def name(self):
"""
:type: string
"""
self._completeIfNotSet(self._name)
return self._name.value
@property
def owner(self):
"""
:type: :class:`Github.NamedUser.NamedUser`
"""
self._completeIfNotSet(self._owner)
return self._owner.value
@property
def permissions(self):
"""
:type: dict
"""
self._completeIfNotSet(self._permissions)
return self._permissions.value
@property
def slug(self):
"""
:type: string
"""
return self._slug.value
@property
def updated_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._updated_at)
return self._updated_at.value
@property
def url(self):
"""
:type: string
"""
return self._url.value
def _initAttributes(self):
self._created_at = github.GithubObject.NotSet
self._description = github.GithubObject.NotSet
self._events = github.GithubObject.NotSet
self._external_url = github.GithubObject.NotSet
self._html_url = github.GithubObject.NotSet
self._id = github.GithubObject.NotSet
self._name = github.GithubObject.NotSet
self._owner = github.GithubObject.NotSet
self._permissions = github.GithubObject.NotSet
self._slug = github.GithubObject.NotSet
self._updated_at = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "created_at" in attributes: # pragma no branch
self._created_at = self._makeDatetimeAttribute(attributes["created_at"])
if "description" in attributes: # pragma no branch
self._description = self._makeStringAttribute(attributes["description"])
if "events" in attributes: # pragma no branch
self._events = self._makeListOfStringsAttribute(attributes["events"])
if "external_url" in attributes: # pragma no branch
self._external_url = self._makeStringAttribute(attributes["external_url"])
if "html_url" in attributes: # pragma no branch
self._html_url = self._makeStringAttribute(attributes["html_url"])
if "id" in attributes: # pragma no branch
self._id = self._makeIntAttribute(attributes["id"])
if "name" in attributes: # pragma no branch
self._name = self._makeStringAttribute(attributes["name"])
if "owner" in attributes: # pragma no branch
self._owner = self._makeClassAttribute(
github.NamedUser.NamedUser, attributes["owner"]
)
if "permissions" in attributes: # pragma no branch
self._permissions = self._makeDictAttribute(attributes["permissions"])
if "slug" in attributes: # pragma no branch
self._slug = self._makeStringAttribute(attributes["slug"])
self._url = self._makeStringAttribute("/apps/" + attributes["slug"])
if "updated_at" in attributes: # pragma no branch
self._updated_at = self._makeDatetimeAttribute(attributes["updated_at"])
if "url" in attributes:
self._url = self._makeStringAttribute(attributes["url"])
| ahmad88me/PyGithub | github/GithubApp.py | Python | lgpl-3.0 | 6,426 | 0.004669 |
#
# Proximate - Peer-to-peer social networking
#
# Copyright (c) 2008-2011 Nokia Corporation
#
# All rights reserved.
#
# This software is licensed under The Clear BSD license.
# See the LICENSE file for more details.
#
from gobject import timeout_add_seconds, source_remove
from random import random, randrange
from time import time
from filesharing import Share_Meta, Subscription
from plugins import Plugin, get_plugin_by_type
from support import warning
from proximateprotocol import PLUGIN_TYPE_FETCHER, PLUGIN_TYPE_FILE_SHARING, \
PLUGIN_TYPE_MESSAGE_BOARD, PLUGIN_TYPE_NOTIFICATION, \
PLUGIN_TYPE_COMMUNITY, valid_fs_gid, \
PLUGIN_TYPE_STATE, PLUGIN_TYPE_SCHEDULER
from proximatestate import normal_traffic_mode
from typevalidator import validate, ANY, OPTIONAL_KEY, ZERO_OR_MORE, ONE_OR_MORE
from utils import n_lists, remove_all
SEARCH_TIMEOUT = 15 # in seconds
CACHE_UTIL = 66 # percent of messages to spare on cleanup
MAX_MESSAGES_IN_CACHE = 256 # max number of messages in cache
CACHE_VALIDITY_PERIOD = 3600 # in seconds
SEND_VALIDITY_PERIOD = 30 # in seconds
AUTO_BROADCAST_PERIOD = 120 # in seconds
def satisfy_criteria(criteria, meta):
if criteria != None:
for (key, value) in criteria.items():
if meta.get(key) != value:
return False
return True
def search_metas(metas, criteria, keywords):
""" Note: storage may contain message from others """
msgs = []
for meta in metas:
if not satisfy_criteria(criteria, meta):
continue
if keywords == None or len(keywords) == 0:
msgs.append(meta)
continue
l = ['']
for name in ('from', 'subject', 'purpose', 'msg'):
l.append(meta[name])
l.append('')
s = '\n'.join(l).lower()
for keyword in keywords:
if s.find(keyword.lower()) >= 0:
msgs.append(meta)
break
return msgs
class Search_Context:
def __init__(self, callback, ctx=None, criteria=None, keywords=None):
self.callback = callback
self.ctx = ctx
self.criteria = criteria
self.keywords = keywords
self.checked = {}
def process(self, user, metas):
filteredmetas = search_metas(metas, self.criteria, self.keywords)
if len(filteredmetas) == 0:
return
newmetas = []
for meta in filteredmetas:
key = (user, meta['id'])
if not self.checked.has_key(key):
newmetas.append(meta)
self.checked[key] = None
if len(newmetas) > 0:
self.callback(user, newmetas, self.ctx)
class Message_Board(Plugin):
""" Notes on messages, see 'self.msgspec' below.
'replyid' is used to create a message that is related to an older
message. The 'replyid' is the old message's gid.
If 'src' and 'dst' exist, they are guaranteed in Share_Meta
validation to be strings.
If 'ttl' exists, it is guaranteed to be a non-negative integer.
"""
msgspec = {'subject': str,
'from': str,
'msg': str,
OPTIONAL_KEY('replygid'): valid_fs_gid,
OPTIONAL_KEY('url'): str,
}
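    # An illustrative payload accepted by msgspec above (values invented
    # for the example; 'replygid', when present, must be a valid
    # filesharing gid):
    #
    #     {'subject': 'Lunch?', 'from': 'alice', 'msg': 'Meet at noon?',
    #      'url': 'http://example.org/menu'}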
queryspec = {'t': 'msgquery',
OPTIONAL_KEY('keywords'): [ONE_OR_MORE, str],
OPTIONAL_KEY('criteria'): {str: ANY},
}
queryresultspec = {'msgs': [ZERO_OR_MORE, {}]}
def __init__(self, options):
self.register_plugin(PLUGIN_TYPE_MESSAGE_BOARD)
self.community = None
self.fetcher = None
self.fs = None
self.state = None
self.notification = None
self.statusindicator = None
self.options = options
self.gui = None
self.queryidx = 0
self.keywords = []
self.searchctxs = []
self.notifications = {}
self.periodidx = 0
self.cache = {} # maps (uid, fsid) to (timestamp, meta)
def register_ui(self, ui):
self.gui = ui
def cleanup(self):
self.state.set_plugin_variable(self.name, 'watchkeywords', self.keywords)
savednotifications = {}
for (key, value) in self.notifications.items():
if value == 1:
savednotifications[key] = 1
self.state.set_plugin_variable(self.name, 'notifications', savednotifications)
def cancel_search(self, sctx):
self.searchctxs.remove(sctx)
return False
def msg_cache(self, user, metas):
t = int(time())
uid = user.get('uid')
for meta in metas:
self.cache[(uid, meta.get('id'))] = (t, meta)
if len(self.cache) <= MAX_MESSAGES_IN_CACHE:
return
timesorted = []
for (key, value) in self.cache.items():
timestamp = value[0]
timesorted.append((timestamp, key))
timesorted.sort()
ntodelete = len(timesorted) - (CACHE_UTIL * MAX_MESSAGES_IN_CACHE) / 100
for i in xrange(ntodelete):
key = timesorted[i][1]
self.cache.pop(key)
def process_results(self, reply):
metas = []
if reply == None:
return metas
if not validate(self.queryresultspec, reply):
warning('msgboard: Invalid results: %s\n' % str(reply))
return metas
for metadict in reply['msgs']:
meta = Share_Meta()
if not meta.unserialize(metadict):
                warning('msgboard: Cannot unserialize: %s\n' % str(metadict))
continue
if not self.validate_message(meta):
warning('msgboard: Invalid meta: %s\n' % str(meta))
continue
metas.append(meta)
return metas
def got_query_results(self, user, reply, ctx):
metas = self.process_results(reply)
self.msg_cache(user, metas)
for meta in metas:
if self.is_hot(meta):
self.notify_user(user, meta)
for sctx in self.searchctxs:
sctx.process(user, metas)
def handle_message(self, user, sm):
""" Handle messages that were found from other users' fileshares """
if not self.validate_message(sm):
sm['ttl'] = 0
warning('msgboard: Invalid message: %s\n' % str(sm))
return
warning('New message: %s\n' % sm['subject'])
def get_state(self):
return self.keywords
def is_hot(self, meta):
if len(self.keywords) == 0 or meta.get_priv('mine'):
return False
return len(search_metas([meta], None, self.keywords)) > 0
def modify_state(self, add, keyword):
if add:
if keyword in self.keywords:
return
self.keywords.append(keyword)
else:
remove_all(self.keywords, keyword)
self.cleanup()
def notify_user(self, user, meta):
uid = user.get('uid')
key = (uid, meta['id'])
if key in self.notifications:
return
self.notifications[key] = 0
msg = 'User %s has a message titled: %s. View it?' % (user.tag(), meta['subject'])
self.notification.notify_with_response(msg, self.view_message, (key, meta))
def view_message(self, response, msg, ctx):
(key, meta) = ctx
self.notifications[key] = 1
if response == self.notification.RESPONSE_DELETED:
return True
self.gui.view_message(meta)
return True
def all_metas(self):
metas = []
for share in self.fs.get_shares(purpose=self.name):
if share.meta.get_priv('mine'):
metas.append(share.meta)
return metas
def read_state(self):
l = self.state.get_plugin_variable(self.name, 'watchkeywords')
if l != None:
self.keywords = l
notifications = self.state.get_plugin_variable(self.name, 'notifications')
if notifications != None:
self.notifications = notifications
def handle_msgpush(self, user, request):
self.got_query_results(user, request, None)
return None
def handle_request(self, user, request):
""" Handle incoming queries. Search through Share_Metas. """
if request.get('t') == 'msgpush':
return self.handle_msgpush(user, request)
if not validate(self.queryspec, request):
warning('Invalid msgboard query: %s\n' % str(request))
return None
keywords = request.get('keywords')
criteria = request.get('criteria')
if criteria == None:
criteria = {}
criteria.setdefault('src', self.community.myuid)
metas = search_metas(self.all_metas(), criteria, keywords)
if not normal_traffic_mode():
t = int(time())
metas = filter(lambda meta: self.test_send_time(meta, t), metas)
for meta in metas:
self.set_send_time(meta, t)
serializedmetas = []
for meta in metas:
serializedmetas.append(meta.serialize())
if len(serializedmetas) == 0:
if normal_traffic_mode():
return {'msgs': []}
else:
return self.fetcher.POSTPONE_REPLY
if self.fetcher.is_fetch_community_efficient():
com = self.community.get_default_community()
# Broadcast messages in bundles of three messages
push = {'t': 'msgpush'}
for metabundle in n_lists(serializedmetas, 3):
push['msgs'] = metabundle
self.fetcher.fetch_community(com, self.name, push, None, ack=False)
return self.fetcher.POSTPONE_REPLY
return {'msgs': serializedmetas}
def publish(self, d, path=None, save=True):
sm = Share_Meta(d)
sm.replicate(withidentity=True)
if not self.validate_message(sm):
warning('Not creating an invalid msgboard message: %s\n' % str(sm))
return None
share = self.fs.add_share(purpose=self.name, sharemeta=sm, save=save)
if share == None:
return None
return share.meta
def delete(self, meta):
shareid = meta.get('id')
self.fs.remove_share(self.fs.get_share(shareid))
self.gui.message_deleted_cb(meta)
def search(self, callback, ctx=None, criteria=None, keywords=None, replicated=False, fetch=True):
""" The caller gets an indetermistic number of result callbacks.
Empty keywords, or keywords == None, means get all messages. """
if criteria == None:
criteria = {}
sctx = Search_Context(callback, ctx=ctx, criteria=criteria, keywords=keywords)
if fetch:
self.searchctxs.append(sctx)
timeout_add_seconds(SEARCH_TIMEOUT, self.cancel_search, sctx)
self.statusindicator.set_status('Searching messages', timeout=SEARCH_TIMEOUT)
# Query others
req = {'t': 'msgquery'}
if keywords != None:
req['keywords'] = keywords
req['criteria'] = criteria
com = self.community.get_default_community()
self.fetcher.fetch_community(com, self.name, req, self.got_query_results)
self.query_cache(sctx)
# Then query myself
sctx.process(self.community.get_myself(), self.all_metas())
def set_send_time(self, meta, t):
meta.set_priv('sendtime', t)
def test_send_time(self, meta, t):
""" Returns True iff message should be sent """
sendtime = meta.get_priv('sendtime')
tlimit = t - SEND_VALIDITY_PERIOD
if sendtime == None or sendtime < tlimit or t < sendtime:
return True
else:
return False
def query_cache(self, sctx):
timelimit = int(time()) - CACHE_VALIDITY_PERIOD
d = {}
todelete = []
for (key, value) in self.cache.items():
(uid, id) = key
(mtime, meta) = value
user = self.community.get_user(uid)
if user == None or mtime < timelimit:
todelete.append(key)
continue
d.setdefault(user, []).append(meta)
for key in todelete:
self.cache.pop(key)
for (user, metas) in d.items():
sctx.process(user, metas)
def query_messages(self, showmine=False, target=None):
""" Start an asynchronous query process. Results are displayed in the
GUI as they come in from peers. """
criteria = None
if target != None:
criteria = {'community': target.get('name')}
# Generate a new query context that is passed along with the query.
# This structure is used to process incoming results, and to
# reject results from older queries.
self.queryidx += 1
queryctx = (self.queryidx, [], criteria)
fetch = not showmine
self.search(self.collect_messages, ctx=queryctx, criteria=criteria, fetch=fetch)
if showmine:
self.statusindicator.set_status(None)
def collect_messages(self, user, metas, queryctx):
(queryidx, queryresults, criteria) = queryctx
if queryidx != self.queryidx:
return
queryresults += metas
        # First, take messages published by myself so that others may not
        # inject gids of my messages into my view.
gids = {}
mymetas = []
for meta in queryresults:
if not meta.get_priv('mine'):
continue
gid = meta.get('gid')
if gid != None:
gids[gid] = None
mymetas.append(meta)
# Second, take results from others
metas = []
for meta in queryresults:
if meta.get_priv('mine'):
continue
gid = meta.get('gid')
if gid != None and gid in gids:
continue
gids[gid] = None
metas.append(meta)
# Put my metas last
metas += mymetas
self.gui.update_message_list(metas)
def periodic(self, t, ctx):
if not self.fetcher.is_fetch_community_efficient():
return False
if random() >= (5.0 / AUTO_BROADCAST_PERIOD):
return True
l = []
t = int(time())
for meta in self.all_metas():
if self.test_send_time(meta, t):
l.append((meta['id'], meta))
l.sort()
if len(l) == 0:
return True
meta = l[self.periodidx % len(l)][1]
self.periodidx = (self.periodidx + 1) % len(l)
self.set_send_time(meta, t)
push = {'t': 'msgpush',
'msgs': [meta.serialize()]}
com = self.community.get_default_community()
self.fetcher.fetch_community(com, self.name, push, None, ack=False)
return True
def ready(self):
self.community = get_plugin_by_type(PLUGIN_TYPE_COMMUNITY)
self.fetcher = get_plugin_by_type(PLUGIN_TYPE_FETCHER)
self.fs = get_plugin_by_type(PLUGIN_TYPE_FILE_SHARING)
self.state = get_plugin_by_type(PLUGIN_TYPE_STATE)
self.notification = get_plugin_by_type(PLUGIN_TYPE_NOTIFICATION)
self.sch = get_plugin_by_type(PLUGIN_TYPE_SCHEDULER)
self.sch.call_periodic(5 * self.sch.SECOND, self.periodic)
self.statusindicator = self.notification.get_progress_indicator('Msg board')
# Subscribe to fileshares that are msgboard messages
self.fs.subscribe(Subscription(purpose=self.name, callback=self.handle_message))
self.fetcher.register_handler(self.name, self.handle_request, self.name)
self.read_state()
def validate_message(self, sm):
return validate(self.msgspec, sm.d)
def init(options):
Message_Board(options)
| proximate/proximate | messageboard.py | Python | bsd-3-clause | 15,997 | 0.002688 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from osc_lib.tests import utils
from unittest import mock
class TestApplicationCatalog(utils.TestCommand):
def setUp(self):
super(TestApplicationCatalog, self).setUp()
self.app.client_manager.application_catalog = mock.Mock()
| openstack/python-muranoclient | muranoclient/tests/unit/osc/v1/fakes.py | Python | apache-2.0 | 823 | 0 |
def forenkling(a,b):
while b!=0:
gammel_b=b
b=a%b
a=gammel_b
#print(a,b)
return a
print(forenkling(30,20))
print(forenkling(10,2))
def gcd(a,b):
a=forenkling(a,b)
return a
def reduce_fraction(a,b):
divisor=forenkling(a,b)
a=int(a/divisor)
b=int(b/divisor)
return a,b
def main():
    print('This reduces fractions of the form a/b:')
    a = int(input('Enter an integer a: '))
    b = int(input('Enter an integer b: '))
    a,b=reduce_fraction(a,b)
    if a!=b:
        print('The reduced fraction is: ',a,'/',b,sep='')
    else:
        print('The reduced fraction is: 1')
main()
| TorleifHensvold/ITGK3 | Oving5/torleif/07_Forenkling_av_brøker.py | Python | mit | 706 | 0.04416 |
# -*- coding: utf-8 -*-
"""The application's model objects"""
from zope.sqlalchemy import ZopeTransactionExtension
from sqlalchemy.orm import scoped_session, sessionmaker
#from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base
# Global session manager: DBSession() returns the Thread-local
# session object appropriate for the current web request.
maker = sessionmaker(autoflush=True, autocommit=False, extension=ZopeTransactionExtension())
maker2 = sessionmaker(autoflush=False, autocommit=False)
DBSession = scoped_session(maker)
DBSession2 = scoped_session(maker2)
# Base class for all of our model classes: By default, the data model is
# defined with SQLAlchemy's declarative extension, but if you need more
# control, you can switch to the traditional method.
DeclarativeBase = declarative_base()
DeclarativeBase2 = declarative_base()
# There are two convenient ways for you to spare some typing.
# You can have a query property on all your model classes by doing this:
# DeclarativeBase.query = DBSession.query_property()
# Or you can use a session-aware mapper as it was used in TurboGears 1:
# DeclarativeBase = declarative_base(mapper=DBSession.mapper)
# Global metadata.
# The default metadata is the one from the declarative base.
metadata = DeclarativeBase.metadata
metadata2 = DeclarativeBase2.metadata
# If you have multiple databases with overlapping table names, you'll need a
# metadata for each database. Feel free to rename 'metadata2'.
#metadata2 = MetaData()
#####
# Generally you will not want to define your table's mappers, and data objects
# here in __init__ but will want to create modules them in the model directory
# and import them at the bottom of this file.
#
######
def init_model(engine1, engine2):
"""Call me before using any of the tables or classes in the model."""
# DBSession.configure(bind=engine)
DBSession.configure(bind=engine1)
DBSession2.configure(bind=engine2)
metadata.bind = engine1
metadata2.bind = engine2
# Import your model modules here.
from vatsystem.model.auth import User, Group, Permission
from vatsystem.model.base_erp import *
from vatsystem.model.base_vat import *
from vatsystem.model.erp import *
from vatsystem.model.systemutil import *
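# A minimal wiring sketch (the in-memory SQLite URLs are placeholders,
# not project settings):
#
#     from sqlalchemy import create_engine
#     init_model(create_engine('sqlite://'), create_engine('sqlite://'))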
| LamCiuLoeng/vat | vatsystem/model/__init__.py | Python | mit | 2,266 | 0.004854 |
# -*- coding:utf-8 -*-
"""
Description:
Issue Transaction
Usage:
from AntShares.Core.IssueTransaction import IssueTransaction
"""
from AntShares.Core.AssetType import AssetType
from AntShares.Helper import *
from AntShares.Core.Transaction import Transaction
from AntShares.Core.TransactionType import TransactionType
from random import randint
class IssueTransaction(Transaction):
"""docstring for IssueTransaction"""
def __init__(self, inputs, outputs):
super(IssueTransaction, self).__init__(inputs, outputs)
self.TransactionType = TransactionType.IssueTransaction # 0x40
self.Nonce = self.genNonce()
def genNonce(self):
        return randint(268435456, 4294967295)
def getScriptHashesForVerifying(self):
"""Get ScriptHash From SignatureContract"""
pass
def serializeExclusiveData(self, writer):
writer.writeUInt32(self.Nonce)
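# A hedged construction sketch (empty lists merely illustrate the call;
# real transactions pass the SDK's input/output objects):
#
#     tx = IssueTransaction(inputs=[], outputs=[])
#     print(tx.Nonce)  # random 32-bit nonce chosen in genNonce()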
| AntSharesSDK/antshares-python | sdk/AntShares/Core/IssueTransaction.py | Python | apache-2.0 | 922 | 0 |
import sys
import logging
import profile
import pstats
try:
import cStringIO as StringIO
StringIO # pyflakes
except ImportError:
import StringIO
from instrumenting import utils
class BaseProfilingHandler(utils.InstrumentingHandler):
"""
Python logging handler which profiles code.
It can also optionally log profiling stats and/or dump the raw
stats to a file.
"""
    def __init__(self, start=False, stop=False, functions=None,
                 restriction=[50], strip_dirs=True,
                 sort_stats=['cumulative'], print_formats=['stats'],
                 level=logging.NOTSET):
        utils.InstrumentingHandler.__init__(self, level=level)
        self.start = start
        self.stop = stop
        self.functions = functions
        self.restriction = restriction
        self.strip_dirs = strip_dirs
        self.sort_stats = sort_stats
        self.print_formats = print_formats
        # Set up last so the profiler can inspect the attributes above.
        self.setUpProfiler()
def emit(self, record):
"""
Start or stop the configured profiler logging details.
If the handler is configured to start the profiler and it is
already started, a warning message is logged and it is left
running. Similarly, if the handler is configured to stop the
profiler and it is already stopped, a warning message is
        logged and it is left stopped.
In order to avoid surprising performance impacts, if the
handler is configured such that it enables and disables the
profiler for the same single log message, an error message is
logged but the profiler is still disabled.
"""
started = False
if self.start:
if self.running():
self.log(logging.WARNING,
'Profiler %r already running, ignoring start'
% self.profiler)
else:
self.log(logging.INFO,
'Starting profiler %r' % self.profiler)
self.enable()
started = True
if self.stop:
if not self.running():
self.log(logging.WARNING,
'Profiler %r not running, ignoring stop'
% self.profiler)
else:
if started:
self.log(logging.ERROR,
'Handler for profiler %r configured to start '
'and stop for the same log message'
% self.profiler)
self.log(logging.INFO,
'Stopping profiler %r' % self.profiler)
self.disable()
if not started and self.print_formats:
self.log(logging.DEBUG, 'Printing profiler %r stats:\n%s'
% (self.profiler, self.log_stats()))
def log_stats(self):
stream = StringIO.StringIO()
stats = self.get_stats(stream)
if stats is None:
return
if self.strip_dirs:
stats.strip_dirs()
if self.sort_stats:
            stats.sort_stats(*self.sort_stats)
for method in self.print_formats:
getattr(stats, 'print_'+method)(*self.restriction)
        return stream.getvalue()
# Profiler specific support
    def setUpProfiler(self):
        """Set up the selected profiler."""
        raise NotImplementedError
    def enable(self):
        raise NotImplementedError
    def disable(self):
        raise NotImplementedError
def running(self):
return isinstance(sys.getprofile(), type(self.profiler))
def get_stats(self, stream):
if self.running():
self.log(logging.ERROR,
"Cannot get stats when the profiler from the "
"`profile` module is already running")
return None
stats = pstats.Stats(self.profiler, stream=stream)
return stats
class ProfileHandler(BaseProfilingHandler):
"""Use the pure-python `profile` module to profile on logging events."""
def setUpProfiler(self):
if not self.functions:
raise ValueError(
'The `profile` module does not support profiling '
'an already running stack')
self.profiler = profile.Profile()
def running(self):
hook = sys.getprofile()
return (hook is self.profiler.dispatcher
and isinstance(hook.im_self, type(self.profiler)))
| rpatterson/instrumenting | src/instrumenting/profilehandler.py | Python | gpl-2.0 | 4,513 | 0.000886 |
print("hello world!")
| Pyroseza/Random | test.py | Python | mit | 23 | 0.043478 |
import ConfigParser
import os.path
import ast
DEFAULT_CONF_FILE = '/etc/unbound/unbound_ec2.conf'
DEFAULT_AWS_REGION = 'us-west-1'
DEFAULT_ZONE = 'zone.tld'
DEFAULT_REVERSE_ZONE = '127.in-addr.arpa'
DEFAULT_TTL = '300'
DEFAULT_CACHE_TTL = '30'
DEFAULT_SERVER_TYPE = 'caching'
DEFAULT_LOOKUP_TYPE = 'cache'
DEFAULT_LOOKUP_TAG_NAME_INCLUDE_DOMAIN = 'True'
DEFAULT_LOOKUP_FILTERS = "{'instance-state-name': 'running'}"
DEFAULT_IP_ORDER = 'private'
DEFAULT_FORWARDED_ZONES = ''
class UnboundEc2Conf(object):
"""Configuration parser for Unbound EC2 module.
"""
def __init__(self, conf_file=None):
self.config = ConfigParser.ConfigParser()
self.conf_file = conf_file if conf_file else os.environ.get('UNBOUND_EC2_CONF',
DEFAULT_CONF_FILE).encode('ascii')
self.ec2 = {}
self.main = {}
self.lookup = {}
self.lookup_filters = {}
self.server = {}
def set_defaults(self):
"""Sets default values for defined self instance attributes.
"""
self.ec2['aws_region'] = os.environ.get('AWS_DEFAULT_REGION', DEFAULT_AWS_REGION).encode('ascii')
self.main['zone'] = os.environ.get('UNBOUND_ZONE', DEFAULT_ZONE).encode('ascii')
self.main['reverse_zone'] = os.environ.get('UNBOUND_REVERSE_ZONE', DEFAULT_REVERSE_ZONE).encode('ascii')
self.main['ttl'] = self.__try_type(os.environ.get('UNBOUND_TTL', DEFAULT_TTL).encode('ascii'))
self.main['cache_ttl'] = self.__try_type(
os.environ.get('UNBOUND_CACHE_TTL', DEFAULT_CACHE_TTL).encode('ascii'))
self.server['type'] = os.environ.get('UNBOUND_SERVER_TYPE', DEFAULT_SERVER_TYPE).encode('ascii')
self.lookup['type'] = os.environ.get('UNBOUND_LOOKUP_TYPE', DEFAULT_LOOKUP_TYPE).encode('ascii')
self.lookup['tag_name_include_domain'] = self.__try_type(
os.environ.get('UNBOUND_LOOKUP_TAG_NAME_INCLUDE_DOMAIN',
DEFAULT_LOOKUP_TAG_NAME_INCLUDE_DOMAIN).encode('ascii'))
self.lookup_filters = self.__try_type(
os.environ.get('UNBOUND_LOOKUP_FILTERS', DEFAULT_LOOKUP_FILTERS).encode('ascii'))
self.main['ip_order'] = os.environ.get('UNBOUND_IP_ORDER', DEFAULT_IP_ORDER).encode('ascii')
self.main['forwarded_zones'] = os.environ.get('UNBOUND_FORWARDED_ZONES', DEFAULT_FORWARDED_ZONES)\
.encode('ascii')
def parse(self):
"""Tries to read defined configuration file and merge values with instance attributes.
"""
result = False
if os.path.isfile(self.conf_file):
self.config.read(self.conf_file)
for section in self.config.sections():
setattr(self, section, self.__get_merged_attribute(section, dict(self.config.items(section))))
result = True
return result
def __get_merged_attribute(self, name, value):
string_result = value
        existing = getattr(self, name, None)  # tolerate unknown sections
        if existing:
            string_result = existing.copy()
            string_result.update(value)
result = {}
for key in string_result:
result[key] = self.__try_type(string_result[key])
return result
def __try_type(self, value):
try:
result = ast.literal_eval(value)
except (ValueError, SyntaxError):
result = value
return result
| unibet/unbound-ec2 | unbound_ec2/config.py | Python | isc | 3,425 | 0.00438 |
"""
Module to set up run time parameters for Clawpack.
The values set in the function setrun are then written out to data files
that will be read in by the Fortran code.
"""
import os
from pyclaw import data
#------------------------------
def setrun(claw_pkg='classic'):
#------------------------------
"""
Define the parameters used for running Clawpack.
INPUT:
claw_pkg expected to be "classic" for this setrun.
OUTPUT:
rundata - object of class ClawRunData
"""
assert claw_pkg.lower() == 'classic', "Expected claw_pkg = 'classic'"
ndim = 1
rundata = data.ClawRunData(claw_pkg, ndim)
#------------------------------------------------------------------
# Problem-specific parameters to be written to setprob.data:
#------------------------------------------------------------------
probdata = rundata.new_UserData(name='probdata',fname='setprob.data')
probdata.add_param('ndim', 2, 'ndim')
probdata.add_param('rho', 1., 'density of medium')
probdata.add_param('bulk', 4., 'bulk modulus')
probdata.add_param('width', 0.1, 'width used in qinit')
#------------------------------------------------------------------
# Standard Clawpack parameters to be written to claw.data:
#------------------------------------------------------------------
clawdata = rundata.clawdata # initialized when rundata instantiated
# ---------------
# Spatial domain:
# ---------------
# Number of space dimensions:
clawdata.ndim = ndim
# Lower and upper edge of computational domain:
clawdata.xlower = 0.0
clawdata.xupper = 1.5
# Number of grid cells:
clawdata.mx = 2000
# ---------------
# Size of system:
# ---------------
# Number of equations in the system:
clawdata.meqn = 2
# Number of auxiliary variables in the aux array (initialized in setaux)
clawdata.maux = 0
# Index of aux array corresponding to capacity function, if there is one:
clawdata.mcapa = 0
# -------------
# Initial time:
# -------------
clawdata.t0 = 0.0
# -------------
# Output times:
#--------------
# Specify at what times the results should be written to fort.q files.
# Note that the time integration stops after the final output time.
# The solution at initial time t0 is always written in addition.
clawdata.outstyle = 1
if clawdata.outstyle==1:
# Output nout frames at equally spaced times up to tfinal:
clawdata.nout = 4
clawdata.tfinal = 0.2
elif clawdata.outstyle == 2:
# Specify a list of output times.
clawdata.tout = [0.5, 1.0] # used if outstyle == 2
clawdata.nout = len(clawdata.tout)
elif clawdata.outstyle == 3:
# Output every iout timesteps with a total of ntot time steps:
iout = 1
ntot = 5
clawdata.iout = [iout, ntot]
# ---------------------------------------------------
# Verbosity of messages to screen during integration:
# ---------------------------------------------------
# The current t, dt, and cfl will be printed every time step
# at AMR levels <= verbosity. Set verbosity = 0 for no printing.
# (E.g. verbosity == 2 means print only on levels 1 and 2.)
clawdata.verbosity = 0
# --------------
# Time stepping:
# --------------
# if dt_variable==1: variable time steps used based on cfl_desired,
# if dt_variable==0: fixed time steps dt = dt_initial will always be used.
clawdata.dt_variable = 1
# Initial time step for variable dt.
# If dt_variable==0 then dt=dt_initial for all steps:
clawdata.dt_initial = 0.5
# Max time step to be allowed if variable dt used:
clawdata.dt_max = 1e+99
# Desired Courant number if variable dt used, and max to allow without
# retaking step with a smaller dt:
clawdata.cfl_desired = 1.0
clawdata.cfl_max = 1.0
# Maximum number of time steps to allow between output times:
clawdata.max_steps = 5000
# ------------------
# Method to be used:
# ------------------
# Order of accuracy: 1 => Godunov, 2 => Lax-Wendroff plus limiters
clawdata.order = 1
# Transverse order for 2d or 3d (not used in 1d):
clawdata.order_trans = 0
# Number of waves in the Riemann solution:
clawdata.mwaves = 2
# List of limiters to use for each wave family:
# Required: len(mthlim) == mwaves
clawdata.mthlim = [2, 2]
# Source terms splitting:
# src_split == 0 => no source term (src routine never called)
# src_split == 1 => Godunov (1st order) splitting used,
# src_split == 2 => Strang (2nd order) splitting used, not recommended.
clawdata.src_split = 1
# --------------------
# Boundary conditions:
# --------------------
# Number of ghost cells (usually 2)
clawdata.mbc = 2
# Choice of BCs at xlower and xupper:
# 0 => user specified (must modify bcN.f to use this option)
# 1 => extrapolation (non-reflecting outflow)
# 2 => periodic (must specify this at both boundaries)
# 3 => solid wall for systems where q(2) is normal velocity
clawdata.mthbc_xlower = 3
clawdata.mthbc_xupper = 1
return rundata
# end of function setrun
# ----------------------
if __name__ == '__main__':
# Set up run-time parameters and write all data files.
import sys
if len(sys.argv) == 2:
rundata = setrun(sys.argv[1])
else:
rundata = setrun()
rundata.write()
| clawpack/clawpack-4.x | doc/sphinx/example-acoustics-2d/1drad/setrun.py | Python | bsd-3-clause | 5,754 | 0.012165 |
#!/usr/bin/env python
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import os
import gviz_api
import webrtc.data_helper
def main():
"""
This Python script displays a web page with test created with the
video_quality_measurement program, which is a tool in WebRTC.
The script requires on two external files and one Python library:
- A HTML template file with layout and references to the json variables
defined in this script
- A data file in Python format, containing the following:
- test_configuration - a dictionary of test configuration names and values.
- frame_data_types - a dictionary that maps the different metrics to their
data types.
- frame_data - a list of dictionaries where each dictionary maps a metric to
it's value.
- The gviz_api.py of the Google Visualization Python API, available at
http://code.google.com/p/google-visualization-python/
The HTML file is shipped with the script, while the data file must be
generated by running video_quality_measurement with the --python flag
specified.
"""
print 'Content-type: text/html\n' # the newline is required!
page_template_filename = '../templates/chart_page_template.html'
# The data files must be located in the project tree for app engine being
# able to access them.
data_filenames = ['../data/vp8_sw.py', '../data/vp8_hw.py']
# Will contain info/error messages to be displayed on the resulting page.
messages = []
# Load the page HTML template.
try:
f = open(page_template_filename)
page_template = f.read()
f.close()
except IOError as e:
ShowErrorPage('Cannot open page template file: %s<br>Details: %s' %
(page_template_filename, e))
return
# Read data from external Python script files. First check that they exist.
  for filename in data_filenames[:]:  # iterate over a copy; the list is mutated
if not os.path.exists(filename):
messages.append('Cannot open data file: %s' % filename)
data_filenames.remove(filename)
# Read data from all existing input files.
data_list = []
test_configurations = []
names = []
for filename in data_filenames:
read_vars = {} # empty dictionary to load the data into.
execfile(filename, read_vars, read_vars)
test_configuration = read_vars['test_configuration']
table_description = read_vars['frame_data_types']
table_data = read_vars['frame_data']
# Verify the data in the file loaded properly.
if not table_description or not table_data:
messages.append('Invalid input file: %s. Missing description list or '
'data dictionary variables.' % filename)
continue
# Frame numbers appear as number type in the data, but Chart API requires
# values of the X-axis to be of string type.
# Change the frame_number column data type:
table_description['frame_number'] = ('string', 'Frame number')
# Convert all the values to string types:
for row in table_data:
row['frame_number'] = str(row['frame_number'])
# Store the unique data from this file in the high level lists.
test_configurations.append(test_configuration)
data_list.append(table_data)
# Name of the test run must be present.
test_name = FindConfiguration(test_configuration, 'name')
if not test_name:
messages.append('Invalid input file: %s. Missing configuration key '
'"name"', filename)
continue
names.append(test_name)
# Create data helper and build data tables for each graph.
helper = webrtc.data_helper.DataHelper(data_list, table_description,
names, messages)
  # Load the data into gviz_api.DataTable objects and create JSON strings.
description, data = helper.CreateConfigurationTable(test_configurations)
configurations = gviz_api.DataTable(description, data)
json_configurations = configurations.ToJSon() # pylint: disable=W0612
description, data = helper.CreateData('ssim')
ssim = gviz_api.DataTable(description, data)
# pylint: disable=W0612
json_ssim_data = ssim.ToJSon(helper.GetOrdering(description))
description, data = helper.CreateData('psnr')
psnr = gviz_api.DataTable(description, data)
# pylint: disable=W0612
json_psnr_data = psnr.ToJSon(helper.GetOrdering(description))
description, data = helper.CreateData('packets_dropped')
packet_loss = gviz_api.DataTable(description, data)
# pylint: disable=W0612
json_packet_loss_data = packet_loss.ToJSon(helper.GetOrdering(description))
description, data = helper.CreateData('bit_rate')
# Add a column of data points for the desired bit rate to be plotted.
# (uses test configuration from the last data set, assuming it is the same
# for all of them)
desired_bit_rate = FindConfiguration(test_configuration, 'bit_rate_in_kbps')
if not desired_bit_rate:
    ShowErrorPage('Cannot find a configuration field named "bit_rate_in_kbps"')
return
desired_bit_rate = int(desired_bit_rate)
# Add new column data type description.
description['desired_bit_rate'] = ('number', 'Desired bit rate (kbps)')
for row in data:
row['desired_bit_rate'] = desired_bit_rate
bit_rate = gviz_api.DataTable(description, data)
# pylint: disable=W0612
json_bit_rate_data = bit_rate.ToJSon(helper.GetOrdering(description))
# Format the messages list with newlines.
messages = '\n'.join(messages)
# Put the variables as JSon strings into the template.
print page_template % vars()
def FindConfiguration(configuration, name):
""" Finds a configuration value using it's name.
Returns the first configuration with a matching name. Returns None if no
matching configuration is found. """
return_value = None
for row in configuration:
if row['name'] == name:
return_value = row['value']
break
return return_value
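# A minimal usage sketch for FindConfiguration (illustrative only; it assumes,
# as the loader above does, that a configuration is a list of
# {'name': ..., 'value': ...} dicts):
#
#   config = [{'name': 'bit_rate_in_kbps', 'value': '500'}]
#   FindConfiguration(config, 'bit_rate_in_kbps')  # -> '500'
#   FindConfiguration(config, 'missing')           # -> None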
def ShowErrorPage(error_message):
print '<html><body>%s</body></html>' % error_message
if __name__ == '__main__':
main()
| golden1232004/webrtc_new | tools/python_charts/webrtc/main.py | Python | gpl-3.0 | 6,301 | 0.011903 |
import sys
from os.path import dirname
import drain.step, drain.serialize
from drain.drake import is_target_filename, is_step_filename
if len(sys.argv) == 1:
raise ValueError('Need at least one argument')
args = sys.argv[1:]
# drain.PATH is the project root, assumed to live three directory levels
# above the first argument.
drain.PATH = dirname(dirname(dirname(args[0])))
# If the first argument is a serialized target, it is this step's output;
# consume it and shift the remaining arguments.
if is_target_filename(args[0]):
    output = drain.serialize.load(args[0])
    args = args[1:]
else:
    output = None
if not is_step_filename(args[0]):
raise ValueError('Need a step to run')
step = drain.serialize.load(args[0])
# Remaining arguments are the step's inputs (other serialized steps or targets).
inputs = []
for i in args[1:]:
    if is_step_filename(i) or is_target_filename(i):
        inputs.append(drain.serialize.load(i))
step.execute(output=output, inputs=inputs)
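# Hypothetical invocation sketch (paths are illustrative; the real naming
# convention is whatever is_step_filename/is_target_filename accept):
#
#   python run_step.py PATH/steps/<hash1>/target PATH/steps/<hash1>/step PATH/steps/<hash2>/target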
| potash/drain | bin/run_step.py | Python | mit | 692 | 0.001445 |
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys
from waflib.Tools import ccroot,ar,gcc
from waflib.Configure import conf
@conf
def find_icc(conf):
if sys.platform=='cygwin':
conf.fatal('The Intel compiler does not work on Cygwin')
v=conf.env
cc=None
if v['CC']:cc=v['CC']
elif'CC'in conf.environ:cc=conf.environ['CC']
if not cc:cc=conf.find_program('icc',var='CC')
if not cc:cc=conf.find_program('ICL',var='CC')
if not cc:conf.fatal('Intel C Compiler (icc) was not found')
cc=conf.cmd_to_list(cc)
conf.get_cc_version(cc,icc=True)
v['CC']=cc
v['CC_NAME']='icc'
def configure(conf):
conf.find_icc()
conf.find_ar()
conf.gcc_common_flags()
conf.gcc_modifier_platform()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
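# A minimal wscript sketch using this tool (illustrative; relies on waf's
# standard tool loading):
#
#   def configure(conf):
#       conf.load('icc')
#   def build(bld):
#       bld.program(source='main.c', target='app')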
| bit-trade-one/SoundModuleAP | lib-src/lv2/sratom/waflib/Tools/icc.py | Python | gpl-2.0 | 890 | 0.057303 |
#Java/SQL stuff
from java.lang import *
#Grinder stuff
from net.grinder.script.Grinder import grinder
from net.grinder.script import Test
#misc
import time
import sys
#project specific
from framework import TSdata_w, TSdata_h, importstrs
#import relevant t_DATABASENAME depending on settings in grinder.properties
inp = grinder.getProperties()["grinder.inp"]
inp = importstrs(inp)
exec(inp)
class TestRunner:
def __init__(self):
self.testdb = DBAccess(*dbargs)
self.numstreams = 100
logstr = self.testdb.init_insert(100000, self.numstreams, False)
grinder.logger.info(logstr)
#this has a crazy amount of overhead in python, need to figure out
#what's up
def __call__(self):
try:
res = self.testdb.run_insert_h()
grinder.logger.info("Insertion Results as (start time, end time, "
"completion" +
" time): (" + str(res[0]) + ", " + str(res[1]) +
", " + str(res[2]) + ")")
print("done insert")
except StopIteration:
# the test is complete
grinder.logger.info("Insertion finished at: " + str(time.time()))
self.testdb.close_all()
grinder.stopThisWorkerThread()
res = self.testdb.run_query_all()
grinder.logger.info("Query Results as (start time, end time, "
"completion" +
" time): (" + str(res[0]) + ", " + str(res[1]) +
", " + str(res[2]) + ")")
#log db size
size = self.testdb.get_db_size()
grinder.logger.info("The database size is now " + size + " bytes.")
self.testdb.reset_conn_state()
| sagark/tsdb-perf-test | tests/insertlongstream.py | Python | bsd-2-clause | 1,776 | 0.013514 |
class YamlFileNames(object):
rules = 'rules.yaml'
parsers = 'parsers.yaml'
default_log_types = 'log_types.yaml'
unix_log_types = 'unix_log_types.yaml'
windows_log_types = 'windows_log_types.yaml'
settings = 'settings.yaml'
| epawlowska/whylog | whylog/config/consts.py | Python | bsd-3-clause | 247 | 0 |
import codecs
from setuptools import setup
VERSION = '0.2.0'
def read_long_description():
long_desc = []
with codecs.open('README.rst', 'r', 'utf8') as longdesc:
long_desc.append(longdesc.read())
with codecs.open('HISTORY.rst', 'r', 'utf8') as history:
long_desc.append(history.read())
return u'\n\n'.join(long_desc)
LONG_DESCRIPTION = read_long_description()
setup(
name='get_image_size',
url='https://github.com/scardine/image_size',
version=VERSION,
long_description=LONG_DESCRIPTION,
author='github.com/scardine',
author_email=' ',
license='MIT',
py_modules=['get_image_size'],
entry_points={
'console_scripts': [
'get-image-size = get_image_size:main',
],
},
)
| scardine/image_size | setup.py | Python | mit | 776 | 0.001289 |
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ChromeOS image pusher (from cbuildbot to signer).
This pushes files from the archive bucket to the signer bucket and marks
artifacts for signing (which a signing process will look for).
"""
from __future__ import print_function
import ConfigParser
import cStringIO
import errno
import getpass
import os
import re
import tempfile
import textwrap
from chromite.cbuildbot import constants
from chromite.lib import commandline
from chromite.lib import cros_build_lib
from chromite.lib import git
from chromite.lib import gs
from chromite.lib import osutils
from chromite.lib import signing
# This will split a fully qualified ChromeOS version string up.
# R34-5126.0.0 will break into "34" and "5126.0.0".
VERSION_REGEX = r'^R([0-9]+)-([^-]+)'
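# For example (illustrative): re.match(VERSION_REGEX, 'R34-5126.0.0').groups()
# returns ('34', '5126.0.0').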
# The test signers will scan this dir looking for test work.
# Keep it in sync with the signer config files [gs_test_buckets].
TEST_SIGN_BUCKET_BASE = 'gs://chromeos-throw-away-bucket/signer-tests'
# Keysets that are only valid in the above test bucket.
TEST_KEYSETS = set(('test-keys-mp', 'test-keys-premp'))
class PushError(Exception):
"""When an (unknown) error happened while trying to push artifacts."""
class MissingBoardInstructions(Exception):
"""Raised when a board lacks any signer instructions."""
class InputInsns(object):
"""Object to hold settings for a signable board.
Note: The format of the instruction file pushimage outputs (and the signer
reads) is not exactly the same as the instruction file pushimage reads.
"""
def __init__(self, board):
self.board = board
config = ConfigParser.ConfigParser()
config.readfp(open(self.GetInsnFile('DEFAULT')))
try:
input_insn = self.GetInsnFile('recovery')
config.readfp(open(input_insn))
except IOError as e:
if e.errno == errno.ENOENT:
# This board doesn't have any signing instructions.
# This is normal for new or experimental boards.
raise MissingBoardInstructions(input_insn)
raise
self.cfg = config
def GetInsnFile(self, image_type):
"""Find the signer instruction files for this board/image type.
Args:
image_type: The type of instructions to load. It can be a common file
(like "DEFAULT"), or one of the --sign-types.
Returns:
Full path to the instruction file using |image_type| and |self.board|.
"""
if image_type == image_type.upper():
name = image_type
elif image_type == 'recovery':
name = self.board
else:
name = '%s.%s' % (self.board, image_type)
return os.path.join(signing.INPUT_INSN_DIR, '%s.instructions' % name)
@staticmethod
def SplitCfgField(val):
"""Split a string into multiple elements.
This centralizes our convention for multiple elements in the input files
being delimited by either a space or comma.
Args:
val: The string to split.
Returns:
The list of elements from having done split the string.
"""
return val.replace(',', ' ').split()
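  # Illustrative behavior (commas and spaces are equivalent delimiters):
  #   SplitCfgField('dev,beta stable') -> ['dev', 'beta', 'stable']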
def GetChannels(self):
"""Return the list of channels to sign for this board.
If the board-specific config doesn't specify a preference, we'll use the
common settings.
"""
return self.SplitCfgField(self.cfg.get('insns', 'channel'))
def GetKeysets(self):
"""Return the list of keysets to sign for this board."""
return self.SplitCfgField(self.cfg.get('insns', 'keyset'))
def OutputInsns(self, image_type, output_file, sect_insns, sect_general):
"""Generate the output instruction file for sending to the signer.
Note: The format of the instruction file pushimage outputs (and the signer
reads) is not exactly the same as the instruction file pushimage reads.
Args:
image_type: The type of image we will be signing (see --sign-types).
output_file: The file to write the new instruction file to.
sect_insns: Items to set/override in the [insns] section.
sect_general: Items to set/override in the [general] section.
"""
config = ConfigParser.ConfigParser()
config.readfp(open(self.GetInsnFile(image_type)))
# Clear channel entry in instructions file, ensuring we only get
# one channel for the signer to look at. Then provide all the
# other details for this signing request to avoid any ambiguity
# and to avoid relying on encoding data into filenames.
for sect, fields in zip(('insns', 'general'), (sect_insns, sect_general)):
if not config.has_section(sect):
config.add_section(sect)
for k, v in fields.iteritems():
config.set(sect, k, v)
output = cStringIO.StringIO()
config.write(output)
data = output.getvalue()
osutils.WriteFile(output_file, data)
cros_build_lib.Debug('generated insns file for %s:\n%s', image_type, data)
def MarkImageToBeSigned(ctx, tbs_base, insns_path, priority):
"""Mark an instructions file for signing.
This will upload a file to the GS bucket flagging an image for signing by
the signers.
Args:
ctx: A viable gs.GSContext.
tbs_base: The full path to where the tobesigned directory lives.
insns_path: The path (relative to |tbs_base|) of the file to sign.
priority: Set the signing priority (lower == higher prio).
Returns:
The full path to the remote tobesigned file.
"""
if priority < 0 or priority > 99:
raise ValueError('priority must be [0, 99] inclusive')
if insns_path.startswith(tbs_base):
insns_path = insns_path[len(tbs_base):].lstrip('/')
tbs_path = '%s/tobesigned/%02i,%s' % (tbs_base, priority,
insns_path.replace('/', ','))
with tempfile.NamedTemporaryFile(
bufsize=0, prefix='pushimage.tbs.') as temp_tbs_file:
lines = [
'PROG=%s' % __file__,
'USER=%s' % getpass.getuser(),
'HOSTNAME=%s' % cros_build_lib.GetHostName(fully_qualified=True),
'GIT_REV=%s' % git.RunGit(constants.CHROMITE_DIR,
['rev-parse', 'HEAD']).output.rstrip(),
]
osutils.WriteFile(temp_tbs_file.name, '\n'.join(lines) + '\n')
# The caller will catch gs.GSContextException for us.
ctx.Copy(temp_tbs_file.name, tbs_path)
return tbs_path
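# Illustrative result of the path construction above (values are made up):
#   MarkImageToBeSigned(ctx, 'gs://bucket', 'dir/file.instructions', 50)
#   -> 'gs://bucket/tobesigned/50,dir,file.instructions'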
def PushImage(src_path, board, versionrev=None, profile=None, priority=50,
sign_types=None, dry_run=False, mock=False, force_keysets=()):
"""Push the image from the archive bucket to the release bucket.
Args:
src_path: Where to copy the files from; can be a local path or gs:// URL.
Should be a full path to the artifacts in either case.
board: The board we're uploading artifacts for (e.g. $BOARD).
versionrev: The full Chromium OS version string (e.g. R34-5126.0.0).
profile: The board profile in use (e.g. "asan").
priority: Set the signing priority (lower == higher prio).
sign_types: If set, a set of types which we'll restrict ourselves to
signing. See the --sign-types option for more details.
dry_run: Show what would be done, but do not upload anything.
mock: Upload to a testing bucket rather than the real one.
force_keysets: Set of keysets to use rather than what the inputs say.
Returns:
A dictionary that maps 'channel' -> ['gs://signer_instruction_uri1',
'gs://signer_instruction_uri2',
...]
"""
# Whether we hit an unknown error. If so, we'll throw an error, but only
# at the end (so that we still upload as many files as possible).
unknown_error = False
if versionrev is None:
# Extract milestone/version from the directory name.
versionrev = os.path.basename(src_path)
# We only support the latest format here. Older releases can use pushimage
# from the respective branch which deals with legacy cruft.
m = re.match(VERSION_REGEX, versionrev)
if not m:
raise ValueError('version %s does not match %s' %
(versionrev, VERSION_REGEX))
milestone = m.group(1)
version = m.group(2)
# Normalize board to always use dashes not underscores. This is mostly a
# historical artifact at this point, but we can't really break it since the
# value is used in URLs.
boardpath = board.replace('_', '-')
if profile is not None:
boardpath += '-%s' % profile.replace('_', '-')
ctx = gs.GSContext(dry_run=dry_run)
try:
input_insns = InputInsns(board)
except MissingBoardInstructions as e:
cros_build_lib.Warning('board "%s" is missing base instruction file: %s',
board, e)
cros_build_lib.Warning('not uploading anything for signing')
return
channels = input_insns.GetChannels()
# We want force_keysets as a set, and keysets as a list.
force_keysets = set(force_keysets)
keysets = list(force_keysets) if force_keysets else input_insns.GetKeysets()
if mock:
cros_build_lib.Info('Upload mode: mock; signers will not process anything')
tbs_base = gs_base = os.path.join(constants.TRASH_BUCKET, 'pushimage-tests',
getpass.getuser())
elif TEST_KEYSETS & force_keysets:
cros_build_lib.Info('Upload mode: test; signers will process test keys')
# We need the tbs_base to be in the place the signer will actually scan.
tbs_base = TEST_SIGN_BUCKET_BASE
gs_base = os.path.join(tbs_base, getpass.getuser())
else:
cros_build_lib.Info('Upload mode: normal; signers will process the images')
tbs_base = gs_base = constants.RELEASE_BUCKET
sect_general = {
'config_board': board,
'board': boardpath,
'version': version,
'versionrev': versionrev,
'milestone': milestone,
}
sect_insns = {}
if dry_run:
cros_build_lib.Info('DRY RUN MODE ACTIVE: NOTHING WILL BE UPLOADED')
cros_build_lib.Info('Signing for channels: %s', ' '.join(channels))
cros_build_lib.Info('Signing for keysets : %s', ' '.join(keysets))
instruction_urls = {}
def _ImageNameBase(image_type=None):
lmid = ('%s-' % image_type) if image_type else ''
return 'ChromeOS-%s%s-%s' % (lmid, versionrev, boardpath)
for channel in channels:
cros_build_lib.Debug('\n\n#### CHANNEL: %s ####\n', channel)
sect_insns['channel'] = channel
sub_path = '%s-channel/%s/%s' % (channel, boardpath, version)
dst_path = '%s/%s' % (gs_base, sub_path)
cros_build_lib.Info('Copying images to %s', dst_path)
recovery_base = _ImageNameBase('recovery')
factory_base = _ImageNameBase('factory')
firmware_base = _ImageNameBase('firmware')
test_base = _ImageNameBase('test')
hwqual_tarball = 'chromeos-hwqual-%s-%s.tar.bz2' % (board, versionrev)
# Upload all the files first before flagging them for signing.
files_to_copy = (
        # (<src>, <dst>, <suffix>, <signing type>)
('recovery_image.tar.xz', recovery_base, 'tar.xz',
'recovery'),
('factory_image.zip', factory_base, 'zip',
'factory'),
('firmware_from_source.tar.bz2', firmware_base, 'tar.bz2',
'firmware'),
('image.zip', _ImageNameBase(), 'zip', ''),
('chromiumos_test_image.tar.xz', test_base, 'tar.xz', ''),
('debug.tgz', 'debug-%s' % boardpath, 'tgz', ''),
(hwqual_tarball, '', '', ''),
('au-generator.zip', '', '', ''),
)
files_to_sign = []
for src, dst, sfx, image_type in files_to_copy:
if not dst:
dst = src
elif sfx:
dst += '.%s' % sfx
try:
ctx.Copy(os.path.join(src_path, src), os.path.join(dst_path, dst))
except gs.GSNoSuchKey:
cros_build_lib.Warning('Skipping %s as it does not exist', src)
continue
except gs.GSContextException:
unknown_error = True
cros_build_lib.Error('Skipping %s due to unknown GS error', src,
exc_info=True)
continue
if image_type:
dst_base = dst[:-(len(sfx) + 1)]
assert dst == '%s.%s' % (dst_base, sfx)
files_to_sign += [[image_type, dst_base, '.%s' % sfx]]
# Now go through the subset for signing.
for keyset in keysets:
cros_build_lib.Debug('\n\n#### KEYSET: %s ####\n', keyset)
sect_insns['keyset'] = keyset
for image_type, dst_name, suffix in files_to_sign:
dst_archive = '%s%s' % (dst_name, suffix)
sect_general['archive'] = dst_archive
sect_general['type'] = image_type
# See if the caller has requested we only sign certain types.
if sign_types:
          if image_type not in sign_types:
cros_build_lib.Info('Skipping %s signing as it was not requested',
image_type)
continue
else:
# In the default/automatic mode, only flag files for signing if the
# archives were actually uploaded in a previous stage.
gs_artifact_path = os.path.join(dst_path, dst_archive)
try:
exists = ctx.Exists(gs_artifact_path)
except gs.GSContextException:
unknown_error = True
exists = False
cros_build_lib.Error('Unknown error while checking %s',
gs_artifact_path, exc_info=True)
if not exists:
cros_build_lib.Info('%s does not exist. Nothing to sign.',
gs_artifact_path)
continue
input_insn_path = input_insns.GetInsnFile(image_type)
if not os.path.exists(input_insn_path):
cros_build_lib.Info('%s does not exist. Nothing to sign.',
input_insn_path)
continue
# Generate the insn file for this artifact that the signer will use,
# and flag it for signing.
with tempfile.NamedTemporaryFile(
bufsize=0, prefix='pushimage.insns.') as insns_path:
input_insns.OutputInsns(image_type, insns_path.name, sect_insns,
sect_general)
gs_insns_path = '%s/%s' % (dst_path, dst_name)
if keyset != keysets[0]:
gs_insns_path += '-%s' % keyset
gs_insns_path += '.instructions'
try:
ctx.Copy(insns_path.name, gs_insns_path)
except gs.GSContextException:
unknown_error = True
cros_build_lib.Error('Unknown error while uploading insns %s',
gs_insns_path, exc_info=True)
continue
try:
MarkImageToBeSigned(ctx, tbs_base, gs_insns_path, priority)
except gs.GSContextException:
unknown_error = True
cros_build_lib.Error('Unknown error while marking for signing %s',
gs_insns_path, exc_info=True)
continue
cros_build_lib.Info('Signing %s image %s', image_type, gs_insns_path)
instruction_urls.setdefault(channel, []).append(gs_insns_path)
if unknown_error:
raise PushError('hit some unknown error(s)', instruction_urls)
return instruction_urls
def main(argv):
parser = commandline.ArgumentParser(description=__doc__)
# The type of image_dir will strip off trailing slashes (makes later
# processing simpler and the display prettier).
parser.add_argument('image_dir', default=None, type='local_or_gs_path',
help='full path of source artifacts to upload')
  parser.add_argument('--board', default=None, required=True,
                      help='board whose artifacts should be pushed for signing')
parser.add_argument('--profile', default=None,
help='board profile in use (e.g. "asan")')
parser.add_argument('--version', default=None,
help='version info (normally extracted from image_dir)')
parser.add_argument('-n', '--dry-run', default=False, action='store_true',
help='show what would be done, but do not upload')
parser.add_argument('-M', '--mock', default=False, action='store_true',
help='upload things to a testing bucket (dev testing)')
parser.add_argument('--test-sign-mp', default=False, action='store_true',
help='mung signing behavior to sign w/test mp keys')
parser.add_argument('--test-sign-premp', default=False, action='store_true',
help='mung signing behavior to sign w/test premp keys')
parser.add_argument('--priority', type=int, default=50,
help='set signing priority (lower == higher prio)')
parser.add_argument('--sign-types', default=None, nargs='+',
choices=('recovery', 'factory', 'firmware'),
help='only sign specified image types')
parser.add_argument('--yes', action='store_true', default=False,
help='answer yes to all prompts')
opts = parser.parse_args(argv)
opts.Freeze()
force_keysets = set()
if opts.test_sign_mp:
force_keysets.add('test-keys-mp')
if opts.test_sign_premp:
force_keysets.add('test-keys-premp')
# If we aren't using mock or test or dry run mode, then let's prompt the user
# to make sure they actually want to do this. It's rare that people want to
# run this directly and hit the release bucket.
if not (opts.mock or force_keysets or opts.dry_run) and not opts.yes:
prolog = '\n'.join(textwrap.wrap(textwrap.dedent(
'Uploading images for signing to the *release* bucket is not something '
'you generally should be doing yourself.'), 80)).strip()
if not cros_build_lib.BooleanPrompt(
prompt='Are you sure you want to sign these images',
default=False, prolog=prolog):
cros_build_lib.Die('better safe than sorry')
PushImage(opts.image_dir, opts.board, versionrev=opts.version,
profile=opts.profile, priority=opts.priority,
sign_types=opts.sign_types, dry_run=opts.dry_run, mock=opts.mock,
force_keysets=force_keysets)
| bpsinc-native/src_third_party_chromite | scripts/pushimage.py | Python | bsd-3-clause | 18,246 | 0.006577 |
# WARPnet Client<->Server Architecture
# WARPnet Parameter Definitions
#
# Author: Siddharth Gupta
import struct, time
from warpnet_common_params import *
from warpnet_client_definitions import *
from twisted.internet import reactor
import binascii
# Struct IDs
STRUCTID_CONTROL = 0x13
STRUCTID_CONTROL_ACK = 0x14
STRUCTID_COMMAND = 0x17
STRUCTID_COMMAND_ACK = 0x18
STRUCTID_OBSERVE_BER = 0x24
STRUCTID_OBSERVE_BER_REQ = 0x25
STRUCTID_OBSERVE_PER = 0x26
STRUCTID_OBSERVE_PER_REQ = 0x27
# Command IDs
COMMANDID_STARTTRIAL = 0x40
COMMANDID_STOPTRIAL = 0x41
COMMANDID_RESET_PER = 0x50
COMMANDID_ENABLE_BER_TESTING = 0x51
COMMANDID_DISABLE_BER_TESTING = 0x52
########################
## Struct Definitions ##
########################
# ControlStruct is a ClientStruct that stores some basic parameters to pass to the WARP board. The local variable can be accessed
# globally by calling ControlStruct.txPower etc. The struct must also understand the conversion from integer values to binary
# using the prepToSend function; it will be provided with the nodeID.
# typedef struct {
# char structID;
# char nodeID;
# char txPower;
# char channel;
# char modOrderHeader;
# char modOrderPayload;
# short reserved;
# int pktGen_period;
# int pktGen_length;
# } warpnetControl;
class ControlStruct(ClientStruct):
txPower = -1
channel = -1
modOrderHeader = -1
modOrderPayload = -1
reserved = 0
packetGeneratorPeriod = 0
packetGeneratorLength = 0
def __init__(self):
self.structID = STRUCTID_CONTROL
self.txPower = 63
self.channel = 4
self.modOrderHeader = 0
self.modOrderPayload = 2
self.packetGeneratorPeriod = 0
self.packetGeneratorLength = 1300
self.expectedReturnStructID = STRUCTID_CONTROL_ACK
def prepToSend(self, nodeID):
self.updateDone = False
return struct.pack('!6BHII', self.structID, nodeID, self.txPower, self.channel, self.modOrderHeader, self.modOrderPayload, self.reserved, self.packetGeneratorPeriod, self.packetGeneratorLength)
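	# Illustrative note: with the defaults above, prepToSend(nodeID=3) packs six
	# bytes, one unsigned short and two unsigned ints big-endian, so
	# struct.calcsize('!6BHII') == 16 bytes go on the wire.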
def updateFromNode(self, rawData, pcapts):
dataTuple = struct.unpack('!BBH', rawData[0:4])
#print "Control struct successfully applied at node %d" % dataTuple[1]
#CommandStruct is used to send commands or requests to the WARP nodes
# The cmdIDs are defined above
# Matching C code definition:
# typedef struct {
# char structID;
# char nodeID;
# char cmdID;
# char cmdParam;
# } warpnetCommand;
class CommandStruct(ClientStruct):
cmdID = -1
cmdParam = -1
def __init__(self, cmdID, cmdParam):
self.structID = STRUCTID_COMMAND
self.expectedReturnStructID = STRUCTID_COMMAND_ACK
self.cmdID = cmdID
self.cmdParam = cmdParam
def prepToSend(self, nodeID):
self.updateDone = False
return struct.pack('!4B', self.structID, nodeID, self.cmdID, self.cmdParam)
def updateFromNode(self, rawData, pcapts):
pass
#print "Successfully executed command %d" % self.cmdID
#ObservePERStruct collects packet error rate (PER) data from WARP nodes
# Matching C code definition:
# typedef struct {
# unsigned char structID;
# unsigned char nodeID;
# unsigned char reqNum;
# unsigned char reqType;
# unsigned int numPkts_tx;
# unsigned int numPkts_rx_good;
# unsigned int numPkts_rx_goodHdrBadPyld;
# unsigned int numPkts_rx_badHdr;
# } warpnetObservePER;
class ObservePERStruct(ClientStruct):
numPkts_tx = -1
numPkts_rx_good = -1
numPkts_rx_goodHdrBadPyld = -1
numPkts_rx_badHdr = -1
reqNum = -1
reqType = -1
def __init__(self, logger=None):
ClientStruct.__init__(self, logger)
self.structID = STRUCTID_OBSERVE_PER_REQ
self.expectedReturnStructID = STRUCTID_OBSERVE_PER
self.numPkts_tx = 0
self.numPkts_rx_good = 0
self.numPkts_rx_goodHdrBadPyld = 0
self.numPkts_rx_badHdr = 0
self.reqNum = 0
self.reqType = 0
def prepToSend(self, nodeID):
self.updateDone = False
return struct.pack('!4B', self.structID, nodeID, self.reqNum, self.reqType)
def updateFromNode(self, rawData, pcapts):
dataTuple = struct.unpack('!2B 2B 4I', rawData[0:20])
self.reqNum = dataTuple[2]
self.reqType = dataTuple[3]
self.numPkts_tx = dataTuple[4]
self.numPkts_rx_good = dataTuple[5]
self.numPkts_rx_goodHdrBadPyld = dataTuple[6]
self.numPkts_rx_badHdr = dataTuple[7]
#Client struct for collecting BER updates from the ber_processor program
# Matching C code struct:
# typedef struct {
# unsigned char structID;
# unsigned char nodeID;
# unsigned short sequenceNumber;
# unsigned char nodeID_tx;
# unsigned char nodeID_rx;
# unsigned short mac_seqNum;
# unsigned char mac_pktType;
# unsigned char reserved0;
# unsigned char reserved1;
# unsigned char reserved2;
# unsigned int bits_rx;
# unsigned int bits_errors;
# } warpnetObserveBER;
class ObserveBERStruct(ClientStruct):
totalBitsReceived = 0
totalBitErrors = 0
nodeID_tx = -1
nodeID_rx = -1
def __init__(self, logger=None):
ClientStruct.__init__(self, logger)
self.structID = STRUCTID_OBSERVE_BER_REQ
self.expectedReturnStructID = STRUCTID_OBSERVE_BER
self.totalBitsReceived = 0
self.totalBitErrors = 0
def prepToSend(self, nodeID):
self.updateDone = False
return struct.pack('!BBH', self.structID, nodeID, 0)
def updateFromNode(self, rawData, pcapts):
dataTuple = struct.unpack('!2B H 2B H 2I', rawData[0:16])
self.nodeID_tx = dataTuple[3]
self.nodeID_rx = dataTuple[4]
self.totalBitsReceived += dataTuple[6]
self.totalBitErrors += dataTuple[7]
def clearBitCounts(self):
self.totalBitsReceived = 0
self.totalBitErrors = 0
| shailcoolboy/Warp-Trinity | ResearchApps/Measurement/examples/TxPower_vs_BER/warpnet_experiment_structs.py | Python | bsd-2-clause | 5,510 | 0.023775 |
from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from django.utils.translation import ugettext_lazy as _
User = get_user_model()
class AdminUserCreationForm(forms.ModelForm):
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email']
def clean_password2(self):
password1 = self.cleaned_data.get('password1')
password2 = self.cleaned_data.get('password2')
if password1 and password2 and password1 != password2:
raise forms.ValidationError("Passwords don't match")
return password2
def save(self, commit=True):
user = super(AdminUserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class AdminUserChangeForm(forms.ModelForm):
password = ReadOnlyPasswordHashField(label= ("Password"),
help_text= ("<a href=\"../password/\">Change password</a>"))
class Meta:
model = User
fields = ['username', 'email', 'password', 'is_banned', 'is_admin']
def clean_password(self):
return self.initial['password']
class AdminPasswordChangeForm(forms.Form):
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
password1 = forms.CharField(label=_("Password"),
widget=forms.PasswordInput)
password2 = forms.CharField(label=_("Password (again)"),
widget=forms.PasswordInput)
def __init__(self, user, *args, **kwargs):
self.user = user
super(AdminPasswordChangeForm, self).__init__(*args, **kwargs)
def clean_password2(self):
password1 = self.cleaned_data.get('password1')
password2 = self.cleaned_data.get('password2')
if password1 and password2:
if password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'])
return password2
def save(self, commit=True):
self.user.set_password(self.cleaned_data["password1"])
if commit:
self.user.save()
return self.user | kasper190/SPAforum | accounts/forms.py | Python | mit | 2,436 | 0.002053 |
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_email_server
short_description: Configure the email server used by the FortiGate for various things. For example, for sending email messages to users to support user
    authentication features in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS device by allowing the
user to set and modify system feature and email_server category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
system_email_server:
description:
            - Configure the email server used by the FortiGate for various things. For example, for sending email messages to users to support user
              authentication features.
default: null
type: dict
suboptions:
authenticate:
description:
- Enable/disable authentication.
type: str
choices:
- enable
- disable
password:
description:
- SMTP server user password for authentication.
type: str
port:
description:
- SMTP server port.
type: int
reply_to:
description:
- Reply-To email address.
type: str
security:
description:
- Connection security used by the email server.
type: str
choices:
- none
- starttls
- smtps
server:
description:
- SMTP server IP address or hostname.
type: str
source_ip:
description:
- SMTP server IPv4 source IP.
type: str
source_ip6:
description:
- SMTP server IPv6 source IP.
type: str
ssl_min_proto_version:
description:
- Minimum supported protocol version for SSL/TLS connections (default is to follow system global setting).
type: str
choices:
- default
- SSLv3
- TLSv1
- TLSv1-1
- TLSv1-2
type:
description:
- Use FortiGuard Message service or custom email server.
type: str
choices:
- custom
username:
description:
- SMTP server user name for authentication.
type: str
validate_server:
description:
- Enable/disable validation of server certificate.
type: str
choices:
- enable
- disable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
  - name: Configure the email server used by the FortiGate for various things. For example, for sending email messages to users to support user
      authentication features.
fortios_system_email_server:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
system_email_server:
authenticate: "enable"
password: "<your_own_value>"
port: "5"
reply_to: "<your_own_value>"
security: "none"
server: "192.168.100.40"
source_ip: "84.230.14.43"
source_ip6: "<your_own_value>"
ssl_min_proto_version: "default"
type: "custom"
username: "<your_own_value>"
validate_server: "enable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_system_email_server_data(json):
option_list = ['authenticate', 'password', 'port',
'reply_to', 'security', 'server',
'source_ip', 'source_ip6', 'ssl_min_proto_version',
'type', 'username', 'validate_server']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
    if isinstance(data, list):
        # Rebuild the list: rebinding the loop variable would not replace the
        # converted elements inside the original list.
        data = [underscore_to_hyphen(elem) for elem in data]
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
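# Illustrative conversion (keys are rewritten recursively; values are left
# untouched):
#   underscore_to_hyphen({'source_ip': '1.2.3.4', 'reply_to': 'a@b'})
#   -> {'source-ip': '1.2.3.4', 'reply-to': 'a@b'}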
def system_email_server(data, fos):
vdom = data['vdom']
system_email_server_data = data['system_email_server']
filtered_data = underscore_to_hyphen(filter_system_email_server_data(system_email_server_data))
return fos.set('system',
'email-server',
data=filtered_data,
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_system(data, fos):
if data['system_email_server']:
resp = system_email_server(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"system_email_server": {
"required": False, "type": "dict", "default": None,
"options": {
"authenticate": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"password": {"required": False, "type": "str"},
"port": {"required": False, "type": "int"},
"reply_to": {"required": False, "type": "str"},
"security": {"required": False, "type": "str",
"choices": ["none", "starttls", "smtps"]},
"server": {"required": False, "type": "str"},
"source_ip": {"required": False, "type": "str"},
"source_ip6": {"required": False, "type": "str"},
"ssl_min_proto_version": {"required": False, "type": "str",
"choices": ["default", "SSLv3", "TLSv1",
"TLSv1-1", "TLSv1-2"]},
"type": {"required": False, "type": "str",
"choices": ["custom"]},
"username": {"required": False, "type": "str"},
"validate_server": {"required": False, "type": "str",
"choices": ["enable", "disable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_system(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_system(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| amenonsen/ansible | lib/ansible/modules/network/fortios/fortios_system_email_server.py | Python | gpl-3.0 | 12,462 | 0.001284 |
import json
from functools import reduce
from typing import Optional
from asyncpg import Connection
from qllr.common import DATETIME_FORMAT, clean_name, convert_timestamp_to_tuple
from qllr.db import cache
from qllr.exceptions import MatchNotFound, PlayerNotFound
from qllr.settings import MOVING_AVG_COUNT
async def get_player_info_mod_date(
con: Connection, steam_id: int, gametype_id: Optional[int] = None
):
query = """
SELECT MAX(last_played_timestamp)
FROM gametype_ratings
WHERE steam_id = $1
"""
params = [steam_id]
if gametype_id is not None:
query += " AND gametype_id = $2"
params.append(gametype_id)
return convert_timestamp_to_tuple(await con.fetchval(query, *params))
async def get_player_info(con: Connection, steam_id: int):
await con.set_type_codec(
"json", encoder=json.dumps, decoder=json.loads, schema="pg_catalog"
)
def choose_rating_values(item: dict):
if cache.USE_AVG_PERF[item["gametype_short"]]:
item["rating"] = item["r2_value"]
item["rating_d"] = 0
else:
item["rating"] = item["r1_mean"]
item["rating_d"] = item["r1_deviation"]
del item["r1_mean"]
del item["r1_deviation"]
del item["r2_value"]
return item
# player name, rating and games played
query = """
SELECT json_build_object(
'name', p.name,
'ratings', COALESCE(t.ratings, '{ }')
)
FROM players p
LEFT JOIN (
SELECT gr.steam_id, array_agg( json_build_object(
'r1_mean', CAST( ROUND( CAST(gr.r1_mean AS NUMERIC), 2) AS REAL ),
'r1_deviation', CAST( ROUND( CAST(gr.r1_deviation AS NUMERIC), 2) AS REAL ),
'r2_value', CAST( ROUND( CAST(gr.r2_value AS NUMERIC), 2) AS REAL ),
'n', gr.n,
'gametype_short', g.gametype_short,
'gametype', g.gametype_name
) ORDER by gr.n DESC ) AS ratings
FROM gametype_ratings gr
LEFT JOIN gametypes g ON g.gametype_id = gr.gametype_id
WHERE gr.steam_id = $1
GROUP BY gr.steam_id
) t ON p.steam_id = t.steam_id
WHERE p.steam_id = $1
"""
result = await con.fetchval(query, steam_id)
if result is None:
raise PlayerNotFound(steam_id)
result["ratings"] = list(map(choose_rating_values, result["ratings"]))
# weapon stats (frags + acc)
query = """
SELECT array_agg(json_build_object(
'name', w.weapon_name,
'short', w.weapon_short,
'frags', t2.frags,
'acc', t.accuracy
) ORDER BY t.weapon_id ASC)
FROM (
SELECT
weapon_id,
CASE
WHEN SUM(shots) = 0 THEN 0
ELSE CAST(100. * SUM(hits) / SUM(shots) AS INT)
END AS accuracy
FROM (
SELECT weapon_id, frags, hits, shots
FROM scoreboards_weapons sw
LEFT JOIN ( -- TODO: need to change from LEFT JOIN to WHERE match_id IN
SELECT m.match_id
FROM matches m
LEFT JOIN scoreboards s ON s.match_id = m.match_id
WHERE steam_id = $2
ORDER BY timestamp DESC LIMIT $1
) m ON m.match_id = sw.match_id
WHERE sw.steam_id = $2
) sw
GROUP BY weapon_id
) t
LEFT JOIN weapons w ON t.weapon_id = w.weapon_id
LEFT JOIN (
SELECT
weapon_id,
SUM(frags) AS frags
FROM scoreboards_weapons sw
WHERE steam_id = $2
GROUP BY weapon_id
) t2 ON t2.weapon_id = t.weapon_id
"""
    # TODO: cover the case where weapon_stats is an empty array
result["weapon_stats"] = await con.fetchval(query, MOVING_AVG_COUNT, steam_id) or []
# fav map
query = """
SELECT map_name
FROM (
SELECT map_id, COUNT(*) AS n
FROM matches m
WHERE match_id IN (SELECT match_id FROM scoreboards WHERE steam_id = $1)
GROUP BY map_id
) t
LEFT JOIN maps ON maps.map_id = t.map_id
ORDER BY n DESC, maps.map_id ASC
LIMIT 1
"""
row = await con.fetchval(query, steam_id)
fav_map = "None"
if row is not None:
fav_map = row
fav_gt = "None"
if len(result["ratings"]) > 0:
fav_gt = result["ratings"][0]["gametype"]
result["fav"] = {
"map": fav_map,
"gt": fav_gt,
"wpn": reduce(
lambda sum, x: sum if sum["frags"] > x["frags"] else x,
result["weapon_stats"],
{"frags": 0, "name": "None"},
)["name"],
}
# 10 last matches
query = """
SELECT
array_agg(json_build_object(
'match_id', m.match_id,
'datetime', to_char(to_timestamp(timestamp), '{DATETIME_FORMAT}'),
'timestamp', timestamp,
'gametype', g.gametype_short,
'result', CASE
WHEN m.team1_score > m.team2_score AND m.team = 1 THEN 'Win'
WHEN m.team1_score < m.team2_score AND m.team = 2 THEN 'Win'
ELSE 'Loss'
END,
'team1_score', m.team1_score,
'team2_score', m.team2_score,
'map', mm.map_name
) ORDER BY timestamp DESC) AS matches
FROM(
SELECT s.steam_id, s.team, m.*
FROM scoreboards s
LEFT JOIN matches m ON s.match_id = m.match_id
WHERE s.steam_id = $1
ORDER BY timestamp DESC
LIMIT 10
) m
LEFT JOIN gametypes g ON g.gametype_id = m.gametype_id
LEFT JOIN maps mm ON mm.map_id = m.map_id
""".format(
DATETIME_FORMAT=DATETIME_FORMAT
)
result["matches"] = await con.fetchval(query, steam_id)
return {"response": result, "title": clean_name(result["name"])}
async def get_best_match_of_player(
con: Connection, steam_id: int, gametype_id: int
) -> str:
query = """
SELECT s.match_id::text
FROM scoreboards s
WHERE match_id IN (
SELECT match_id
FROM matches
WHERE gametype_id = $1
) AND
match_perf IS NOT NULL AND
alive_time >= 1200 AND
steam_id = $2
ORDER BY match_perf DESC
LIMIT 1
"""
result = await con.fetchval(query, gametype_id, steam_id)
if result is None:
raise MatchNotFound("could not detect player's best match")
return result
| em92/quakelive-local-ratings | qllr/blueprints/player/methods.py | Python | agpl-3.0 | 6,371 | 0.000942 |
# -*- coding: utf-8 -*-
"""
logbook.notifiers
~~~~~~~~~~~~~~~~~
System notify handlers for OSX and Linux.
:copyright: (c) 2010 by Armin Ronacher, Christopher Grebs.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import base64
from time import time
from logbook.base import NOTSET, ERROR, WARNING
from logbook.handlers import Handler, LimitingHandlerMixin
from logbook.helpers import get_application_name, PY2, http_client, u
if PY2:
from urllib import urlencode
else:
from urllib.parse import urlencode
def create_notification_handler(application_name=None, level=NOTSET,
icon=None):
"""Creates a handler perfectly fit the current platform. On Linux
systems this creates a :class:`LibNotifyHandler`, on OS X systems it
will create a :class:`GrowlHandler`.
"""
if sys.platform == 'darwin':
return GrowlHandler(application_name, level=level, icon=icon)
return LibNotifyHandler(application_name, level=level, icon=icon)
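# A minimal usage sketch (illustrative; 'my-app' and the logger name are
# placeholders):
#
#   handler = create_notification_handler('my-app')
#   with handler.applicationbound():
#       log.error('something broke')  # shows a desktop notification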
class NotificationBaseHandler(Handler, LimitingHandlerMixin):
"""Baseclass for notification handlers."""
def __init__(self, application_name=None, record_limit=None,
record_delta=None, level=NOTSET, filter=None, bubble=False):
Handler.__init__(self, level, filter, bubble)
LimitingHandlerMixin.__init__(self, record_limit, record_delta)
if application_name is None:
application_name = get_application_name()
self.application_name = application_name
def make_title(self, record):
"""Called to get the title from the record."""
return u('%s: %s') % (record.channel, record.level_name.title())
def make_text(self, record):
"""Called to get the text of the record."""
return record.message
class GrowlHandler(NotificationBaseHandler):
"""A handler that dispatches to Growl. Requires that either growl-py or
py-Growl are installed.
"""
def __init__(self, application_name=None, icon=None, host=None,
password=None, record_limit=None, record_delta=None,
level=NOTSET, filter=None, bubble=False):
NotificationBaseHandler.__init__(self, application_name, record_limit,
record_delta, level, filter, bubble)
# growl is using the deprecated md5 module, but we really don't need
# to see that deprecation warning
from warnings import filterwarnings
filterwarnings(module='Growl', category=DeprecationWarning,
action='ignore')
try:
import Growl
self._growl = Growl
except ImportError:
raise RuntimeError('The growl module is not available. You have '
'to install either growl-py or py-Growl to '
'use the GrowlHandler.')
if icon is not None:
if not os.path.isfile(icon):
raise IOError('Filename to an icon expected.')
icon = self._growl.Image.imageFromPath(icon)
else:
try:
icon = self._growl.Image.imageWithIconForCurrentApplication()
except TypeError:
icon = None
self._notifier = self._growl.GrowlNotifier(
applicationName=self.application_name,
applicationIcon=icon,
notifications=['Notset', 'Debug', 'Info', 'Notice', 'Warning',
'Error', 'Critical'],
hostname=host,
password=password
)
self._notifier.register()
def is_sticky(self, record):
"""Returns `True` if the sticky flag should be set for this record.
The default implementation marks errors and criticals sticky.
"""
return record.level >= ERROR
def get_priority(self, record):
"""Returns the priority flag for Growl. Errors and criticals are
get highest priority (2), warnings get higher priority (1) and the
rest gets 0. Growl allows values between -2 and 2.
"""
if record.level >= ERROR:
return 2
elif record.level == WARNING:
return 1
return 0
def emit(self, record):
if not self.check_delivery(record)[1]:
return
self._notifier.notify(record.level_name.title(),
self.make_title(record),
self.make_text(record),
sticky=self.is_sticky(record),
priority=self.get_priority(record))
class LibNotifyHandler(NotificationBaseHandler):
"""A handler that dispatches to libnotify. Requires pynotify installed.
If `no_init` is set to `True` the initialization of libnotify is skipped.
"""
def __init__(self, application_name=None, icon=None, no_init=False,
record_limit=None, record_delta=None, level=NOTSET,
filter=None, bubble=False):
NotificationBaseHandler.__init__(self, application_name, record_limit,
record_delta, level, filter, bubble)
try:
import pynotify
self._pynotify = pynotify
except ImportError:
raise RuntimeError('The pynotify library is required for '
'the LibNotifyHandler.')
self.icon = icon
if not no_init:
pynotify.init(self.application_name)
def set_notifier_icon(self, notifier, icon):
"""Used to attach an icon on a notifier object."""
try:
from gtk import gdk
except ImportError:
# TODO: raise a warning?
raise RuntimeError('The gtk.gdk module is required to set an icon.')
if icon is not None:
if not isinstance(icon, gdk.Pixbuf):
icon = gdk.pixbuf_new_from_file(icon)
notifier.set_icon_from_pixbuf(icon)
def get_expires(self, record):
"""Returns either EXPIRES_DEFAULT or EXPIRES_NEVER for this record.
The default implementation marks errors and criticals as EXPIRES_NEVER.
"""
pn = self._pynotify
return pn.EXPIRES_NEVER if record.level >= ERROR else pn.EXPIRES_DEFAULT
def get_urgency(self, record):
"""Returns the urgency flag for pynotify. Errors and criticals are
get highest urgency (CRITICAL), warnings get higher priority (NORMAL)
and the rest gets LOW.
"""
pn = self._pynotify
if record.level >= ERROR:
return pn.URGENCY_CRITICAL
elif record.level == WARNING:
return pn.URGENCY_NORMAL
return pn.URGENCY_LOW
def emit(self, record):
if not self.check_delivery(record)[1]:
return
notifier = self._pynotify.Notification(self.make_title(record),
self.make_text(record))
notifier.set_urgency(self.get_urgency(record))
notifier.set_timeout(self.get_expires(record))
self.set_notifier_icon(notifier, self.icon)
notifier.show()
class BoxcarHandler(NotificationBaseHandler):
"""Sends notifications to boxcar.io. Can be forwarded to your iPhone or
other compatible device.
"""
api_url = 'https://boxcar.io/notifications/'
def __init__(self, email, password, record_limit=None, record_delta=None,
level=NOTSET, filter=None, bubble=False):
NotificationBaseHandler.__init__(self, None, record_limit,
record_delta, level, filter, bubble)
self.email = email
self.password = password
def get_screen_name(self, record):
"""Returns the value of the screen name field."""
return record.level_name.title()
def emit(self, record):
if not self.check_delivery(record)[1]:
return
body = urlencode({
'notification[from_screen_name]':
self.get_screen_name(record).encode('utf-8'),
'notification[message]':
self.make_text(record).encode('utf-8'),
'notification[from_remote_service_id]': str(int(time() * 100))
})
con = http_client.HTTPSConnection('boxcar.io')
con.request('POST', '/notifications/', headers={
'Authorization': 'Basic ' +
base64.b64encode((u('%s:%s') % (self.email, self.password))
.encode('utf-8')).strip(),
}, body=body)
con.close()
class NotifoHandler(NotificationBaseHandler):
"""Sends notifications to notifo.com. Can be forwarded to your Desktop,
iPhone, or other compatible device.
"""
def __init__(self, application_name=None, username=None, secret=None,
record_limit=None, record_delta=None, level=NOTSET,
filter=None, bubble=False, hide_level=False):
try:
import notifo
except ImportError:
raise RuntimeError(
'The notifo module is not available. You have '
'to install notifo to use the NotifoHandler.'
)
NotificationBaseHandler.__init__(self, None, record_limit,
record_delta, level, filter, bubble)
self._notifo = notifo
self.application_name = application_name
self.username = username
self.secret = secret
self.hide_level = hide_level
def emit(self, record):
if self.hide_level:
_level_name = None
else:
_level_name = self.level_name
self._notifo.send_notification(self.username, self.secret, None,
record.message, self.application_name,
_level_name, None)
class PushoverHandler(NotificationBaseHandler):
"""Sends notifications to pushover.net. Can be forwarded to your Desktop,
iPhone, or other compatible device. If `priority` is not one of -2, -1, 0,
or 1, it is set to 0 automatically.
"""
def __init__(self, application_name=None, apikey=None, userkey=None,
device=None, priority=0, sound=None, record_limit=None,
record_delta=None, level=NOTSET, filter=None, bubble=False):
super(PushoverHandler, self).__init__(None, record_limit, record_delta,
level, filter, bubble)
self.application_name = application_name
self.apikey = apikey
self.userkey = userkey
self.device = device
self.priority = priority
self.sound = sound
if self.application_name is None:
self.title = None
        elif len(self.application_name) > 100:
            # Truncate so the title stays within Pushover's 100-character limit.
            self.title = "%s..." % (self.application_name[:97],)
else:
self.title = self.application_name
if self.priority not in [-2, -1, 0, 1]:
self.priority = 0
def emit(self, record):
        if len(record.message) > 512:
            # Truncate so the message stays within Pushover's 512-character limit.
            message = "%s..." % (record.message[:509],)
else:
message = record.message
body_dict = {
'token': self.apikey,
'user': self.userkey,
'message': message,
'priority': self.priority
}
if self.title is not None:
body_dict['title'] = self.title
if self.device is not None:
body_dict['device'] = self.device
if self.sound is not None:
body_dict['sound'] = self.sound
body = urlencode(body_dict)
con = http_client.HTTPSConnection('api.pushover.net')
con.request('POST', '/1/messages.json', body=body)
con.close()
| pombredanne/logbook | logbook/notifiers.py | Python | bsd-3-clause | 11,853 | 0.000169 |
#!/usr/bin/env python
# encoding: utf-8
"""
release_push.py
Created by Jonathan Burke on 2013-12-30.
Copyright (c) 2015 University of Washington. All rights reserved.
"""
#See README-maintainers.html for more information
from release_vars import *
from release_utils import *
from sanity_checks import *
import urllib
import zipfile
# Ensure that the new release version is greater than the previous release version.
def check_release_version( previous_release, new_release ):
    if version_to_integer( previous_release ) >= version_to_integer( new_release ):
        raise Exception( "Previous release version ( " + previous_release + " ) should be less than " +
                         "the new release version ( " + new_release + " )" )
def copy_release_dir( path_to_dev, path_to_live, release_version ):
source_location = os.path.join( path_to_dev, release_version )
dest_location = os.path.join( path_to_live, release_version )
if os.path.exists( dest_location ):
prompt_to_delete( dest_location )
if os.path.exists( dest_location ):
raise Exception( "Destination location exists: " + dest_location )
cmd = "cp -r %s %s" % ( source_location, dest_location )
execute( cmd )
return dest_location
def copy_htaccess():
execute("cp %s %s" % (DEV_HTACCESS, LIVE_HTACCESS))
ensure_group_access(LIVE_HTACCESS)
def copy_releases_to_live_site( checker_version, afu_version):
copy_release_dir( JSR308_INTERM_RELEASES_DIR, JSR308_LIVE_RELEASES_DIR, checker_version )
copy_release_dir( CHECKER_INTERM_RELEASES_DIR, CHECKER_LIVE_RELEASES_DIR, checker_version )
copy_release_dir( AFU_INTERM_RELEASES_DIR, AFU_LIVE_RELEASES_DIR, afu_version )
def update_release_symlinks( checker_version, afu_version ):
afu_latest_release_dir = os.path.join( AFU_LIVE_RELEASES_DIR, afu_version )
checker_latest_release_dir = os.path.join( CHECKER_LIVE_RELEASES_DIR, checker_version )
force_symlink( os.path.join( JSR308_LIVE_RELEASES_DIR, checker_version ), os.path.join( JSR308_LIVE_SITE, "current" ) )
force_symlink( checker_latest_release_dir, os.path.join( CHECKER_LIVE_SITE, "current" ) )
force_symlink( afu_latest_release_dir, os.path.join( AFU_LIVE_SITE, "current" ) )
#After the copy operations the index.htmls will point into the dev directory
force_symlink( os.path.join( afu_latest_release_dir, "annotation-file-utilities.html" ), os.path.join( afu_latest_release_dir, "index.html" ) )
force_symlink( os.path.join( checker_latest_release_dir, "checker-framework-webpage.html" ), os.path.join( checker_latest_release_dir, "index.html" ) )
def ensure_group_access_to_releases():
ensure_group_access( JSR308_LIVE_RELEASES_DIR )
ensure_group_access( AFU_LIVE_RELEASES_DIR )
ensure_group_access( CHECKER_LIVE_RELEASES_DIR )
def push_maven_artifacts_to_release_repo( version ):
mvn_deploy_mvn_plugin( MAVEN_PLUGIN_DIR, MAVEN_PLUGIN_POM, version, MAVEN_LIVE_REPO )
# Deploy jsr308 and checker-qual jars to maven repo
mvn_deploy( CHECKER_BINARY, CHECKER_BINARY_POM, MAVEN_LIVE_REPO )
mvn_deploy( CHECKER_QUAL, CHECKER_QUAL_POM, MAVEN_LIVE_REPO )
mvn_deploy( JAVAC_BINARY, JAVAC_BINARY_POM, MAVEN_LIVE_REPO )
mvn_deploy( JDK7_BINARY, JDK7_BINARY_POM, MAVEN_LIVE_REPO )
mvn_deploy( JDK8_BINARY, JDK8_BINARY_POM, MAVEN_LIVE_REPO )
def stage_maven_artifacts_in_maven_central( new_checker_version ):
pgp_user="checker-framework-dev@googlegroups.com"
pgp_passphrase = read_first_line( PGP_PASSPHRASE_FILE )
mvn_dist = os.path.join(MAVEN_PLUGIN_DIR, "dist" )
execute( "mkdir -p " + mvn_dist )
#build Jar files with only readmes for artifacts that don't have sources/javadocs
ant_cmd = "ant -f release.xml -Ddest.dir=%s -Dmaven.plugin.dir=%s jar-maven-extras" % (mvn_dist, MAVEN_PLUGIN_DIR)
execute(ant_cmd, True, False, CHECKER_FRAMEWORK_RELEASE)
#At the moment, checker.jar is the only artifact with legitimate accompanying source/javadoc jars
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, CHECKER_BINARY_RELEASE_POM, CHECKER_BINARY,
CHECKER_SOURCE, CHECKER_JAVADOC,
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, CHECKER_QUAL_RELEASE_POM, CHECKER_QUAL,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-qual-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-qual-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, CHECKER_COMPAT_QUAL_RELEASE_POM,
CHECKER_COMPAT_QUAL,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-compat-qual-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-compat-qual-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JAVAC_BINARY_RELEASE_POM, JAVAC_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "compiler-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "compiler-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JDK7_BINARY_RELEASE_POM, JDK7_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk7-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk7-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JDK8_BINARY_RELEASE_POM, JDK8_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk8-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk8-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JAVACUTIL_BINARY_RELEASE_POM, JAVACUTIL_BINARY,
JAVACUTIL_SOURCE_JAR, JAVACUTIL_JAVADOC_JAR,
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, DATAFLOW_BINARY_RELEASE_POM, DATAFLOW_BINARY,
DATAFLOW_SOURCE_JAR, DATAFLOW_JAVADOC_JAR,
pgp_user, pgp_passphrase )
plugin_jar = find_mvn_plugin_jar( MAVEN_PLUGIN_DIR, new_checker_version )
plugin_source_jar = find_mvn_plugin_jar( MAVEN_PLUGIN_DIR, new_checker_version, "sources" )
plugin_javadoc_jar = os.path.join( MAVEN_RELEASE_DIR, mvn_dist, "checkerframework-maven-plugin-javadoc.jar" )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, MAVEN_PLUGIN_RELEASE_POM, plugin_jar,
plugin_source_jar, plugin_javadoc_jar, pgp_user, pgp_passphrase )
delete_path( mvn_dist )
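# mvn_sign_and_deploy_all() is defined elsewhere in the release tooling. The
# calls above assume roughly this shape for the underlying Maven invocation
# (an illustration of the standard Sonatype OSS flow, not the actual helper):
#
#   mvn gpg:sign-and-deploy-file -Durl=<repo-url> -DrepositoryId=<repo-id> \
#       -DpomFile=<pom> -Dfile=<artifact> [-Dclassifier=sources|javadoc] \
#       -Dgpg.keyname=<pgp_user> -Dgpg.passphrase=<pgp_passphrase>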
def run_link_checker( site, output ):
check_links_script = os.path.join(CHECKER_FRAMEWORK_RELEASE, "checkLinks.sh")
cmd = ["sh", check_links_script, site]
out_file = open( output, 'w+' )
print("Executing: " + " ".join(cmd) )
process = subprocess.Popen(cmd, stdout=out_file, stderr=out_file)
    # communicate() waits for the link checker to terminate; output goes to out_file
    process.communicate()
out_file.close()
if process.returncode != 0:
raise Exception('Non-zero return code( %s ) while executing %s' % (process.returncode, cmd))
return output
def check_all_links( jsr308_website, afu_website, checker_website, suffix ):
jsr308Check = run_link_checker( jsr308_website, "/tmp/jsr308." + suffix + ".check" )
afuCheck = run_link_checker( afu_website, "/tmp/afu." + suffix + ".check" )
checkerCheck = run_link_checker( checker_website, "/tmp/checker-framework." + suffix + ".check" )
print( "Link checker results can be found at:\n" +
"\t" + jsr308Check + "\n" +
"\t" + afuCheck + "\n" +
"\t" + checkerCheck + "\n" )
continue_script = prompt_w_suggestion("Delete " + suffix + " site link checker results?", "yes", "^(Yes|yes|No|no)$")
if is_yes(continue_script):
delete( jsr308Check )
delete( afuCheck )
delete( checkerCheck )
def push_interm_to_release_repos():
hg_push_or_fail( INTERM_JSR308_REPO )
hg_push_or_fail( INTERM_ANNO_REPO )
hg_push_or_fail( INTERM_CHECKER_REPO )
def continue_or_exit( msg ):
continue_script = prompt_w_suggestion(msg + " Continue?", "yes", "^(Yes|yes|No|no)$")
if continue_script == "no" or continue_script == "No":
raise Exception( "User elected NOT to continue at prompt: " + msg )
def read_args(argv):
test = True
if len( argv ) == 2:
if argv[1] == "release":
test = False
else:
print_usage()
else:
if len( argv ) > 2:
print_usage()
raise Exception( "Invalid arguments. " + ",".join(argv) )
return test
def print_usage():
print ( "Usage: python release_build.py [release]\n" +
"The only argument this script takes is \"release\". If this argument is " +
"NOT specified then the script will execute all steps that checking and prompting " +
"steps but will NOT actually perform a release. This is for testing the script." )
def main(argv):
# MANUAL Indicates a manual step
# SEMIAUTO Indicates a mostly automated step with possible prompts. Most of these steps become fully-automated when --auto is used.
# AUTO Indicates the step is fully-automated.
# Note that many prompts will cause scripts to exit if you say 'no'. This will require you to re-run
# the script from the beginning, which may take a long time. It is better to say 'yes' to the script
    # prompts and follow the indicated steps even if they seem redundant or you have already done them. Also,
# be sure to carefully read all instructions on the command-line before typing yes. This is because
# the scripts do not ask you to say 'yes' after each step, so you may miss a step if you only read
# the paragraph asking you to say 'yes'.
set_umask()
test_mode = read_args( argv )
msg = ( "You have chosen test_mode. \nThis means that this script will execute all build steps that " +
"do not have side-effects. That is, this is a test run of the script. All checks and user prompts " +
"will be shown but no steps will be executed that will cause the release to be deployed or partially " +
"deployed.\n" +
"If you meant to do an actual release, re-run this script with one argument, \"release\"." )
if not test_mode:
msg = "You have chosen release_mode. Please follow the prompts to run a full Checker Framework release"
continue_or_exit( msg + "\n" )
if test_mode:
print("Continuing in test mode.")
else:
print("Continuing in release mode.")
check_hg_user()
print( "\nNOTE: Please read all the prompts printed by this script very carefully, as their" )
print( "contents may have changed since the last time you ran it." )
print_step( "Push Step 0: Verify Requirements\n" ) # MANUAL
print( " If this is your first time running the release_push script, please verify that you have met " +
"all the requirements specified in README-maintainers.html \"Pre-release Checklist\"\n" )
continue_or_exit("")
# The release script checks that the new release version is greater than the previous release version.
print_step( "Push Step 1: Checking release versions" ) # SEMIAUTO
dev_jsr308_website = os.path.join( HTTP_PATH_TO_DEV_SITE, "jsr308" )
live_jsr308_website = os.path.join( HTTP_PATH_TO_LIVE_SITE, "jsr308" )
dev_afu_website = os.path.join( HTTP_PATH_TO_DEV_SITE, "annotation-file-utilities" )
live_afu_website = os.path.join( HTTP_PATH_TO_LIVE_SITE, "annotation-file-utilities" )
dev_checker_website = os.path.join( HTTP_PATH_TO_DEV_SITE, "checker-framework" )
live_checker_website = os.path.join( HTTP_PATH_TO_LIVE_SITE, "checker-framework" )
current_checker_version = current_distribution_by_website( live_checker_website )
new_checker_version = current_distribution( CHECKER_FRAMEWORK )
check_release_version( current_checker_version, new_checker_version )
    # Note: get_afu_version_from_html uses the file path, not the web URL.
dev_afu_website_file = os.path.join( FILE_PATH_TO_DEV_SITE, "annotation-file-utilities", "index.html" )
live_afu_website_file = os.path.join( FILE_PATH_TO_LIVE_SITE, "annotation-file-utilities", "index.html" )
current_afu_version = get_afu_version_from_html( live_afu_website_file )
new_afu_version = get_afu_version_from_html( dev_afu_website_file )
check_release_version( current_afu_version, new_afu_version )
print("Checker Framework/JSR308: current-version=%s new-version=%s" % (current_checker_version, new_checker_version ) )
print("AFU: current-version=%s new-version=%s" % (current_afu_version, new_afu_version ) )
continue_or_exit("Please ensure that you have run release_build.py since the last push to " +
"any of the JSR308, AFU, or Checker Framework repositories." )
    # Runs the link checker on all websites at:
# http://types.cs.washington.edu/dev/
# The output of the link checker is written to files in the /tmp directory
# whose locations will be output at the command prompt.
# It is ok for there to be broken links in JSR308 referring to ../../specification.
# These links are broken because there is no specification in the dev website (the
# specification is released separately). If there are any other broken links, fix
# them. In rare instances (such as when a link is correct but the link checker is
# unable to check it), you may add a suppression to the checklink-args.txt file.
# In extremely rare instances (such as when a website happens to be down at the
# time you ran the link checker), you may ignore an error.
print_step( "Push Step 2: Check links on development site" ) # SEMIAUTO
if prompt_yes_no( "Run link-checker on DEV site?", True ):
check_all_links( dev_jsr308_website, dev_afu_website, dev_checker_website, "dev" )
# Runs sanity tests on the development release. Later, we will run a smaller set of sanity
# tests on the live release to ensure no errors occurred when promoting the release.
# NOTE: In this step you will also be prompted to build and manually test the Eclipse plugin.
print_step( "Push Step 3: Run development sanity tests" ) # SEMIAUTO
continue_or_exit(
"Later in this step you will build and install the Eclipse plugin using the latest artifacts. See\n" +
"README-developers.html under the checker-framework/release directory\n\n")
print_step(" 3a: Run javac sanity test on development release." )
if prompt_yes_no( "Run javac sanity test on development release?", True ):
javac_sanity_check( dev_checker_website, new_checker_version )
print_step("3b: Run Maven sanity test on development release." )
if prompt_yes_no( "Run Maven sanity test on development repo?", True ):
maven_sanity_check( "maven-dev", MAVEN_DEV_REPO, new_checker_version )
print_step( "3c: Build the Eclipse plugin and test." )
print("Please download: http://types.cs.washington.edu/dev/checker-framework/current/checker-framework.zip")
print("Use the jars in the dist directory along with the instructions at " +
"checker-framework/eclipse/README-developers.html to build the Eclipse plugin." +
"Please install this version in the latest version of Eclipse and follow the tutorial at:\n" +
"http://types.cs.washington.edu/dev/checker-framework/tutorial/" )
continue_or_exit("If the tutorial doesn't work, please abort the release and contact the appropriate developer.")
# The Central repository is a repository of build artifacts for build programs like Maven and Ivy.
    # This step stages (but doesn't release) the Checker Framework's Maven artifacts in Sonatype's
    # Central Repository.
    # Once staging is complete, there are manual steps to log into Sonatype's Central and "close" the
# staging repository. Closing allows us to test the artifacts.
# This step deploys the artifacts to the Central repository and prompts the user to close the
# artifacts. Later, you will be prompted to release the staged artifacts after we commit the
# release to our Google Code repositories.
# For more information on deploying to the Central Repository see:
# https://docs.sonatype.org/display/Repository/Sonatype+OSS+Maven+Repository+Usage+Guide
print_step( "Push Step 4: Stage Maven artifacts in Central" ) # SEMIAUTO
print_step("4a: Stage the artifacts at Maven central." )
if prompt_yes_no( "Stage Maven artifacts in Maven Central?" ):
stage_maven_artifacts_in_maven_central( new_checker_version )
print_step("4b: Close staged artifacts at Maven central." )
print( "Maven artifacts have been staged! Please 'close' (but don't release) the artifacts. " +
"To close, log into https://oss.sonatype.org using your " +
"Sonatype credentials and follow the 'close' instructions at: " + SONATYPE_CLOSING_DIRECTIONS_URL )
print_step("4c: Run Maven sanity test on Maven central artifacts." )
if prompt_yes_no( "Run Maven sanity test on Maven central artifacts?", True ):
repo_url = raw_input( "Please enter the repo URL of the closed artifacts. To find this URL " +
"log into https://oss.sonatype.org. Go to the Staging Repositories. Find " +
"the repository you just closed and paste that URL here:\n" )
maven_sanity_check( "maven-staging", repo_url, new_checker_version )
# This step copies the development release directories to the live release directories.
# It then adds the appropriate permissions to the release. Symlinks need to be updated to point
# to the live website rather than the development website. A straight copy of the directory
# will NOT update the symlinks.
print_step("Push Step 5. Push dev website to live website" ) # SEMIAUTO
continue_or_exit("Copy release to the live website?")
if not test_mode:
print("Copying to live site")
copy_releases_to_live_site( new_checker_version, new_afu_version )
copy_htaccess()
ensure_group_access_to_releases()
update_release_symlinks( new_checker_version, new_afu_version )
else:
print( "Test mode: Skipping copy to live site!" )
    # Runs the link checker on all websites at:
# http://types.cs.washington.edu/
# The output of the link checker is written to files in the /tmp directory whose locations
# will be output at the command prompt. Review the link checker output.
# The set of broken links that is displayed by this check will differ from those in push
    # step 2 because the Checker Framework manual and website use a mix of absolute and
# relative links. Therefore, some links from the development site actually point to the
# live site (the previous release). After step 5, these links point to the current
# release and may be broken.
    # NOTE: There will be many broken links within the jdk-api directory; see the Open JDK/JSR308 Javadoc.
print( "Push Step 6. Check live site links" ) # SEMIAUTO
if prompt_yes_no( "Run link Checker on LIVE site?", True ):
check_all_links( live_jsr308_website, live_afu_website, live_checker_website, "live" )
# This step downloads the checker-framework.zip file of the newly live release and ensures we
# can run the Nullness Checker. If this step fails, you should backout the release.
print_step( "Push Step 7: Run javac sanity test on the live release." ) # SEMIAUTO
if prompt_yes_no( "Run javac sanity test on live release?", True ):
javac_sanity_check( live_checker_website, new_checker_version )
# You must manually deploy the Eclipse plugin. Follow the instructions at the prompt.
print_step("Push Step 8: Deploy the Eclipse Plugin to the live site." ) # MANUAL
continue_or_exit( "Follow the instruction under 'Releasing the Plugin' in checker-framework/eclipse/README-developers.html to " +
"deploy the Eclipse plugin to the live website. Please install the plugin from the new " +
"live repository and run it on a file in which you expect a type error. If you run into errors, " +
"back out the release!" )
# This step pushes the changes committed to the interm repositories to the Google Code
# repositories. This is the first irreversible change. After this point, you can no longer
# backout changes and should do another release in case of critical errors.
print_step( "Push Step 9. Commit changes to repositories" ) # SEMIAUTO
if prompt_yes_no( "Push the release to Google code repositories? This is irreversible." ):
if not test_mode:
push_interm_to_release_repos()
print( "Pushed to repos" )
else:
print( "Test mode: Skipping push to Google Code!" )
# This is a manual step that releases the staged Maven artifacts to the actual Central repository.
# This is also an irreversible step. Once you have released these artifacts they will be forever
# available to the Java community through the Central repository. Follow the prompts. The Maven
# artifacts (such as checker-qual.jar) are still needed, but the Maven plug-in is no longer maintained.
print_step( "Push Step 10. Release staged artifacts in Central repository." ) # MANUAL
if test_mode:
msg = ( "Test Mode: You are in test_mode. Please 'DROP' the artifacts. " +
"To drop, log into https://oss.sonatype.org using your " +
"Sonatype credentials and follow the 'DROP' instructions at: " + SONATYPE_DROPPING_DIRECTIONS_URL )
else:
msg = ( "Please 'release' the artifacts, but IMPORTANTLY first ensure that the Checker Framework maven plug-in directory" +
"(and only that directory) is removed from the artifacts. " +
"To release, log into https://oss.sonatype.org using your " +
"Sonatype credentials and follow the 'close' instructions at: " + SONATYPE_RELEASE_DIRECTIONS_URL )
    # Later we will fix this so that the maven plug-in directory is not included in the first place.
print( msg )
prompt_until_yes()
# A prompt describes the email you should send to all relevant mailing lists.
# Please fill out the email and announce the release.
print_step( "Push Step 11. Announce the release." ) # MANUAL
continue_or_exit( "Please announce the release using the email structure below.\n" +
"Note that this text may have changed since the last time a release was performed.\n" +
get_announcement_email( new_checker_version ) )
print_step( "Push Step 12. Push Eclipse plugin files." ) # MANUAL
if test_mode:
msg = ( "Test Mode: You are in test_mode. If you built the Eclipse plugin on" +
"your local machine, you may want to revert any files that were modified." )
else:
msg = ( "If you built the Eclipse plugin on your local machine, there are a few " +
"changed files with version number changes that need to be pushed.\n" +
"Do not push the .classpath file. The following files should be pushed:\n" +
"checker-framework-eclipse-feature/feature.xml\n" +
"checker-framework-eclipse-plugin/META-INF/MANIFEST.MF\n" +
"checker-framework-eclipse-update-site/site.xml" )
print( msg )
prompt_until_yes()
if __name__ == "__main__":
sys.exit(main(sys.argv))
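# The prompt helpers used throughout (prompt_w_suggestion, prompt_yes_no,
# prompt_until_yes, print_step) are defined elsewhere in the release tooling
# and are not shown in this file. A rough sketch of the contract this script
# assumes for prompt_w_suggestion (an illustration, not the real code):
#
#   def prompt_w_suggestion(msg, suggestion, valid_regex=None):
#       while True:
#           ans = raw_input("%s [%s] " % (msg, suggestion)).strip() or suggestion
#           if valid_regex is None or re.match(valid_regex, ans):
#               return ans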
| SoftwareEngineeringToolDemos/ICSE-2011-Checker-Framework | release/release_push.py | Python | gpl-2.0 | 24,373 | 0.020843 |
from django.test import SimpleTestCase
from corehq.apps.export.const import USERNAME_TRANSFORM
from corehq.apps.export.models import (
DocRow,
RowNumberColumn,
PathNode,
ExportRow,
ScalarItem,
ExportColumn,
TableConfiguration,
)
class TableConfigurationTest(SimpleTestCase):
def test_get_column(self):
table_configuration = TableConfiguration(
path=[PathNode(name='form', is_repeat=False), PathNode(name="repeat1", is_repeat=True)],
columns=[
ExportColumn(
item=ScalarItem(
path=[
PathNode(name='form'),
PathNode(name='repeat1', is_repeat=True),
PathNode(name='q1')
],
)
),
ExportColumn(
item=ScalarItem(
path=[
PathNode(name="form"),
PathNode(name="user_id"),
],
transform=USERNAME_TRANSFORM
)
),
ExportColumn(
item=ScalarItem(
path=[
PathNode(name='form'),
PathNode(name='repeat1', is_repeat=True),
PathNode(name='q2')
],
)
),
]
)
index, column = table_configuration.get_column(
[
PathNode(name='form'),
PathNode(name='repeat1', is_repeat=True),
PathNode(name='q1')
],
'ScalarItem',
None,
)
self.assertEqual(
column.item.path,
[
PathNode(name='form'),
PathNode(name='repeat1', is_repeat=True),
PathNode(name='q1')
]
)
self.assertEqual(index, 0)
index, column = table_configuration.get_column(
[
PathNode(name='form'),
PathNode(name='repeat1', is_repeat=True),
PathNode(name='DoesNotExist')
],
'ScalarItem',
None,
)
self.assertIsNone(column)
# Verify that get_column ignores deid transforms
index, column = table_configuration.get_column(
[PathNode(name="form"), PathNode(name="user_id")],
'ScalarItem',
USERNAME_TRANSFORM
)
self.assertIsNotNone(column)
self.assertEqual(index, 1)
class TableConfigurationGetSubDocumentsTest(SimpleTestCase):
def test_basic(self):
table = TableConfiguration(path=[])
self.assertEqual(
table._get_sub_documents(
{'foo': 'a'},
0
),
[
DocRow(row=(0,), doc={'foo': 'a'})
]
)
def test_simple_repeat(self):
table = TableConfiguration(
path=[PathNode(name="foo", is_repeat=True)]
)
self.assertEqual(
table._get_sub_documents(
{
'foo': [
{'bar': 'a'},
{'bar': 'b'},
]
},
0
),
[
DocRow(row=(0, 0), doc={'bar': 'a'}),
DocRow(row=(0, 1), doc={'bar': 'b'})
]
)
def test_nested_repeat(self):
table = TableConfiguration(
path=[PathNode(name='foo', is_repeat=True), PathNode(name='bar', is_repeat=True)],
)
self.assertEqual(
table._get_sub_documents(
{
'foo': [
{
'bar': [
{'baz': 'a'},
{'baz': 'b'}
],
},
{
'bar': [
{'baz': 'c'}
],
},
],
},
0
),
[
DocRow(row=(0, 0, 0), doc={'baz': 'a'}),
DocRow(row=(0, 0, 1), doc={'baz': 'b'}),
DocRow(row=(0, 1, 0), doc={'baz': 'c'}),
]
)
def test_single_iteration_repeat(self):
table = TableConfiguration(
path=[PathNode(name='group1', is_repeat=False), PathNode(name='repeat1', is_repeat=True)],
)
self.assertEqual(
table._get_sub_documents(
{
'group1': {
'repeat1': {
'baz': 'a'
},
}
},
0
),
[
DocRow(row=(0, 0), doc={'baz': 'a'}),
]
)
class TableConfigurationGetRowsTest(SimpleTestCase):
def test_simple(self):
table_configuration = TableConfiguration(
path=[],
columns=[
ExportColumn(
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q3')],
),
selected=True,
),
ExportColumn(
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q1')],
),
selected=True,
),
ExportColumn(
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q2')],
),
selected=False,
),
]
)
submission = {
'domain': 'my-domain',
'_id': '1234',
"form": {
"q1": "foo",
"q2": "bar",
"q3": "baz"
}
}
self.assertEqual(
[row.data for row in table_configuration.get_rows(submission, 0)],
[['baz', 'foo']]
)
def test_repeat(self):
table_configuration = TableConfiguration(
path=[PathNode(name="form", is_repeat=False), PathNode(name="repeat1", is_repeat=True)],
columns=[
ExportColumn(
item=ScalarItem(
path=[
PathNode(name="form"),
PathNode(name="repeat1", is_repeat=True),
PathNode(name="q1")
],
),
selected=True,
),
]
)
submission = {
'domain': 'my-domain',
'_id': '1234',
'form': {
'repeat1': [
{'q1': 'foo'},
{'q1': 'bar'}
]
}
}
self.assertEqual(
[row.data for row in table_configuration.get_rows(submission, 0)],
[ExportRow(['foo']).data, ExportRow(['bar']).data]
)
def test_double_repeat(self):
table_configuration = TableConfiguration(
path=[
PathNode(name="form", is_repeat=False),
PathNode(name="repeat1", is_repeat=True),
PathNode(name="group1", is_repeat=False),
PathNode(name="repeat2", is_repeat=True),
],
columns=[
RowNumberColumn(
selected=True
),
ExportColumn(
item=ScalarItem(
path=[
PathNode(name='form'),
PathNode(name='repeat1', is_repeat=True),
PathNode(name='group1'),
PathNode(name='repeat2', is_repeat=True),
PathNode(name='q1')
],
),
selected=True,
),
]
)
submission = {
'domain': 'my-domain',
'_id': '1234',
'form': {
'repeat1': [
{
'group1': {
'repeat2': [
{'q1': 'foo'},
{'q1': 'bar'}
]
}
},
{
'group1': {
'repeat2': [
{'q1': 'beep'},
{'q1': 'boop'}
]
}
},
]
}
}
self.assertEqual(
[row.data for row in table_configuration.get_rows(submission, 0)],
[
["0.0.0", 0, 0, 0, 'foo'],
["0.0.1", 0, 0, 1, 'bar'],
["0.1.0", 0, 1, 0, 'beep'],
["0.1.1", 0, 1, 1, 'boop']
]
)
def test_empty_group(self):
table_configuration = TableConfiguration(
path=[
PathNode(name="form", is_repeat=False),
PathNode(name="group", is_repeat=False),
PathNode(name="repeat1", is_repeat=True)
],
columns=[
ExportColumn(
item=ScalarItem(
path=[
PathNode(name="form"),
PathNode(name="group"),
PathNode(name="repeat1", is_repeat=True),
PathNode(name="q1")
],
),
selected=True,
),
]
)
submission = {
'domain': 'my-domain',
'_id': '1234',
'form': {
'group': ''
}
}
self.assertEqual(
[row.data for row in table_configuration.get_rows(submission, 0)], []
)
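# For orientation: in test_double_repeat above, get_rows() emits one row per
# leaf repeat entry, and RowNumberColumn renders the dotted repeat path
# ("0.1.0" == submission 0, repeat1[1], repeat2[0]) followed by the individual
# indices. A minimal call sketch, reusing the objects built in that test:
#
#   rows = table_configuration.get_rows(submission, 0)
#   data = [row.data for row in rows]  # e.g. [["0.0.0", 0, 0, 0, "foo"], ...]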
| qedsoftware/commcare-hq | corehq/apps/export/tests/test_table_configuration.py | Python | bsd-3-clause | 10,403 | 0.000481 |
# -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2014 Thomas Perl and the gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# gpodder.query - Episode Query Language (EQL) implementation (2010-11-29)
#
import gpodder
import re
import datetime
class Matcher(object):
"""Match implementation for EQL
This class implements the low-level matching of
EQL statements against episode objects.
"""
def __init__(self, episode):
self._episode = episode
def match(self, term):
try:
return bool(eval(term, {'__builtins__': None}, self))
except Exception, e:
print e
return False
def __getitem__(self, k):
episode = self._episode
# Adjectives (for direct usage)
if k == 'new':
return (episode.state == gpodder.STATE_NORMAL and episode.is_new)
elif k in ('downloaded', 'dl'):
return episode.was_downloaded(and_exists=True)
elif k in ('deleted', 'rm'):
return episode.state == gpodder.STATE_DELETED
elif k == 'played':
return not episode.is_new
elif k == 'downloading':
return episode.downloading
elif k == 'archive':
return episode.archive
elif k in ('finished', 'fin'):
return episode.is_finished()
elif k in ('video', 'audio'):
return episode.file_type() == k
elif k == 'torrent':
return episode.url.endswith('.torrent') or 'torrent' in episode.mime_type
# Nouns (for comparisons)
if k in ('megabytes', 'mb'):
return float(episode.file_size) / (1024*1024)
elif k == 'title':
return episode.title
elif k == 'description':
return episode.description
elif k == 'since':
return (datetime.datetime.now() - datetime.datetime.fromtimestamp(episode.published)).days
elif k == 'age':
return episode.age_in_days()
elif k in ('minutes', 'min'):
return float(episode.total_time) / 60
elif k in ('remaining', 'rem'):
return float(episode.total_time - episode.current_position) / 60
raise KeyError(k)
class EQL(object):
"""A Query in EQL
Objects of this class represent a query on episodes
using EQL. Example usage:
>>> q = EQL('downloaded and megabytes > 10')
>>> q.filter(channel.get_all_episodes())
>>> EQL('new and video').match(episode)
Regular expression queries are also supported:
>>> q = EQL('/^The.*/')
>>> q = EQL('/community/i')
Normal string matches are also supported:
>>> q = EQL('"S04"')
>>> q = EQL("'linux'")
Normal EQL queries cannot be mixed with RegEx
or string matching yet, so this does NOT work:
>>> EQL('downloaded and /The.*/i')
"""
def __init__(self, query):
self._query = query
self._flags = 0
self._regex = False
self._string = False
# Regular expression based query
match = re.match(r'^/(.*)/(i?)$', query)
if match is not None:
self._regex = True
self._query, flags = match.groups()
if flags == 'i':
self._flags |= re.I
# String based query
match = re.match("^([\"'])(.*)(\\1)$", query)
if match is not None:
self._string = True
a, query, b = match.groups()
self._query = query.lower()
# For everything else, compile the expression
if not self._regex and not self._string:
try:
self._query = compile(query, '<eql-string>', 'eval')
except Exception, e:
print e
self._query = None
def match(self, episode):
if self._query is None:
return False
if self._regex:
return re.search(self._query, episode.title, self._flags) is not None
elif self._string:
return self._query in episode.title.lower() or self._query in episode.description.lower()
return Matcher(episode).match(self._query)
def filter(self, episodes):
return filter(self.match, episodes)
def UserEQL(query):
"""EQL wrapper for user input
Automatically adds missing quotes around a
non-EQL string for user-based input. In this
case, EQL queries need to be enclosed in ().
"""
if query is None:
return None
if query == '' or (query and query[0] not in "(/'\""):
return EQL("'%s'" % query)
else:
return EQL(query)
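

# A minimal usage sketch (not part of gPodder's public API): FakeEpisode and
# its attribute values are assumptions standing in for real episode objects.
if __name__ == '__main__':
    class FakeEpisode(object):
        title = 'Linux Weekly S04E01'
        description = 'community news'

    print UserEQL('linux').match(FakeEpisode())     # True: substring of the title
    print UserEQL('/^Linux/').match(FakeEpisode())  # True: regex match on the title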
| somini/gpodder | src/gpodder/query.py | Python | gpl-3.0 | 5,258 | 0.001331 |
from greenlet import greenlet
import os
from sandbox import Sandbox, SandboxConfig
from redux.internal.exceptions import RobotDeathException
import traceback
# this is just for testing
LOADER_CODE_TEST = """
import time
while True:
try:
print 'running... %s' % str(rc)
time.sleep(1)
print 'done'
defer()
except Exception:
print 'caught exception... ignoring'
"""
# This will be the real player execution code
LOADER_CODE = """
from {team}.player import RobotPlayer
player = RobotPlayer(rc)
player.run()
"""
# TODO paramaterize
BYTECODE_LIMIT = 8000
def passthrough(thread, throw=False):
"""A one shot greenlet that simply resumes an existing greenlet and then
    returns. This allows greenlets to be resumed without a persistent parent.
"""
def _run():
retval = thread.resume(throw)
if retval is None:
raise Exception('robot run method returned')
return retval
g = greenlet(run=_run)
thread.parent = g
return g.switch()
class Scheduler():
_instance = None
def __init__(self, game_world):
self.game_world = game_world
self.current_thread = None
self.threads = {}
self.threads_to_kill = set()
@classmethod
def create(cls, game_world=None):
cls._instance = Scheduler(game_world)
@classmethod
def destroy(cls):
cls._instance = None
@classmethod
def instance(cls):
return cls._instance
def spawn_thread(self, robot):
"""Spawn a new player"""
player = Player(RobotController(robot, self.game_world).interface())
thread = PlayerThread(player)
self.threads[robot.id] = thread
def run_thread(self, id):
"""Run a player thread for the given robot id"""
print '[SCHEDULER] running thread', id
self.current_thread = self.threads.get(id)
assert not self.current_thread is None, 'null thread?'
# check if the robot is scheduled to be killed
throw = id in self.threads_to_kill
# check if the robot is over the bytecode limit
if self.get_bytecode_left() < 0 and not throw:
self.current_thread.bytecode_used -= BYTECODE_LIMIT
return
# resume robot execution
try:
passthrough(self.current_thread.player, throw)
except Exception as e:
if not isinstance(e, RobotDeathException):
traceback.print_exc()
del self.threads[id]
self.current_thread = None
def end_thread(self):
        self.current_thread.bytecode_used -= min(BYTECODE_LIMIT, self.current_thread.bytecode_used)
self.current_thread.player.pause()
def current_robot(self):
return self.current_thread.player.robot_controller.robot
def kill_robot(self, id):
self.threads_to_kill.add(id)
def increment_bytecode(self, amt):
assert amt >= 0, 'negative bytecode increments not allowed'
self.current_thread.bytecode_used += amt
if self.current_thread.bytecode_used > BYTECODE_LIMIT:
self.end_thread()
def get_bytecode_left(self):
return BYTECODE_LIMIT - self.current_thread.bytecode_used
def get_bytecode_used(self):
return self.current_thread.bytecode_used
class Player(greenlet):
def __init__(self, robot_controller):
super(Player, self).__init__()
self.robot_controller = robot_controller
config = SandboxConfig(use_subprocess=False)
config.enable('traceback')
config.enable('stdout')
config.enable('stderr')
config.enable('time')
# TODO need to allow *all* imports from team package
config.allowModule(robot_controller.robot.team + '.player', 'RobotPlayer')
# TODO need a better method for override the sys_path
config.sys_path = config.sys_path + (os.getcwd(),)
# add additional builtins to the config
# - increment_clock
this = self
def increment_clock(amt):
Scheduler.instance().increment_bytecode(amt)
# TODO need a better method to add builtins additions
config._builtins_additions = {
'increment_clock': increment_clock,
}
self.sandbox = Sandbox(config)
self.running = False
def resume(self, throw=False):
return self.switch(throw)
def pause(self):
# break out of the sandbox
self.sandbox.disable_protections()
# return execution to the scheduler
throw = self.parent.switch()
if throw:
raise RobotDeathException('killed by engine')
# re-enable sandbox protections
self.sandbox.enable_protections()
def run(self, *args):
statement = LOADER_CODE.format(team=self.robot_controller.robot.team)
safelocals = { 'rc': self.robot_controller }
self.running = True
self.sandbox.execute(statement, globals={}, locals=safelocals)
class PlayerThread(object):
def __init__(self, player):
self.bytecode_used = 0
self.player = player
class RobotController(object):
def __init__(self, robot, game_world):
self.robot = robot
self.game_world = game_world
def yield_execution(self):
# TODO yield bonus
Scheduler.instance().end_thread()
def interface(self):
"""
Returns an encapsulated version of the controller that can safely be
passed to the sandboxed player code.
"""
this = self
class _interface(object):
def __init__(self):
self._robot = this.robot.interface() # TODO robot should cache its own interface
def yield_execution(self):
this.yield_execution()
@property
def robot(self):
return self._robot
return _interface() | trun/redux | redux/internal/scheduler.py | Python | mit | 5,911 | 0.00203 |
# coding=utf-8
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from datetime import datetime
import pytest
import six
from azure.containerregistry import (
RepositoryProperties,
ArtifactManifestProperties,
ArtifactManifestOrder,
ArtifactTagProperties,
ArtifactTagOrder,
ContainerRegistryClient,
)
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
from azure.core.paging import ItemPaged
from testcase import ContainerRegistryTestClass, get_authority
from constants import TO_BE_DELETED, HELLO_WORLD, ALPINE, BUSYBOX, DOES_NOT_EXIST
from preparer import acr_preparer
class TestContainerRegistryClient(ContainerRegistryTestClass):
@acr_preparer()
def test_list_repository_names(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
repositories = client.list_repository_names()
assert isinstance(repositories, ItemPaged)
count = 0
prev = None
for repo in repositories:
count += 1
assert isinstance(repo, six.string_types)
assert prev != repo
prev = repo
assert count > 0
@acr_preparer()
def test_list_repository_names_by_page(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
results_per_page = 2
total_pages = 0
repository_pages = client.list_repository_names(results_per_page=results_per_page)
prev = None
for page in repository_pages.by_page():
page_count = 0
for repo in page:
assert isinstance(repo, six.string_types)
assert prev != repo
prev = repo
page_count += 1
assert page_count <= results_per_page
total_pages += 1
assert total_pages >= 1
@acr_preparer()
def test_delete_repository(self, containerregistry_endpoint, containerregistry_resource_group):
self.import_image(containerregistry_endpoint, HELLO_WORLD, [TO_BE_DELETED])
client = self.create_registry_client(containerregistry_endpoint)
client.delete_repository(TO_BE_DELETED)
for repo in client.list_repository_names():
if repo == TO_BE_DELETED:
raise ValueError("Repository not deleted")
@acr_preparer()
def test_delete_repository_does_not_exist(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
client.delete_repository("not_real_repo")
@acr_preparer()
def test_get_repository_properties(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_repository_properties(ALPINE)
assert isinstance(properties, RepositoryProperties)
assert properties.name == ALPINE
@acr_preparer()
def test_update_repository_properties(self, containerregistry_endpoint):
repository = self.get_resource_name("repo")
tag_identifier = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repository, tag_identifier)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_repository_properties(repository)
properties.can_delete = False
properties.can_read = False
properties.can_list = False
properties.can_write = False
new_properties = client.update_repository_properties(repository, properties)
assert properties.can_delete == new_properties.can_delete
assert properties.can_read == new_properties.can_read
assert properties.can_list == new_properties.can_list
assert properties.can_write == new_properties.can_write
new_properties.can_delete = True
new_properties.can_read = True
new_properties.can_list = True
new_properties.can_write = True
new_properties = client.update_repository_properties(repository, new_properties)
assert new_properties.can_delete == True
assert new_properties.can_read == True
assert new_properties.can_list == True
assert new_properties.can_write == True
@acr_preparer()
def test_update_repository_properties_kwargs(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_repository_properties(repo)
properties = self.set_all_properties(properties, True)
received = client.update_repository_properties(repo, properties)
self.assert_all_properties(properties, True)
received = client.update_repository_properties(repo, can_delete=False)
assert received.can_delete == False
assert received.can_list == True
assert received.can_read == True
assert received.can_write == True
received = client.update_repository_properties(repo, can_read=False)
assert received.can_delete == False
assert received.can_list == True
assert received.can_read == False
assert received.can_write == True
received = client.update_repository_properties(repo, can_write=False)
assert received.can_delete == False
assert received.can_list == True
assert received.can_read == False
assert received.can_write == False
received = client.update_repository_properties(repo, can_list=False)
assert received.can_delete == False
assert received.can_list == False
assert received.can_read == False
assert received.can_write == False
received = client.update_repository_properties(
repo,
can_delete=True,
can_read=True,
can_write=True,
can_list=True,
)
self.assert_all_properties(received, True)
@acr_preparer()
def test_list_registry_artifacts(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
count = 0
for artifact in client.list_manifest_properties(BUSYBOX):
assert isinstance(artifact, ArtifactManifestProperties)
assert isinstance(artifact.created_on, datetime)
assert isinstance(artifact.last_updated_on, datetime)
assert artifact.repository_name == BUSYBOX
assert artifact.fully_qualified_reference in self.create_fully_qualified_reference(containerregistry_endpoint, BUSYBOX, artifact.digest)
count += 1
assert count > 0
@acr_preparer()
def test_list_registry_artifacts_by_page(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
results_per_page = 2
pages = client.list_manifest_properties(BUSYBOX, results_per_page=results_per_page)
page_count = 0
for page in pages.by_page():
reg_count = 0
for tag in page:
reg_count += 1
assert reg_count <= results_per_page
page_count += 1
assert page_count >= 1
@acr_preparer()
def test_list_registry_artifacts_descending(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
prev_last_updated_on = None
count = 0
for artifact in client.list_manifest_properties(BUSYBOX, order_by=ArtifactManifestOrder.LAST_UPDATED_ON_DESCENDING):
if prev_last_updated_on:
assert artifact.last_updated_on < prev_last_updated_on
prev_last_updated_on = artifact.last_updated_on
count += 1
assert count > 0
prev_last_updated_on = None
count = 0
for artifact in client.list_manifest_properties(BUSYBOX, order_by="timedesc"):
if prev_last_updated_on:
assert artifact.last_updated_on < prev_last_updated_on
prev_last_updated_on = artifact.last_updated_on
count += 1
assert count > 0
@acr_preparer()
def test_list_registry_artifacts_ascending(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
prev_last_updated_on = None
count = 0
for artifact in client.list_manifest_properties(BUSYBOX, order_by=ArtifactManifestOrder.LAST_UPDATED_ON_ASCENDING):
if prev_last_updated_on:
assert artifact.last_updated_on > prev_last_updated_on
prev_last_updated_on = artifact.last_updated_on
count += 1
assert count > 0
prev_last_updated_on = None
count = 0
for artifact in client.list_manifest_properties(BUSYBOX, order_by="timeasc"):
if prev_last_updated_on:
assert artifact.last_updated_on > prev_last_updated_on
prev_last_updated_on = artifact.last_updated_on
count += 1
assert count > 0
@acr_preparer()
def test_get_manifest_properties(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_manifest_properties(repo, tag)
assert isinstance(properties, ArtifactManifestProperties)
assert properties.repository_name == repo
assert properties.fully_qualified_reference in self.create_fully_qualified_reference(containerregistry_endpoint, repo, properties.digest)
@acr_preparer()
def test_get_manifest_properties_does_not_exist(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
manifest = client.get_manifest_properties(repo, tag)
digest = manifest.digest
digest = digest[:-10] + u"a" * 10
with pytest.raises(ResourceNotFoundError):
client.get_manifest_properties(repo, digest)
@acr_preparer()
def test_update_manifest_properties(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_manifest_properties(repo, tag)
properties.can_delete = False
properties.can_read = False
properties.can_write = False
properties.can_list = False
received = client.update_manifest_properties(repo, tag, properties)
assert received.can_delete == properties.can_delete
assert received.can_read == properties.can_read
assert received.can_write == properties.can_write
assert received.can_list == properties.can_list
properties.can_delete = True
properties.can_read = True
properties.can_write = True
properties.can_list = True
received = client.update_manifest_properties(repo, tag, properties)
assert received.can_delete == True
assert received.can_read == True
assert received.can_write == True
assert received.can_list == True
@acr_preparer()
def test_update_manifest_properties_kwargs(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_manifest_properties(repo, tag)
received = client.update_manifest_properties(repo, tag, can_delete=False)
assert received.can_delete == False
received = client.update_manifest_properties(repo, tag, can_read=False)
assert received.can_read == False
received = client.update_manifest_properties(repo, tag, can_write=False)
assert received.can_write == False
received = client.update_manifest_properties(repo, tag, can_list=False)
assert received.can_list == False
received = client.update_manifest_properties(
repo, tag, can_delete=True, can_read=True, can_write=True, can_list=True
)
assert received.can_delete == True
assert received.can_read == True
assert received.can_write == True
assert received.can_list == True
@acr_preparer()
def test_get_tag_properties(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_tag_properties(repo, tag)
assert isinstance(properties, ArtifactTagProperties)
assert properties.name == tag
@acr_preparer()
def test_get_tag_properties_does_not_exist(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
with pytest.raises(ResourceNotFoundError):
client.get_tag_properties("Nonexistent", "Nonexistent")
@acr_preparer()
def test_update_tag_properties(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_tag_properties(repo, tag)
properties.can_delete = False
properties.can_read = False
properties.can_write = False
properties.can_list = False
received = client.update_tag_properties(repo, tag, properties)
assert received.can_delete == properties.can_delete
assert received.can_read == properties.can_read
assert received.can_write == properties.can_write
assert received.can_list == properties.can_list
properties.can_delete = True
properties.can_read = True
properties.can_write = True
properties.can_list = True
received = client.update_tag_properties(repo, tag, properties)
assert received.can_delete == True
assert received.can_read == True
assert received.can_write == True
assert received.can_list == True
@acr_preparer()
def test_update_tag_properties_kwargs(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_tag_properties(repo, tag)
received = client.update_tag_properties(repo, tag, can_delete=False)
assert received.can_delete == False
received = client.update_tag_properties(repo, tag, can_read=False)
assert received.can_read == False
received = client.update_tag_properties(repo, tag, can_write=False)
assert received.can_write == False
received = client.update_tag_properties(repo, tag, can_list=False)
assert received.can_list == False
received = client.update_tag_properties(
repo, tag, can_delete=True, can_read=True, can_write=True, can_list=True
)
assert received.can_delete == True
assert received.can_read == True
assert received.can_write == True
assert received.can_list == True
@acr_preparer()
def test_list_tag_properties(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
tags = ["{}:{}".format(repo, tag + str(i)) for i in range(4)]
self.import_image(containerregistry_endpoint, HELLO_WORLD, tags)
client = self.create_registry_client(containerregistry_endpoint)
count = 0
for tag in client.list_tag_properties(repo):
assert "{}:{}".format(repo, tag.name) in tags
count += 1
assert count == 4
@acr_preparer()
def test_list_tag_properties_order_descending(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
tags = ["{}:{}".format(repo, tag + str(i)) for i in range(4)]
self.import_image(containerregistry_endpoint, HELLO_WORLD, tags)
client = self.create_registry_client(containerregistry_endpoint)
prev_last_updated_on = None
count = 0
for tag in client.list_tag_properties(repo, order_by=ArtifactTagOrder.LAST_UPDATED_ON_DESCENDING):
assert "{}:{}".format(repo, tag.name) in tags
if prev_last_updated_on:
assert tag.last_updated_on < prev_last_updated_on
prev_last_updated_on = tag.last_updated_on
count += 1
assert count == 4
prev_last_updated_on = None
count = 0
for tag in client.list_tag_properties(repo, order_by="timedesc"):
assert "{}:{}".format(repo, tag.name) in tags
if prev_last_updated_on:
assert tag.last_updated_on < prev_last_updated_on
prev_last_updated_on = tag.last_updated_on
count += 1
assert count == 4
@acr_preparer()
def test_list_tag_properties_order_ascending(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
tags = ["{}:{}".format(repo, tag + str(i)) for i in range(4)]
self.import_image(containerregistry_endpoint, HELLO_WORLD, tags)
client = self.create_registry_client(containerregistry_endpoint)
prev_last_updated_on = None
count = 0
for tag in client.list_tag_properties(repo, order_by=ArtifactTagOrder.LAST_UPDATED_ON_ASCENDING):
assert "{}:{}".format(repo, tag.name) in tags
if prev_last_updated_on:
assert tag.last_updated_on > prev_last_updated_on
prev_last_updated_on = tag.last_updated_on
count += 1
assert count == 4
prev_last_updated_on = None
count = 0
for tag in client.list_tag_properties(repo, order_by="timeasc"):
assert "{}:{}".format(repo, tag.name) in tags
if prev_last_updated_on:
assert tag.last_updated_on > prev_last_updated_on
prev_last_updated_on = tag.last_updated_on
count += 1
assert count == 4
@acr_preparer()
def test_delete_tag(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
tags = ["{}:{}".format(repo, tag + str(i)) for i in range(4)]
self.import_image(containerregistry_endpoint, HELLO_WORLD, tags)
client = self.create_registry_client(containerregistry_endpoint)
client.delete_tag(repo, tag + str(0))
count = 0
for tag in client.list_tag_properties(repo):
assert "{}:{}".format(repo, tag.name) in tags[1:]
count += 1
assert count == 3
@acr_preparer()
def test_delete_tag_does_not_exist(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
client.delete_tag(DOES_NOT_EXIST, DOES_NOT_EXIST)
@acr_preparer()
def test_delete_manifest(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
client.delete_manifest(repo, tag)
self.sleep(10)
with pytest.raises(ResourceNotFoundError):
client.get_manifest_properties(repo, tag)
@acr_preparer()
def test_delete_manifest_does_not_exist(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
manifest = client.get_manifest_properties(repo, tag)
digest = manifest.digest
digest = digest[:-10] + u"a" * 10
client.delete_manifest(repo, digest)
@acr_preparer()
def test_expiration_time_parsing(self, containerregistry_endpoint):
from azure.containerregistry._authentication_policy import ContainerRegistryChallengePolicy
client = self.create_registry_client(containerregistry_endpoint)
for repo in client.list_repository_names():
pass
for policy in client._client._client._pipeline._impl_policies:
if isinstance(policy, ContainerRegistryChallengePolicy):
policy._exchange_client._expiration_time = 0
break
count = 0
for repo in client.list_repository_names():
count += 1
assert count >= 1
# Live only, the fake credential doesn't check auth scope the same way
@pytest.mark.live_test_only
@acr_preparer()
def test_construct_container_registry_client(self, containerregistry_endpoint):
authority = get_authority(containerregistry_endpoint)
credential = self.get_credential(authority)
client = ContainerRegistryClient(endpoint=containerregistry_endpoint, credential=credential, audience="https://microsoft.com")
with pytest.raises(ClientAuthenticationError):
properties = client.get_repository_properties(HELLO_WORLD)
with pytest.raises(ValueError):
client = ContainerRegistryClient(endpoint=containerregistry_endpoint, credential=credential)
@acr_preparer()
def test_set_api_version(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
assert client._client._config.api_version == "2021-07-01"
client = self.create_registry_client(containerregistry_endpoint, api_version = "2019-08-15-preview")
assert client._client._config.api_version == "2019-08-15-preview"
with pytest.raises(ValueError):
client = self.create_registry_client(containerregistry_endpoint, api_version = "2019-08-15")
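# For reference, the minimal client construction these tests exercise looks
# roughly like this (the endpoint, credential, and audience values below are
# placeholders, not values the tests use; note the tests show that omitting
# the audience raises ValueError in this API version):
#
#   from azure.identity import DefaultAzureCredential
#   client = ContainerRegistryClient(
#       endpoint="https://myregistry.azurecr.io",
#       credential=DefaultAzureCredential(),
#       audience="https://management.azure.com",
#   )
#   for repo in client.list_repository_names():
#       print(repo)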
| Azure/azure-sdk-for-python | sdk/containerregistry/azure-containerregistry/tests/test_container_registry_client.py | Python | mit | 23,434 | 0.004139 |
from django.conf.urls import patterns, include, url
from django.contrib.sitemaps import Sitemap
from django.views.generic import TemplateView
from django.contrib import admin
from {{ project_name }}.views import HomePageView, ContactPageView, RobotPageView, HumanPageView
from {{ project_name }}.sitemap import BlogSitemap, HardCodedSitemap
admin.autodiscover()
sitemaps = {
'blog': BlogSitemap,
'hardcodedpages': HardCodedSitemap,
}
urlpatterns = patterns('',
url(
regex=r"^$",
view=HomePageView.as_view(),
name="homepage",
),
url(
regex=r"^contact/$",
view=ContactPageView.as_view(),
name="contactpage",
),
url(
regex=r"^robots\.txt$",
view=RobotPageView.as_view(),
name="robots",
),
url(
regex=r"^humans\.txt$",
view=HumanPageView.as_view(),
name="humans",
),
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
url(r'^grappelli/', include('grappelli.urls')),
url(r'^tinymce/', include('tinymce.urls')),
url(r'^admin/', include(admin.site.urls)),
url("^blog/", include('blog.urls', namespace='blog', app_name='blog')),
)
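# The sitemap classes imported above live in {{ project_name }}/sitemap.py,
# which is not shown here. A rough sketch of what HardCodedSitemap might look
# like (an assumption for illustration, not the template's actual contents):
#
#   from django.core.urlresolvers import reverse
#
#   class HardCodedSitemap(Sitemap):
#       changefreq = 'monthly'
#       priority = 0.5
#
#       def items(self):
#           return ['homepage', 'contactpage']
#
#       def location(self, item):
#           return reverse(item)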
| vandorjw/django-template-project | project/project_name/urls.py | Python | mit | 1,221 | 0.005733 |
#!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2012, Yujin Robot
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the Yujin Robot nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Author: Younghun Ju <yhju@yujinrobot.com> <yhju83@gmail.com>
import roslib; roslib.load_manifest('kobuki_auto_docking')
import rospy
import actionlib
from kobuki_msgs.msg import AutoDockingAction, AutoDockingGoal
from actionlib_msgs.msg import GoalStatus
from std_msgs.msg import Bool
def doneCb(status, result):
  if 0: print ''  # no-op guard so every state check below can be an elif
elif status == GoalStatus.PENDING : state='PENDING'
elif status == GoalStatus.ACTIVE : state='ACTIVE'
elif status == GoalStatus.PREEMPTED : state='PREEMPTED'
elif status == GoalStatus.SUCCEEDED : state='SUCCEEDED'
elif status == GoalStatus.ABORTED : state='ABORTED'
elif status == GoalStatus.REJECTED : state='REJECTED'
elif status == GoalStatus.PREEMPTING: state='PREEMPTING'
elif status == GoalStatus.RECALLING : state='RECALLING'
elif status == GoalStatus.RECALLED : state='RECALLED'
elif status == GoalStatus.LOST : state='LOST'
# Print state of action server
print 'Result - [ActionServer: ' + state + ']: ' + result.text
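  # An equivalent lookup-table form of the chain above (sketch only; the elif
  # chain is left as-is to preserve the original behavior):
  #
  #   _STATES = {getattr(GoalStatus, n): n for n in (
  #       'PENDING', 'ACTIVE', 'PREEMPTED', 'SUCCEEDED', 'ABORTED',
  #       'REJECTED', 'PREEMPTING', 'RECALLING', 'RECALLED', 'LOST')}
  #   state = _STATES.get(status, 'UNKNOWN')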
def activeCb():
  if 0: print 'Action server went active.'  # debug print, disabled
def feedbackCb(feedback):
# Print state of dock_drive module (or node.)
print 'Feedback: [DockDrive: ' + feedback.state + ']: ' + feedback.text
def callback(data):
if data.data == True:
dock_drive_client()
def dock_drive_client():
# add timeout setting
client = actionlib.SimpleActionClient('dock_drive_action', AutoDockingAction)
while not client.wait_for_server(rospy.Duration(5.0)):
if rospy.is_shutdown(): return
print 'Action server is not connected yet. still waiting...'
goal = AutoDockingGoal();
client.send_goal(goal, doneCb, activeCb, feedbackCb)
print 'Goal: Sent.'
rospy.on_shutdown(client.cancel_goal)
client.wait_for_result()
#print ' - status:', client.get_goal_status_text()
return client.get_result()
if __name__ == '__main__':
try:
rospy.init_node('dock_drive_client_py', anonymous=True)
topic = "/robotcmd/dock"
if rospy.has_param("~dock_active_topic"):
topic = rospy.get_param("~dock_active_topic")
rospy.Subscriber(topic,Bool, callback)
rospy.spin()
#print ''
#print "Result: ", result
except rospy.ROSInterruptException:
print "program interrupted before completion"
| CARMinesDouai/MultiRobotExplorationPackages | inria_demo/scripts/autodock_client.py | Python | mit | 3,861 | 0.019684 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2015 Therp BV <http://therp.nl>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
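# Odoo/OpenERP module manifest for the x2many 2D matrix widget: 'data' loads
# the backend XML (typically the asset bundle registration) and 'qweb' the
# client-side template used by the widget.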
{
"name": "2D matrix for x2many fields",
"version": "1.0",
"author": "Therp BV",
"license": "AGPL-3",
"category": "Hidden/Dependency",
"summary": "Show list fields as a matrix",
"depends": [
'web',
],
"data": [
'views/templates.xml',
],
"qweb": [
'static/src/xml/web_widget_x2many_2d_matrix.xml',
],
"test": [
],
"auto_install": False,
"installable": True,
"application": False,
"external_dependencies": {
'python': [],
},
}
| cgstudiomap/cgstudiomap | main/parts/web/web_widget_x2many_2d_matrix/__openerp__.py | Python | agpl-3.0 | 1,516 | 0 |
# Code in this file is copied and adapted from
# https://github.com/openai/evolution-strategies-starter.
import gym
import numpy as np
import tree
import ray
import ray.experimental.tf_utils
from ray.rllib.agents.es.es_tf_policy import make_session
from ray.rllib.models import ModelCatalog
from ray.rllib.policy.policy import Policy
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils.filter import get_filter
from ray.rllib.utils.framework import try_import_tf
from ray.rllib.utils.spaces.space_utils import unbatch
tf1, tf, tfv = try_import_tf()
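# tf1 is the TF1.x-compatible API, tf the installed TensorFlow module, and
# tfv the detected major version (1 or 2).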
class ARSTFPolicy(Policy):
def __init__(self, obs_space, action_space, config):
super().__init__(obs_space, action_space, config)
self.action_noise_std = self.config["action_noise_std"]
self.preprocessor = ModelCatalog.get_preprocessor_for_space(
self.observation_space)
self.observation_filter = get_filter(self.config["observation_filter"],
self.preprocessor.shape)
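        # The observation filter (e.g. a MeanStdFilter) normalizes raw
        # observations before they are fed to the policy network.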
self.single_threaded = self.config.get("single_threaded", False)
if self.config["framework"] == "tf":
self.sess = make_session(single_threaded=self.single_threaded)
self.inputs = tf1.placeholder(
tf.float32, [None] + list(self.preprocessor.shape))
else:
if not tf1.executing_eagerly():
tf1.enable_eager_execution()
self.sess = self.inputs = None
# Policy network.
self.dist_class, dist_dim = ModelCatalog.get_action_dist(
self.action_space, self.config["model"], dist_type="deterministic")
self.model = ModelCatalog.get_model_v2(
obs_space=self.preprocessor.observation_space,
action_space=self.action_space,
num_outputs=dist_dim,
model_config=self.config["model"])
self.sampler = None
if self.sess:
dist_inputs, _ = self.model({SampleBatch.CUR_OBS: self.inputs})
dist = self.dist_class(dist_inputs, self.model)
self.sampler = dist.sample()
self.variables = ray.experimental.tf_utils.TensorFlowVariables(
dist_inputs, self.sess)
self.sess.run(tf1.global_variables_initializer())
else:
self.variables = ray.experimental.tf_utils.TensorFlowVariables(
[], None, self.model.variables())
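        # Total number of trainable parameters, i.e. the length of the flat
        # weight vector that ARS perturbs.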
self.num_params = sum(
np.prod(variable.shape.as_list())
for _, variable in self.variables.variables.items())
def compute_actions(self,
observation,
add_noise=False,
update=True,
**kwargs):
# Squeeze batch dimension (we always calculate actions for only a
# single obs).
observation = observation[0]
observation = self.preprocessor.transform(observation)
observation = self.observation_filter(observation[None], update=update)
# `actions` is a list of (component) batches.
# Eager mode.
if not self.sess:
dist_inputs, _ = self.model({SampleBatch.CUR_OBS: observation})
dist = self.dist_class(dist_inputs, self.model)
actions = dist.sample()
actions = tree.map_structure(lambda a: a.numpy(), actions)
# Graph mode.
else:
actions = self.sess.run(
self.sampler, feed_dict={self.inputs: observation})
actions = unbatch(actions)
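        # Optional exploration noise for continuous (Box) action spaces.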
if add_noise and isinstance(self.action_space, gym.spaces.Box):
actions += np.random.randn(*actions.shape) * self.action_noise_std
return actions, [], {}
def compute_single_action(self,
observation,
add_noise=False,
update=True,
**kwargs):
action, state_outs, extra_fetches = self.compute_actions(
[observation], add_noise=add_noise, update=update, **kwargs)
return action[0], state_outs, extra_fetches
def get_state(self):
return {"state": self.get_flat_weights()}
def set_state(self, state):
return self.set_flat_weights(state["state"])
def set_flat_weights(self, x):
self.variables.set_flat(x)
def get_flat_weights(self):
return self.variables.get_flat()
| richardliaw/ray | rllib/agents/ars/ars_tf_policy.py | Python | apache-2.0 | 4,456 | 0 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
import os
import waterdemand
basepath = waterdemand.__path__[0]
path = os.path.join(basepath, "docs", "latex")
cwd = os.getcwd()
os.chdir(path)
modules = ["userguide"]
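# Each entry is the basename of a .tex file under docs/latex to compile.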
for module in modules:
    # Hack: write a stub index file to prime the pump, so that latex doesn't
    # give an error the first time it is run. (There's probably a better way
    # to do this, but this works.)
    with open(module + ".ind", 'w') as index_file:
        index_file.write(r"\begin{theindex} \end{theindex}")
# run latex, make the index, then run latex again to resolve cross-references correctly and include the index
if os.system("pdflatex -interaction=nonstopmode " + module + ".tex") > 0:
raise Exception("pdflatex failed")
# The makeindex command will fail if the module doesn't have an index - so it's important NOT to check
# if the result of the system call succeeded. (The advantage of calling it anyway is that we can just
# process all of the files with a loop, rather than having separate processing for modules with and without indices.)
os.system("makeindex " + module + ".idx")
if os.system("pdflatex -interaction=nonstopmode " + module + ".tex") > 0:
        raise Exception("pdflatex failed")
os.chdir(cwd)
# The old script called latex (rather than pdflatex), followed by dvips and ps2pdf.
# pdflatex works better than latex followed by dvips and ps2pdf for producing
# pdf files when there are no figures (it gets the links right in the table of contents).
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Ref: http://doc.qt.io/qt-5/modelview.html#2-1-a-read-only-table
import sys
from PyQt5.QtCore import Qt, QAbstractTableModel, QVariant
from PyQt5.QtWidgets import QApplication, QTableView, QVBoxLayout, QWidget
class MyModel(QAbstractTableModel):
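    # A minimal read-only model: a fixed 2x3 grid whose cells display their
    # own (row, column) coordinates.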
def __init__(self, parent):
super().__init__(parent)
def rowCount(self, parent):
return 2
def columnCount(self, parent):
return 3
def data(self, index, role):
if role == Qt.DisplayRole:
return "({},{})".format(index.row(), index.column())
return QVariant()
if __name__ == '__main__':
app = QApplication(sys.argv)
window = QWidget()
table_view1 = QTableView()
table_view2 = QTableView()
my_model = MyModel(None)
table_view1.setModel(my_model)
table_view2.setModel(my_model)
# http://doc.qt.io/qt-5/model-view-programming.html#sharing-selections-among-views
table_view2.setSelectionModel(table_view1.selectionModel()) # <- Share the SelectionModel
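    # Selections made in either view are now mirrored in the other, because
    # both views share a single QItemSelectionModel.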
# Set the layout
vbox = QVBoxLayout()
vbox.addWidget(table_view1)
vbox.addWidget(table_view2)
window.setLayout(vbox)
# Show
window.show()
    # The main loop of the application; event handling starts from this point.
    # exec_() has a trailing underscore because 'exec' is a Python keyword.
    exit_code = app.exec_()
    # sys.exit() ensures a clean exit, reporting to the environment how the
    # application ended.
    sys.exit(exit_code)
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-07 20:55
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
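    # Auto-generated initial migration: creates the Answer model with foreign
    # keys to Question, Test, and two user FKs (the answering user and the
    # question owner).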
dependencies = [
('questions', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('tests', '0003_test_user'),
]
operations = [
migrations.CreateModel(
name='Answer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('answer', models.CharField(max_length=50)),
('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='questions.Question', verbose_name=b'Question')),
('question_owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='owner', to=settings.AUTH_USER_MODEL, verbose_name=b'Question owner')),
('tests', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tests.Test', verbose_name=b'Test')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL, verbose_name=b'User')),
],
),
]
| MauricioDinki/hatefull | hatefull/apps/answers/migrations/0001_initial.py | Python | bsd-3-clause | 1,380 | 0.003623 |