repo_name | path | language | license | size | score | prefix | middle | suffix |
---|---|---|---|---|---|---|---|---|
stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34 | stringlengths 0-8.16k | stringlengths 3-512 | stringlengths 0-8.17k |
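Each row below pairs repository metadata with one Python source file split into prefix, middle, and suffix spans for fill-in-the-middle training. As a minimal sketch (assuming the dump is exported as line-delimited JSON keyed by the column names above; the file name here is illustrative, not part of the dump), the rows could be read back and reassembled like this:

import json

def iter_fim_rows(path):
    # One JSON object per line, keyed by the columns in the table above.
    with open(path, encoding='utf-8') as handle:
        for line in handle:
            row = json.loads(line)
            # Concatenating the three spans restores the original file.
            source = row['prefix'] + row['middle'] + row['suffix']
            yield row['repo_name'], row['path'], source

for repo, file_path, source in iter_fim_rows('fim_rows.jsonl'):
    print(repo, file_path, len(source))
    break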
jkyeung/XlsxWriter | xlsxwriter/test/comparison/test_table19.py | Python | bsd-2-clause | 1,269 | 0 |
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2016, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'table19.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with tables."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.set_column('C:F', 10.288)
worksheet.add_table('C3:F13',
{'columns': [{},
{},
                                 {},
                                 {'header': " Column4 "}]})
        workbook.close()
self.assertExcelEqual()
YAmikep/django-xmlmapping | xmlmapping/models.py | Python | bsd-3-clause | 8,995 | 0.002779 |
# Django
from django.db import models
# Third-party apps
import jsonfield # http://pypi.python.org/pypi/django-jsonfield/
from lxml import etree # http://lxml.de/
# Internal
from .log import default_logger as logger
from .utils.introspection import ModelFactory
from .utils.serializers import deserialize_function
from .utils.xmlhelper import XMLHelper
from .utils import sum_dicts
class Mapping(models.Model):
"""A mapping configuration."""
data_map = jsonfield.JSONField(default='{}') # need a default value
label = models.CharField(max_length=255, unique=True) # label for reference
def __unicode__(self):
return u'%s' % (
self.label,
)
@property
def log_desc(self):
return u'<Mapping: %s>' % (self,)
def load_xml(self, xml, root_path=None):
"""Loads a piece of XML in the DB, i.e. map XML data to a Django Model.
Args:
xml: a string being the XML data to load
root_path: the root (dotted path) of the XML data. Not mandatory but needed when the XML is not the root as defined in the mapping.
e.g. If you defined a mapping for rss.channel.item
and the XML you are passing actually starts with the channel element, you must then set root_path to rss.channel
Returns:
A dict summarizing the number of objects created per element-mapping.
"""
log_desc = '%s - Loading XML' % (self.log_desc,)
try:
# Parse the XML
root = etree.fromstring(xml, parser=etree.XMLParser())
except Exception as e:
logger.error('%s => XML cannot be parsed. [KO]\n%s' % (log_desc, e))
return 0
nb_created = {k: 0 for k in self.data_map.keys()}
# For each element-mapping
for e_path, conf in self.data_map.iteritems():
nb_created[e_path] = nb_created[e_path] + self._map_elements_by_path(e_path, conf, root, root_path)
logger.info('%s => %s' % (log_desc, ' ; '.join(['%s: %s objects created' % (k, v) for (k, v) in nb_created.items()])))
return nb_created
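    # Illustrative usage (added note, values are made up): with a mapping
    # defined for rss.channel.item, XML whose root is already the
    # <channel> element must pass the enclosing path explicitly:
    #   created = mapping.load_xml(channel_xml, root_path='rss.channel')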
def load_xml_chunks(self, xml_chunks, root_path):
"""Loads a collection of XML chunks being all of the same kind.
Args:
xml_chunks: a list of XML string data to load
root_path: the root (dotted path) of the XML data. Not mandatory but needed when the XML is not the root as defined in the mapping.
e.g. If you defined a mapping for rss.channel.item
and the XML you are passing actually starts with the channel element, you must then set root_path to rss.channel
Returns:
A dict summarizing the number of objects created per element-mapping.
TODO: Make it more efficient instead of a simple loop.
"""
log_desc = '%s - Loading XML chunks' % (self.log_desc,)
logger.info('%s => start' % (log_desc,))
nb_created = {}
for xml in xml_chunks:
nb_created = sum_dicts(nb_created, self.load_xml(xml, root_path))
logger.info('%s => end' % (log_desc,))
return nb_created
def _map_elements_by_path(self, path, conf, node, node_path):
"""Maps all the elements matching the path in the node with the mapping configuration.
Args:
path: the path of the elements to seek
conf: the mapping configuration
node: the node from which to seek
node_path: the path of the node
Returns:
The number of Models created in the DB for all the found elements.
"""
# Get the configuration
get_id = conf.get('get_id', None)
models = conf.get('models', None)
        if models is None:
            logger.error('%s => No models found in the configuration. [KO]\nconfiguration=%s' % (self.log_desc, conf))
            return 0
        log_desc = '%s - Mapping all the elements matching path=%s to %s Models' % (self.log_desc, path, len(models))
# Get all the matching elements
elems = XMLHelper.get_elements(path, node, node_path)
# Log if no elements were found.
if not elems:
logger.warning('%s => No elements found. node_path=%s' % (log_desc, node_path))
return 0
nb_created = 0
for elem in elems:
nb_created = nb_created + self._map_element(elem, models, get_id)
nb_elems = len(elems)
nb_targeted = nb_elems * len(models)
        logger.info('%s => Found: %s, Targeted Objects: %s, Created Objects: %s %s' % (
log_desc,
nb_elems,
nb_targeted,
nb_created,
(nb_targeted == nb_created and ['[OK]'] or ['=> numbers different [KO]'])[0]
)
)
return nb_created
    def _map_element(self, element, models, get_id=None):
"""Maps an element to several models.
Args:
element: an XML element
            models: the models to map to
            get_id: the function used to compute the element's ID, to identify it amongst the others.
Returns:
The number of Models created in the DB for the passed element.
"""
elem_id = '(id:%s) ' % (self._resolve_get_id(get_id)(element),)
status = {k: '[KO]' for k in models.keys()}
nb_created = 0
for app_model, fields in models.iteritems():
try:
ins = self._map_to_model(element, app_model, fields)
status[app_model] = 'pk=%s' % (ins.pk)
nb_created = nb_created + 1
                logger.info('%s - Mapping the element %sto the Model %s with fields %s => object created, pk=%s [OK]' % (
self.log_desc,
elem_id,
app_model,
fields,
ins.pk,
)
)
except Exception as err:
                logger.error('%s - Mapping the element %sto the Model %s with fields %s => Cannot be mapped. [KO]\n%s' % (
self.log_desc,
elem_id,
app_model,
fields,
err,
)
)
logger.info('%s - Element %smapped to %s Models => %s' % (
self.log_desc,
elem_id,
len(models),
' ; '.join(['%s: %s' % (k, v) for (k, v) in status.items()]),
)
)
return nb_created
def _map_to_model(self, element, app_model, fields):
"""Maps an element to a Model.
Args:
element: the XML element to map
app_model: the model to map defined by: app_label.model_name
fields: the fields mapping
Returns:
The instance of the created Model.
"""
ins = ModelFactory.create(app_model)
self._map_to_fields(element, ins, fields)
ins.save()
return ins
def _map_to_fields(self, element, ins, fields):
"""Maps an element to the fields.
Args:
element: the XML element to map
ins: the instance of the created Model
fields: the fields mapping
"""
for field, configuration in fields.items():
if isinstance(configuration, basestring):
setattr(ins, field, XMLHelper.get_text_unescape(element, configuration))
elif isinstance(configuration, list):
values = (XMLHelper.get_text_unescape(element, v) for v in configuration)
setattr(ins, field, ' '.join(values))
elif isinstance(configuration, dict):
                pass
JackRamey/LaserPony | dbutils.py | Python | mit | 1,980 | 0.007071 |
"""Usage: dbutils.py [-dfh]
Options:
-d --dropall Deletes all collections in the database. Use this very wisely.
-f --force Forces all questions to 'yes'
-h --help show this
"""
import sys
from docopt import docopt
from laserpony import app
from laserpony.util import db
##UTILITY FUNCTIONS
#snagged this from http://code.activestate.com/recipes/577058/
def query_yes_no(question, default="yes"):
"""Ask a yes/no question via raw_input() and return their answer.
"question" is a string that is presented to the user.
"default" is the presumed answer if the user just hits <Enter>.
It must be "yes" (the default), "no" or None (meaning
an answer is required of the user).
The "answer" return value is one of "yes" or "no".
"""
valid = {"yes":True, "y":True, "ye":True,
"no":False, "n":False}
if default == None:
prompt = " [y/n] "
elif default == "yes":
prompt = " [Y/n] "
elif default == "no":
        prompt = " [y/N] "
else:
raise ValueError("invalid default answer: '%s'" % default)
while True:
sys.stdout.write(question + prompt)
choice = raw_input().lower()
if default is not None and choice == '':
return valid[default]
elif choice in valid:
return valid[choice]
else:
sys.stdout.write("Please respond with 'yes' or 'no' "\
"(or 'y'
|
or 'n').\n")
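# Illustrative call (added note, not part of the original script):
#   query_yes_no("Drop the database?", default="no")
# prompts with "[y/N]", returns False on a bare Enter, and loops until
# the reply is a recognised prefix of "yes" or "no".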
# Switch over to the virtual environment version of python
arguments = docopt(__doc__, argv=sys.argv[1:], version='0.1')
delete_db_message = "Are you absolutely sure you want to delete the entire database?"
if arguments['--dropall']:
if arguments['--force']:
db.connection.drop_database(app.config['MONGODB_SETTINGS']['DB'])
else:
if query_yes_no(delete_db_message, default="no"):
db.connection.drop_database(app.config['MONGODB_SETTINGS']['DB'])
bally12345/enigma2 | lib/python/Components/DiskInfo.py | Python | gpl-2.0 | 1,054 | 0.032258 |
from GUIComponent import GUIComponent
from VariableText import VariableText
from os import statvfs
from enigma import eLabel
# TODO: Harddisk.py has similar functions, but only similar.
# Fix this to use the same code.
class DiskInfo(VariableText, GUIComponent):
FREE = 0
USED = 1
SIZE = 2
def __init__(self, path, type, update = True):
GUIComponent.__init__(self)
VariableText.__init__(self)
self.type = type
self.path = path
if update:
            self.update()
def update(self):
try:
stat = statvfs(self.path)
except OSError:
return -1
if self.type == self.FREE:
try:
percent = '(' + str((100 * stat.f_bavail) // stat.f_blocks) + '%)'
free = stat.f_bfree * stat.f_bsize
if free < 10000000:
free = _("%d Kb") % (free >> 10)
elif free < 10000000000:
free = _("%d Mb") % (free >> 20)
else:
free = _("%d Gb") % (free >> 30)
self.setText(" ".join((free, percent, _("free diskspace"))))
except:
# occurs when f_blocks is 0 or a similar error
self.setText("-?-")
GUI_WIDGET = eLabel
YudingZhou/Yo-Fatty | ITA/graphic/chapter22_elementary_test.py | Python | gpl-2.0 | 1,375 | 0 |
'''
Copyright (c) zhouyuding1990@gmail.com
'''
import unittest
from chapter22_elementary import Vertex
from chapter22_elementary import breath_first_search_input_adjacency_list
def printX(x):
print x
class MyTestCase(unittest.TestCase):
def test_something(self):
self.assertEqual(True, False)
def test_run_BFS_on_adjacency_list(self):
''' 4X4 vertex'''
v1 = Vertex(1)
v3 = Vertex(3)
v4 = Vertex(4)
v6 = Vertex(6)
v7 = Vertex(7)
v10 = Vertex(10)
v14 = Vertex(14)
v15 = Vertex(15)
v1.ADJ_ = [v4, v6]
v3.ADJ_ = [v6, v7]
v4.ADJ_ = [v1]
v6.ADJ_ = [v1, v3, v7, v10]
v7.ADJ_ = [v3, v6, v10]
v10.ADJ_ = [v6, v7, v14, v15]
v14.ADJ_ = [v10, v15]
v15.ADJ_ = [v14]
print """
input graph
0 1 2 3
+---+---+---+---+
0 | | * | | * |
+---+---+---+---+
1 | * | | * | * |
+---+---+---+---+
2 | | | * | |
+---+---+---+---+
3 | | | * | * |
+---+---+---+---+
"""
G = [v1, v3, v4, v6, v7, v10, v14, v15]
print "start with v3"
breath_first_search_input_adjacency_list(G, v3, lambda u: printX(u))
if __name__ == '__main__':
unittest.main()
sputnick-dev/weboob | modules/citelis/module.py | Python | agpl-3.0 | 2,036 | 0 |
# -*- coding: utf-8 -*-
# Copyright(C) 2013 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.backend import Module, BackendConfig
from weboob.tools.value import ValueBackendPassword
from weboob.capabilities.bank import CapBank, AccountNotFound
from .browser import CitelisBrowser
__all__ = ['CitelisModule']
class CitelisModule(Module, CapBank):
NAME = 'citelis'
DESCRIPTION = u'Citélis'
MAINTAINER = u'Laurent Bachelier'
EMAIL = 'laurent@bachelier.name'
LICENSE = 'AGPLv3+'
VERSION = '1.1'
BROWSER = CitelisBrowser
CONFIG = BackendConfig(
ValueBackendPassword('merchant_id', label='Merchant ID', masked=False),
ValueBackendPassword('login', label='Account ID', masked=False),
ValueBackendPassword('password', label='Password'))
def create_default_browser(self):
return self.create_browser(self.config['merchant_id'].get(),
self.config['login'].get(),
self.config['password'].get())
def iter_accounts(self):
return self.browser.get_accounts_list()
def get_account(self, _id):
for account in self.iter_accounts():
if account.id == _id:
return account
raise AccountNotFound()
def iter_history(self, account):
return self.browser.iter_history(account)
xingjian-f/Leetcode-solution | 190. Reverse Bits.py | Python | mit | 231 | 0.030303 |
class Solution(object):
    def reverseBits(self, n):
"""
:type n: int
:rtype: int
"""
ret = 0
for i in range(32):
ret += (n%2) * 2**(31-i)
n /= 2
        return ret
rdo-infra/ci-config | ci-scripts/infra-setup/roles/rrcockpit/files/telegraf_py3/influxdb_utils.py | Python | apache-2.0 | 487 | 0 |
import time
from datetime import datetime
def format_ts_from_float(ts):
return int(ts) * 1000000000
def format_ts_from_date(ts):
    return format_ts_from_float(time.mktime(ts.timetuple()))
def format_ts_from_str(ts, pattern='%Y-%m-%d %H:%M:%S'):
return format_ts_from_date(datetime.strptime(ts, pattern))
def format_ts_from_last_modified(ts, pattern='%a, %d %b %Y %H:%M:%S %Z'):
ts = datetime.strptime(ts, pattern)
    return int(time.mktime(ts.timetuple()) * 1000)
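# Worked example (added for clarity): format_ts_from_float(1600000000.5)
# truncates to 1600000000 seconds and scales by 10**9, giving
# 1600000000000000000, an epoch-nanosecond timestamp; by contrast,
# format_ts_from_last_modified returns epoch milliseconds.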
maximilianofaccone/puppy-siberian | usr/share/bleachbit/CleanerML.py | Python | gpl-3.0 | 8,532 | 0.00082 |
# vim: ts=4:sw=4:expandtab
# BleachBit
# Copyright (C) 2014 Andrew Ziem
# http://bleachbit.sourceforge.net
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Create cleaners from CleanerML (markup language)
"""
import os
import sys
import traceback
import xml.dom.minidom
import Cleaner
import Common
from Action import ActionProvider
from Common import _
from General import boolstr_to_bool, getText
from FileUtilities import listdir
class CleanerML:
"""Create a cleaner from CleanerML"""
def __init__(self, pathname, xlate_cb=None):
"""Create cleaner from XML in pathname.
If xlate_cb is set, use it as a callback for each
translate-able string.
"""
self.action = None
self.cleaner = Cleaner.Cleaner()
self.option_id = None
self.option_name = None
self.option_description = None
self.option_warning = None
self.xlate_cb = xlate_cb
if None == self.xlate_cb:
self.xlate_cb = lambda x, y = None: None # do nothing
dom = xml.dom.minidom.parse(pathname)
self.handle_cleaner(dom.getElementsByTagName('cleaner')[0])
def get_cleaner(self):
"""Return the created cleaner"""
return self.cleaner
def os_match(self, os_str):
"""Return boolean whether operating system matches"""
# If blank or if in .pot-creation-mode, return true.
if len(os_str) == 0 or None != self.xlate_cb:
return True
# Otherwise, check platform.
if os_str == 'linux' and sys.platform.startswith('linux'):
return True
if os_str == 'windows' and sys.platform == 'win32':
return True
return False
def handle_cleaner(self, cleaner):
"""<cleaner> element"""
if not self.os_match(cleaner.getAttribute('os')):
return
self.cleaner.id = cleaner.getAttribute('id')
self.handle_cleaner_label(cleaner.getElementsByTagName('label')[0])
description = cleaner.getElementsByTagName('description')
if description and description[0].parentNode == cleaner:
self.handle_cleaner_description(description[0])
for option in cleaner.getElementsByTagName('option'):
try:
self.handle_cleaner_option(option)
except:
print str(sys.exc_info()[1])
print option.toxml()
self.handle_cleaner_running(cleaner.getElementsByTagName('running'))
def handle_cleaner_label(self, label):
"""<label> element under <cleaner>"""
self.cleaner.name = _(getText(label.childNodes))
translate = label.getAttribute('translate')
if translate and boolstr_to_bool(translate):
self.xlate_cb(self.cleaner.name)
def handle_cleaner_description(self, description):
"""<description> element under <cleaner>"""
self.cleaner.description = _(getText(description.childNodes))
self.xlate_cb(self.cleaner.description)
def handle_cleaner_running(self, running_elements):
"""<running> element under <cleaner>"""
# example: <running type="command">opera</running>
for running in running_elements:
detection_type = running.getAttribute('type')
value = getText(running.childNodes)
self.cleaner.add_running(detection_type, value)
def handle_cleaner_option(self, option):
"""<option> element"""
self.option_id = option.getAttribute('id')
self.option_description = None
self.option_name = None
self.handle_cleaner_option_label(
option.getElementsByTagName('label')[0])
description = option.getElementsByTagName('description')
self.handle_cleaner_option_description(description[0])
warning = option.getElementsByTagName('warning')
if warning:
self.handle_cleaner_option_warning(warning[0])
if self.option_warning:
self.cleaner.set_warning(self.option_id, self.option_warning)
for action in option.getElementsByTagName('action'):
self.handle_cleaner_option_action(action)
self.cleaner.add_option(
self.option_id, self.option_name, self.option_description)
def handle_cleaner_option_label(self, label):
"""<label> element under <option>"""
self.option_name = _(getText(label.childNodes))
translate = label.getAttribute('translate')
translators = label.getAttribute('translators')
if not translate or boolstr_to_bool(translate):
self.xlate_cb(self.option_name, translators)
def handle_cleaner_option_description(self, description):
"""<description> element under <option>"""
self.option_description = _(getText(description.childNodes))
self.xlate_cb(self.option_description)
translators = description.getAttribute('translators')
self.xlate_cb(self.option_description, translators)
def handle_cleaner_option_warning(self, warning):
"""<warning> element under <option>"""
self.option_warning = _(getText(warning.childNodes))
self.xlate_cb(self.option_warning)
def handle_cleaner_option_action(self, action_node):
"""<action> element under <option>"""
        command = action_node.getAttribute('command')
provider = None
for actionplugin in ActionProvider.plugins:
if actionplugin.action_key == command:
provider = actionplugin(action_node)
if None == provider:
raise RuntimeError("Invalid command '%s'" % command)
self.cleaner.add_action(self.option_id, provider)
def list_cleanerml_files(local_only=False):
"""List CleanerML files"""
cleanerdirs = (Common.local_cleaners_dir,
Common.personal_cleaners_dir)
if not local_only and Common.system_cleaners_dir:
cleanerdirs += (Common.system_cleaners_dir, )
for pathname in listdir(cleanerdirs):
if not pathname.lower().endswith('.xml'):
continue
import stat
st = os.stat(pathname)
if sys.platform != 'win32' and stat.S_IMODE(st[stat.ST_MODE]) & 2:
print "warning: ignoring cleaner '%s' because it is world writable" % pathname
continue
yield pathname
def load_cleaners():
"""Scan for CleanerML and load them"""
for pathname in list_cleanerml_files():
try:
xmlcleaner = CleanerML(pathname)
except:
print "Error reading file '%s'" % pathname
traceback.print_exc()
else:
cleaner = xmlcleaner.get_cleaner()
if cleaner.is_usable():
Cleaner.backends[cleaner.id] = cleaner
else:
print "debug: '%s' is not usable" % pathname
def pot_fragment(msgid, pathname, translators=None):
"""Create a string fragment for generating .pot files"""
if translators:
translators = "#. %s\n" % translators
else:
translators = ""
ret = '''%s#: %s
msgid "%s"
msgstr ""
''' % (translators, pathname, msgid)
return ret
def create_pot():
"""Create a .pot for translation using gettext"""
f = open('../po/cleanerml.pot', 'w')
for pathname in listdir('../cleaners'):
if not pathname.lower().endswith(".xml"):
continue
strings = []
try:
CleanerML(pathname,
lambda newstr, translators=None:
strings.append([newstr, translators])
n6g7/scrapy-itemagic | itemagic/magic.py | Python | mit | 1,649 | 0.032141 |
from extractors import XPathExtractor
from parser import Parser
from rules import ConstRule, Map, MapRule, SubPathRule, UrlRule, XPathRule
def is_list(obj):
return isinstance(obj, (list, tuple))
def is_str(obj):
return isinstance(obj, (str, unicode))
def parse_xpath_rule(line):
l = len(line)
if l == 2:
# Basic XPath
if is_str(line[1]):
return XPathRule(line[0], line[1])
# SubPath
elif is_list(line[1]):
sub_rules = [parse_xpath_rule(sub_rule) for sub_rule in line[1]]
return SubPathRule(line[0], *sub_rules)
elif l == 3:
# Keyword XPath
if isinstance(line[2], dict):
return XPathRule(line[0], line[1], **line[2])
# MapRule
elif is_list(line[2]):
maps = []
for map_line in line[2]:
len_map = len(map_line)
if len_map == 3:
maps.append(Map(map_line[0], map_line[1], XPathExtractor(map_line[2])))
elif len_map == 4:
join = map_line[3].pop('join', None)
maps.append(Map(map_line[0], map_line[1], XPathExtractor(map_line[2], **map_line[3]), **{'join': join}))
extractor = XPathExtractor(line[1]) if is_str(line[1]) else line[1]
return MapRule(line[0], extractor, *maps)
print 'Unknown rule : %r' % (line,)
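# Illustrative rule (added note, values are made up): a 2-tuple whose
# second item is a string builds a basic XPath rule, e.g.
#   parse_xpath_rule(('title', '//h1/text()'))
# returns XPathRule('title', '//h1/text()').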
def itemagic(const=None, url=None, xpath=None, *args):
rules = []
# Build const rules
if is_list(const):
for line in const:
rules.append(ConstRule(line[0], line[1]))
elif isinstance(const, dict):
for field in const:
rules.append(ConstRule(field, const[field]))
# Build url rule
if is_str(url):
rules.append(UrlRule(url))
# Build xpath rules
    if is_list(xpath):
for line in xpath:
rules.append(parse_xpath_rule(line))
return Parser(*rules)
claudiodriussi/DynaQ | dynaq/workspace.py | Python | lgpl-3.0 | 5,345 | 0.002058 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# workspace.py
#
# Copyright (c) 2014
# Author: Claudio Driussi <claudio.driussi@gmail.com>
#
from sqlalchemy.ext.declarative import declarative_base
from .db import *
class WorkSpace(object):
"""Encapsulate an whole SQLAlchemy orm from an DynaQ db definition object"""
def __init__(self, db, engine):
"""init the workspace
:param db: DynaQ db definition object
:param engine: SQLAlchemy engine string
:return: None
"""
self.db = db
self.engine = engine
self.metadata = sa.MetaData()
self.Base = declarative_base(self.engine, self.metadata)
self.tables = {}
def generate_orm(self, prefix='', pref_tabels={}, defaults={}):
"""Generate the SQLAlche
|
my orm objects
the objects are stored in self.tables dictionary
:param prefix: an optional prefix for table names ie: if the table
name is "user" and pr
|
efix is "data_" the name become "data_user"
        :param pref_tabels: an optional dict of per-table prefix overrides,
        ie: if pref_tabels is {'zip_codes': ''} the name of the
        zip table is "zip_codes" even if prefix is "data_"
        :param defaults: function for handling default values (not handled yet)
        :return: a self.sa_obj() object for convenient handling of orm classes
"""
# build objects
self.tables = {}
for table in self.db.tables.values():
self.tables[table.alias] = \
type(table.name.capitalize(),(self.Base,),
self._set_table(table, prefix, pref_tabels, defaults))
# build relations
for alias in self.tables:
self._set_retations(alias)
return self.sa_obj()
def _set_table(self, table, prefix='', pref_tabels={}, defaults={}):
"""Create a SQLAlchemy class object
This private method called from self.generate_orm method is the core
for SQLAlchemy objects creation, all supported features are implemented
here.
:param table: the DynaQ table for class generation
:param prefix: same of generate_orm
:param pref_tabels: same of generate_orm
:param defaults: same of generate_orm
:return: the class object for the table
"""
def get_name(tname):
s = tname.replace('_'+USRFLD_KEY,'')
pref = pref_tabels[s] if s in pref_tabels else prefix
return pref + tname
table_data = {}
table_data['__tablename__'] = get_name(table.name)
table_data['__dqt__'] = table
for f in table.fields:
foreignkey = None
if isinstance(f.type, Table):
foreignkey = "%s.%s" % (get_name(f.type.name), f.type.key.name)
db_type = f.get_type()
sa_type = db_type.sa_type
if db_type.length and sa_type in [sa.Numeric, sa.Float]:
sa_type = sa_type(db_type.length, f.get('decimals'))
if db_type.length and sa_type in [sa.String, sa.String, sa.CHAR, sa.LargeBinary, sa.Text,]:
sa_type = sa_type(db_type.length)
if foreignkey:
c = sa.Column(sa_type, sa.ForeignKey(foreignkey))
else:
c = sa.Column(sa_type, primary_key=f == table.key)
c.__dqf__ = f
default = defaults.get(f.get('default'), None)
if default:
c.ColumnDefault(default)
table_data[f.name] = c
ii = []
for i in table.indexes:
if i.name == 'primary':
continue
ii.append(sa.Index('idx_%s_%s' % (table.alias, i.name), *i.fields))
# if needed add more table args
if ii:
table_data['__table_args__'] = tuple(ii)
return table_data
def _set_retations(self, alias):
"""Create the orm relationships.
This private method called from self.generate_orm method generate the
one to many relations for each related field of the table pointed by
alias parameter. It handle "cascade referential integrity" if the
related field has the property "child == True"
:param alias: alias name of the table
:return: None
"""
for field in self.db.tables[alias].fields:
if field.get('child'):
parent = self.tables[field.type.alias]
child = self.tables[alias]
setattr(parent, alias,
sa.orm.relationship(child,
backref=parent.__tablename__,
cascade="all, delete, delete-orphan"))
def sa_obj(self):
"""Build a convenient object for accessing to SqlAlchemy ORM objects
Example:
ws = dq.WorkSpace(db, engine)
ws.generate_orm()
o = ws.sa_obj()
now if in your definition is a table called users, you can do:
user = o.users()
:return: the container object
"""
t = {}
for k,v in self.tables.items():
t[k] = v
return type('WSO', (object,), t)
def session(self):
"""Return a session instance for the workspace"""
return sa.orm.sessionmaker(bind=self.engine)()
spulec/moto | moto/transcribe/responses.py | Python | apache-2.0 | 7,998 | 0.0005 |
import json
from moto.core.responses import BaseResponse
from moto.core.utils import amzn_request_id
from .models import transcribe_backends
class TranscribeResponse(BaseResponse):
@property
def transcribe_backend(self):
return transcribe_backends[self.region]
@property
def request_params(self):
try:
return json.loads(self.body)
except ValueError:
return {}
@amzn_request_id
def start_transcription_job(self):
name = self._get_param("TranscriptionJobName")
response = self.transcribe_backend.start_transcription_job(
transcription_job_name=name,
language_code=self._get_param("LanguageCode"),
media_sample_rate_hertz=self._get_param("MediaSampleRateHertz"),
media_format=self._get_param("MediaFormat"),
media=self._get_param("Media"),
output_bucket_name=self._get_param("OutputBucketName"),
output_key=self._get_param("OutputKey"),
output_encryption_kms_key_id=self._get_param("OutputEncryptionKMSKeyId"),
settings=self._get_param("Settings"),
model_settings=self._get_param("ModelSettings"),
job_execution_settings=self._get_param("JobExecutionSettings"),
content_redaction=self._get_param("ContentRedaction"),
identify_language=self._get_param("IdentifyLanguage"),
language_options=self._get_param("LanguageOptions"),
)
return json.dumps(response)
@amzn_request_id
def start_medical_transcription_job(self):
name = self._get_param("MedicalTranscriptionJobName")
response = self.transcribe_backend.start_medical_transcription_job(
medical_transcription_job_name=name,
language_code=self._get_param("LanguageCode"),
media_sample_rate_hertz=self._get_param("MediaSampleRateHertz"),
media_format=self._get_param("MediaFormat"),
media=self._get_param("Media"),
output_bucket_name=self._get_param("OutputBucketName"),
output_encryption_kms_key_id=self._get_param("OutputEncryptionKMSKeyId"),
settings=self._get_param("Settings"),
specialty=self._get_param("Specialty"),
type=self._get_param("Type"),
)
return json.dumps(response)
@amzn_request_id
def list_transcription_jobs(self):
state_equals = self._get_param("Status")
job_name_contains = self._get_param("JobNameContains")
        next_token = self._get_param("NextToken")
max_results = self._get_param("MaxResults")
        response = self.transcribe_backend.list_transcription_jobs(
state_equals=state_equals,
job_name_contains=job_name_contains,
next_token=next_token,
max_results=max_results,
)
return json.dumps(response)
@amzn_request_id
def list_medical_transcription_jobs(self):
status = self._get_param("Status")
job_name_contains = self._get_param("JobNameContains")
next_token = self._get_param("NextToken")
max_results = self._get_param("MaxResults")
response = self.transcribe_backend.list_medical_transcription_jobs(
status=status,
job_name_contains=job_name_contains,
next_token=next_token,
max_results=max_results,
)
return json.dumps(response)
@amzn_request_id
def get_transcription_job(self):
transcription_job_name = self._get_param("TranscriptionJobName")
response = self.transcribe_backend.get_transcription_job(
transcription_job_name=transcription_job_name
)
return json.dumps(response)
@amzn_request_id
def get_medical_transcription_job(self):
medical_transcription_job_name = self._get_param("MedicalTranscriptionJobName")
response = self.transcribe_backend.get_medical_transcription_job(
medical_transcription_job_name=medical_transcription_job_name
)
return json.dumps(response)
@amzn_request_id
def delete_transcription_job(self):
transcription_job_name = self._get_param("TranscriptionJobName")
response = self.transcribe_backend.delete_transcription_job(
transcription_job_name=transcription_job_name
)
return json.dumps(response)
@amzn_request_id
def delete_medical_transcription_job(self):
medical_transcription_job_name = self._get_param("MedicalTranscriptionJobName")
response = self.transcribe_backend.delete_medical_transcription_job(
medical_transcription_job_name=medical_transcription_job_name
)
return json.dumps(response)
@amzn_request_id
def create_vocabulary(self):
vocabulary_name = self._get_param("VocabularyName")
language_code = self._get_param("LanguageCode")
phrases = self._get_param("Phrases")
vocabulary_file_uri = self._get_param("VocabularyFileUri")
response = self.transcribe_backend.create_vocabulary(
vocabulary_name=vocabulary_name,
language_code=language_code,
phrases=phrases,
vocabulary_file_uri=vocabulary_file_uri,
)
return json.dumps(response)
@amzn_request_id
def create_medical_vocabulary(self):
vocabulary_name = self._get_param("VocabularyName")
language_code = self._get_param("LanguageCode")
vocabulary_file_uri = self._get_param("VocabularyFileUri")
response = self.transcribe_backend.create_medical_vocabulary(
vocabulary_name=vocabulary_name,
language_code=language_code,
vocabulary_file_uri=vocabulary_file_uri,
)
return json.dumps(response)
@amzn_request_id
def get_vocabulary(self):
vocabulary_name = self._get_param("VocabularyName")
response = self.transcribe_backend.get_vocabulary(
vocabulary_name=vocabulary_name
)
return json.dumps(response)
@amzn_request_id
def get_medical_vocabulary(self):
vocabulary_name = self._get_param("VocabularyName")
response = self.transcribe_backend.get_medical_vocabulary(
vocabulary_name=vocabulary_name
)
return json.dumps(response)
@amzn_request_id
def list_vocabularies(self):
state_equals = self._get_param("StateEquals")
name_contains = self._get_param("NameContains")
next_token = self._get_param("NextToken")
max_results = self._get_param("MaxResults")
response = self.transcribe_backend.list_vocabularies(
state_equals=state_equals,
name_contains=name_contains,
next_token=next_token,
max_results=max_results,
)
return json.dumps(response)
@amzn_request_id
def list_medical_vocabularies(self):
state_equals = self._get_param("StateEquals")
name_contains = self._get_param("NameContains")
next_token = self._get_param("NextToken")
max_results = self._get_param("MaxResults")
response = self.transcribe_backend.list_medical_vocabularies(
state_equals=state_equals,
name_contains=name_contains,
next_token=next_token,
max_results=max_results,
)
return json.dumps(response)
@amzn_request_id
def delete_vocabulary(self):
vocabulary_name = self._get_param("VocabularyName")
response = self.transcribe_backend.delete_vocabulary(
vocabulary_name=vocabulary_name
)
return json.dumps(response)
@amzn_request_id
def delete_medical_vocabulary(self):
vocabulary_name = self._get_param("VocabularyName")
response = self.transcribe_backend.delete_medical_vocabulary(
vocabulary_name=vocabulary_name
)
return json.dumps(response)
Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/hplip/base/mdns.py | Python | gpl-3.0 | 10,078 | 0.003671 |
# -*- coding: utf-8 -*-
#
# (c) Copyright 2003-2007 Hewlett-Packard Development Company, L.P.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Author: Don Welch
#
# RFC 1035
# Std Lib
import sys
import time
import socket
import select
import struct
import random
import re
import cStringIO
# Local
from g import *
import utils
MAX_ANSWERS_PER_PACKET = 24
QTYPE_A = 1
QTYPE_TXT = 16
QTYPE_SRV = 33
QTYPE_AAAA = 28
QTYPE_PTR = 12
QCLASS_IN = 1
def read_utf8(offset, data, l):
return offset+l, data[offset:offset+l].decode('utf-8')
def read_data(offset, data, l):
return offset+l, data[offset:offset+l]
def read_data_unpack(offset, data, fmt):
l = struct.calcsize(fmt)
return offset+l, struct.unpack(fmt, data[offset:offset+l])
def read_name(offset, data):
result = ''
off = offset
next = -1
first = off
while True:
l = ord(data[off])
off += 1
if l == 0:
break
t = l & 0xC0
if t == 0x00:
off, utf8 = read_utf8(off, data, l)
result = ''.join([result, utf8, '.'])
elif t == 0xC0:
if next < 0:
next = off + 1
off = ((l & 0x3F) << 8) | ord(data[off])
if off >= first:
log.error("Bad domain name (circular) at 0x%04x" % off)
break
first = off
else:
log.error("Bad domain name at 0x%04x" % off)
break
if next >= 0:
offset = next
else:
offset = off
return offset, result
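# Note (added for clarity): in read_name a length byte tagged 0xC0 is an
# RFC 1035 compression pointer; its low 6 bits plus the next byte form a
# 14-bit offset of an earlier name in the packet, which is why
# create_outgoing_packets below emits 0xc00c to point back at offset 12.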
def write_name(packet, name):
for p in name.split('.'):
utf8_string = p.encode('utf-8')
packet.write(struct.pack('!B', len(utf8_string)))
packet.write(utf8_string)
def create_outgoing_packets(answers):
index = 0
num_questions = 1
first_packet = True
packets = []
packet = cStringIO.StringIO()
answer_record = cStringIO.StringIO()
while True:
packet.seek(0)
packet.truncate()
num_answers = len(answers[index:index+MAX_ANSWERS_PER_PACKET])
if num_answers == 0 and num_questions == 0:
break
flags = 0x0200 # truncated
if len(answers) - index <= MAX_ANSWERS_PER_PACKET:
flags = 0x0000 # not truncated
# ID/FLAGS/QDCOUNT/ANCOUNT/NSCOUNT/ARCOUNT
packet.write(struct.pack("!HHHHHH", 0x0000, flags, num_questions, num_answers, 0x0000, 0x0000))
if num_questions:
# QNAME
write_name(packet, "_pdl-datastream._tcp.local") # QNAME
packet.write(struct.pack("!B", 0x00))
# QTYPE/QCLASS
packet.write(struct.pack("!HH", QTYPE_
|
PTR, QCLASS_IN))
first_record = True
for d in answers[index:index+MAX_ANSWERS_PER_PACKET]:
answer_record.seek(0)
            answer_record.truncate()
# NAME
if not first_packet and first_record:
first_record = False
write_name(answer_record, "_pdl-datastream._tcp.local")
answer_record.write(struct.pack("!B", 0x00))
else:
answer_record.write(struct.pack("!H", 0xc00c)) # Pointer
# TYPE/CLASS
answer_record.write(struct.pack("!HH", QTYPE_PTR, QCLASS_IN))
# TTL
answer_record.write(struct.pack("!I", 0xffff))
rdlength_pos = answer_record.tell()
# RDLENGTH
answer_record.write(struct.pack("!H", 0x0000)) # (adj later)
# RDATA
write_name(answer_record, d)
answer_record.write(struct.pack("!H", 0xc00c)) # Ptr
# RDLENGTH
rdlength = answer_record.tell() - rdlength_pos - 2
answer_record.seek(rdlength_pos)
answer_record.write(struct.pack("!H", rdlength))
answer_record.seek(0)
packet.write(answer_record.read())
packets.append(packet.getvalue())
index += 20
if first_packet:
num_questions = 0
first_packet = False
return packets
def detectNetworkDevices(ttl=4, timeout=10):
mcast_addr, mcast_port ='224.0.0.251', 5353
found_devices = {}
answers = []
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
x = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
x.connect(('1.2.3.4', 56))
intf = x.getsockname()[0]
x.close()
s.setblocking(0)
ttl = struct.pack('B', ttl)
except socket.error:
log.error("Network error")
return {}
try:
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
except (AttributeError, socket.error):
pass
try:
s.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_TTL, ttl)
s.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(intf) + socket.inet_aton('0.0.0.0'))
s.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP ,1)
except Exception, e:
log.error("Unable to setup multicast socket for mDNS: %s" % e)
return {}
now = time.time()
next = now
last = now + timeout
delay = 1
while True:
now = time.time()
if now > last:
break
if now >= next:
try:
for p in create_outgoing_packets(answers):
log.debug("Outgoing: (%d)" % len(p))
log.log_data(p, width=16)
s.sendto(p, 0, (mcast_addr, mcast_port))
except socket.error, e:
log.error("Unable to send broadcast DNS packet: %s" % e)
next += delay
delay *= 2
update_spinner()
r, w, e = select.select([s], [], [s], 0.5)
if not r:
continue
data, addr = s.recvfrom(16384)
if data:
update_spinner()
y = {'num_devices' : 1, 'num_ports': 1, 'product_id' : '', 'mac': '',
'status_code': 0, 'device2': '0', 'device3': '0', 'note': ''}
log.debug("Incoming: (%d)" % len(data))
log.log_data(data, width=16)
offset = 0
offset, (id, flags, num_questions, num_answers, num_authorities, num_additionals) = \
read_data_unpack(offset, data, "!HHHHHH")
log.debug("Response: ID=%d FLAGS=0x%x Q=%d A=%d AUTH=%d ADD=%d" %
(id, flags, num_questions, num_answers, num_authorities, num_additionals))
for question in range(num_questions):
update_spinner()
offset, name = read_name(offset, data)
offset, (typ, cls) = read_data_unpack(offset, data, "!HH")
log.debug("Q: %s TYPE=%d CLASS=%d" % (name, typ, cls))
fmt = '!HHiH'
for record in range(num_answers + num_authorities + num_additionals):
update_spinner()
offset, name = read_name(offset, data)
offset, info = read_data_unpack(offset, data, "!HHiH")
if info[0] == QTYPE_A: # ipv4 address
offset, result = read_data(offset, data, 4)
ip = '.'.join([str(ord(x)) for x in result])
log.debug("A: %s" % ip)
y['ip'] = ip
elif info[0] == QTYPE_PTR: # PTR
offset, name = read_name(offset, data)
log.d
MagicForest/Python | src/training/Core2/Chapter14ExecutionEnvironment/hello.py | Python | apache-2.0 | 88 | 0.022727 |
def say_hello():
print 'Hello'
if __name__ == '__main__':
say_hello()
SamaraCardoso27/eMakeup | backend/appengine/routes/home.py | Python | mit | 492 | 0.010163 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from config.template_middleware import TemplateResponse
from gaecookie.decorator import no_csrf
from gaepermission.decorator import login_not_required
@login_not_required
@no_csrf
def index():
return TemplateResponse()
def insertStudent():
return TemplateResponse(template_path="/student/insert_student.html")
def searchStudent():
return TemplateResponse(template_path="/student/search_student.html")
adieu/authentic2 | authentic2/auth2_auth/auth2_ssl/views.py | Python | agpl-3.0 | 462 | 0.008658 |
import functools
import registration.views
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.conf import settings
import forms
def register(request):
'''Registration page for SSL auth without CA'''
    next = request.GET.get(REDIRECT_FIELD_NAME,
                           settings.LOGIN_REDIRECT_URL)
return registration.views.register(request, success_url=next,
form_class=functools.partial(forms.RegistrationForm,
request=request))
cgvarela/pyserverlib | kontalklib/logging.py | Python | gpl-3.0 | 1,793 | 0.003904 |
# -*- coding: utf-8 -*-
'''Twisted logging to Python logging bridge.'''
'''
Kontalk Pyserver
Copyright (C) 2011 Kontalk Devteam <devteam@kontalk.org>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from twisted.python import log
LEVEL_DEBUG = 1
LEVEL_INFO = 1 << 1
LEVEL_WARN = 1 << 2
LEVEL_ERROR = 1 << 3
# all levels
LEVEL_ALL = LEVEL_DEBUG | LEVEL_INFO | LEVEL_WARN | LEVEL_ERROR
level = 0
def init(cfg):
    '''Initializes logging system.'''
global level
l = cfg['server']['log.levels']
if 'ALL' in l:
level = LEVEL_ALL
else:
if 'DEBUG' in l:
level |= LEVEL_DEBUG
if 'INFO' in l:
level |= LEVEL_INFO
if 'WARN' in l:
            level |= LEVEL_WARN
if 'ERROR' in l:
level |= LEVEL_ERROR
def debug(*args, **kwargs):
global level
if level & LEVEL_DEBUG:
log.msg(*args, **kwargs)
def info(*args, **kwargs):
global level
if level & LEVEL_INFO:
log.msg(*args, **kwargs)
def warn(*args, **kwargs):
global level
if level & LEVEL_WARN:
log.msg(*args, **kwargs)
def error(*args, **kwargs):
global level
if level & LEVEL_ERROR:
log.msg(*args, **kwargs)
mancoast/CPythonPyc_test | fail/301_pickletester.py | Python | gpl-3.0 | 35,665 | 0.001514 |
import unittest
import pickle
import pickletools
import copyreg
from test.support import TestFailed, TESTFN, run_with_locale
from pickle import bytes_types
# Tests that try a number of pickle protocols should have a
# for proto in protocols:
# kind of outer loop.
protocols = range(pickle.HIGHEST_PROTOCOL + 1)
# Return True if opcode code appears in the pickle, else False.
def opcode_in_pickle(code, pickle):
for op, dummy, dummy in pickletools.genops(pickle):
if op.code == code.decode("latin-1"):
return True
return False
# Return the number of times opcode code appears in pickle.
def count_opcode(code, pickle):
n = 0
for op, dummy, dummy in pickletools.genops(pickle):
if op.code == code.decode("latin-1"):
n += 1
return n
# We can't very well test the extension registry without putting known stuff
# in it, but we have to be careful to restore its original state. Code
# should do this:
#
# e = ExtensionSaver(extension_code)
# try:
# fiddle w/ the extension registry's stuff for extension_code
# finally:
# e.restore()
class ExtensionSaver:
# Remember current registration for code (if any), and remove it (if
# there is one).
def __init__(self, code):
self.code = code
if code in copyreg._inverted_registry:
self.pair = copyreg._inverted_registry[code]
copyreg.remove_extension(self.pair[0], self.pair[1], code)
else:
self.pair = None
# Restore previous registration for code.
def restore(self):
code = self.code
curpair = copyreg._inverted_registry.get(code)
if curpair is not None:
copyreg.remove_extension(curpair[0], curpair[1], code)
pair = self.pair
if pair is not None:
copyreg.add_extension(pair[0], pair[1], code)
class C:
def __eq__(self, other):
return self.__dict__ == other.__dict__
import __main__
__main__.C = C
C.__module__ = "__main__"
class myint(int):
def __init__(self, x):
self.str = str(x)
class initarg(C):
def __init__(self, a, b):
self.a = a
self.b = b
def __getinitargs__(self):
return self.a, self.b
class metaclass(type):
pass
class use_metaclass(object, metaclass=metaclass):
pass
# DATA0 .. DATA2 are the pickles we expect under the various protocols, for
# the object returned by create_data().
DATA0 = (
b'(lp0\nL0L\naL1L\naF2.0\nac'
b'builtins\ncomplex\n'
b'p1\n(F3.0\nF0.0\ntp2\nRp'
b'3\naL1L\naL-1L\naL255L\naL-'
b'255L\naL-256L\naL65535L\na'
b'L-65535L\naL-65536L\naL2'
b'147483647L\naL-2147483'
b'647L\naL-2147483648L\na('
b'Vabc\np4\ng4\nccopyreg'
b'\n_reconstructor\np5\n('
b'c__main__\nC\np6\ncbu'
b'iltins\nobject\np7\nNt'
b'p8\nRp9\n(dp10\nVfoo\np1'
b'1\nL1L\nsVbar\np12\nL2L\nsb'
b'g9\ntp13\nag13\naL5L\na.'
)
# Disassembly of DATA0
DATA0_DIS = """\
0: ( MARK
1: l LIST (MARK at 0)
2: p PUT 0
5: L LONG 0
9: a APPEND
10: L LONG 1
14: a APPEND
15: F FLOAT 2.0
20: a APPEND
21: c GLOBAL 'builtins complex'
39: p PUT 1
42: ( MARK
43: F FLOAT 3.0
48: F FLOAT 0.0
53: t TUPLE (MARK at 42)
54: p PUT 2
57: R REDUCE
58: p PUT 3
61: a APPEND
62: L LONG 1
66: a APPEND
67: L LONG -1
72: a APPEND
73: L LONG 255
79: a APPEND
80: L LONG -255
87: a APPEND
88: L LONG -256
95: a APPEND
96: L LONG 65535
104: a APPEND
105: L LONG -65535
114: a APPEND
115: L LONG -65536
124: a APPEND
125: L LONG 2147483647
138: a APPEND
139: L LONG -2147483647
153: a APPEND
154: L LONG -2147483648
168: a APPEND
169: ( MARK
170: V UNICODE 'abc'
175: p PUT 4
178: g GET 4
181: c GLOBAL 'copyreg _reconstructor'
205: p PUT 5
208: ( MARK
209: c GLOBAL '__main__ C'
221: p PUT 6
224: c GLOBAL 'builtins object'
241: p PUT 7
244: N NONE
245: t TUPLE (MARK at 208)
246: p PUT 8
249: R REDUCE
250: p PUT 9
253: ( MARK
254: d DICT (MARK at 253)
255: p PUT 10
259: V UNICODE 'foo'
264: p PUT 11
268: L LONG 1
272: s SETITEM
273: V UNICODE 'bar'
278: p PUT 12
282: L LONG 2
286: s SETITEM
287: b BUILD
288: g GET 9
291: t TUPLE (MARK at 169)
292: p PUT 13
296: a APPEND
297: g GET 13
301: a APPEND
302: L LONG 5
306: a APPEND
307: . STOP
highest protocol among opcodes = 0
"""
DATA1 = (
b']q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c'
b'builtins\ncomplex\nq\x01'
b'(G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00t'
b'q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xffJ'
b'\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff\xff'
b'\xff\x7fJ\x01\x00\x00\x80J\x00\x00\x00\x80(X\x03\x00\x00\x00ab'
b'cq\x04h\x04ccopyreg\n_reco'
b'nstructor\nq\x05(c__main'
b'__\nC\nq\x06cbuiltins\n'
b'object\nq\x07Ntq\x08Rq\t}q\n('
b'X\x03\x00\x00\x00fooq\x0bK\x01X\x03\x00\x00\x00bar'
b'q\x0cK\x02ubh\ttq\rh\rK\x05e.'
)
# Disassembly of DATA1
DATA1_DIS = """\
0: ] EMPTY_LIST
1: q BINPUT 0
3: ( MARK
4: K BININT1 0
6: K BININT1 1
8: G BINFLOAT 2.0
17: c GLOBAL 'builtins complex'
35: q BINPUT 1
37: ( MARK
38: G BINFLOAT 3.0
47: G BINFLOAT 0.0
56: t TUPLE (MARK at 37)
57: q BINPUT 2
59: R REDUCE
60: q BINPUT 3
62: K BININT1 1
64: J BININT -1
69: K BININT1 255
71: J BININT -255
76: J BININT -256
81: M BININT2 65535
84: J BININT -65535
89: J BININT -65536
94: J BININT 2147483647
99: J BININT -2147483647
104: J BININT -2147483648
109: ( MARK
110: X BINUNICODE 'abc'
118: q BINPUT 4
120: h BINGET 4
122: c GLOBAL 'copyreg _reconstructor'
146: q BINPUT 5
148: ( MARK
149: c GLOBAL '__main__ C'
161: q BINPUT 6
163: c GLOBAL 'builtins object'
180: q BINPUT 7
182: N NONE
183: t TUPLE (MARK at 148)
184: q BINPUT 8
186: R REDUCE
187: q BINPUT 9
189: } EMPTY_DICT
190: q BINPUT 10
192: ( MARK
193: X BINUNICODE 'foo'
201: q BINPUT 11
203: K BININT1 1
205: X BINUNICODE 'bar'
213: q BINPUT 12
215: K BININT1 2
217: u SETITEMS (MARK at 192)
218: b BUILD
219: h BINGET 9
221: t TUPLE (MARK at 109)
222: q BINPUT 13
224: h BINGET 13
226: K BININT1 5
228: e APPENDS (MARK at 3)
229: . STOP
highest protocol among opcodes = 1
"""
DATA2 = (
b'\x80\x02]q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c'
b'builtins\ncomplex\n'
b'q\x01G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x86q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xff'
b'J\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff'
b'\xf
pombredanne/todomvc-django | todo/views.py | Python | mit | 148 | 0.033784 |
from django.views.generic import TemplateView
# All todos view
class Home( TemplateView ):
# Set the view template
    template_name = 'index.html'
ovnicraft/server-tools | base_export_manager/__manifest__.py | Python | agpl-3.0 | 808 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2015 Antiun Ingeniería S.L. - Antonio Espinosa
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': "Manage model export profiles",
'category': 'Personalization',
'version': '10.0.1.0.0',
'depends': [
'web',
],
'data': [
'views/assets.xml',
'views/ir_exports.xml',
'views/ir_model.xml',
'views/ir_model_access.xml',
'views/res_groups.xml',
],
'qweb': [
"static/src/xml/base.xml",
],
'author': 'Tecnativa, '
'LasLabs, '
'Ursa Information Systems, '
'Odoo Community Association (OCA)',
'website': 'https://www.tecnativa.com',
'license': 'AGPL-3',
    'installable': True,
'application': False,
}
moonbury/notebooks | github/Numpy/Chapter3/atr.py | Python | gpl-3.0 | 599 | 0.008347 |
from __future__ import print_function
import numpy as np
h, l, c = np.loadtxt('data.csv', delimiter=',', usecols=(4, 5, 6), unpack=True)
N = 5
h = h[-N:]
l = l[-N:]
print("len(h)", len(h), "len(l)", len(l))
print("Close", c)
previousclose = c[-N -1: -1]
print("len(previousclose)", len(previousclose))
print("Previous close", previousclose)
# True range: element-wise maximum of h-l, |h-previousclose|, |previousclose-l|
truerange = np.maximum(np.maximum(h - l, np.abs(h - previousclose)), np.abs(previousclose - l))
print("True range", truerange)
atr = np.zeros(N)
atr[0] = np.mean(truerange)
for i in range(1, N):
atr[i] = (N - 1) * atr[i - 1] + truerange[i]
atr[i] /= N
print("ATR", atr)
levythu/swift | swift/common/storage_policy.py | Python | apache-2.0 | 33,131 | 0 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import string
import textwrap
import six
from six.moves.configparser import ConfigParser
from swift.common.utils import (
config_true_value, SWIFT_CONF_FILE, whataremyips, list_from_csv)
from swift.common.ring import Ring, RingData
from swift.common.utils import quorum_size
from swift.common.exceptions import RingValidationError
from pyeclib.ec_iface import ECDriver, ECDriverError, VALID_EC_TYPES
LEGACY_POLICY_NAME = 'Policy-0'
VALID_CHARS = '-' + string.ascii_letters + string.digits
DEFAULT_POLICY_TYPE = REPL_POLICY = 'replication'
EC_POLICY = 'erasure_coding'
DEFAULT_EC_OBJECT_SEGMENT_SIZE = 1048576
class BindPortsCache(object):
def __init__(self, swift_dir, bind_ip):
self.swift_dir = swift_dir
self.mtimes_by_ring_path = {}
self.portsets_by_ring_path = {}
self.my_ips = set(whataremyips(bind_ip))
def all_bind_ports_for_node(self):
"""
Given an iterable of IP addresses identifying a storage backend server,
return a set of all bind ports defined in all rings for this storage
backend server.
The caller is responsible for not calling this method (which performs
at least a stat on all ring files) too frequently.
"""
# NOTE: we don't worry about disappearing rings here because you can't
# ever delete a storage policy.
for policy in POLICIES:
# NOTE: we must NOT use policy.load_ring to load the ring. Users
# of this utility function will not need the actual ring data, just
# the bind ports.
#
# This is duplicated with Ring.__init__ just a bit...
serialized_path = os.path.join(self.swift_dir,
policy.ring_name + '.ring.gz')
try:
new_mtime = os.path.getmtime(serialized_path)
except OSError:
continue
old_mtime = self.mtimes_by_ring_path.get(serialized_path)
if not old_mtime or old_mtime != new_mtime:
self.portsets_by_ring_path[serialized_path] = set(
dev['port']
for dev in RingData.load(serialized_path,
metadata_only=True).devs
if dev and dev['ip'] in self.my_ips)
self.mtimes_by_ring_path[serialized_path] = new_mtime
# No "break" here so that the above line will update the
# mtimes_by_ring_path entry for any ring that changes, not just
# the first one we notice.
# Return the requested set of ports from our (now-freshened) cache
return six.moves.reduce(set.union,
self.portsets_by_ring_path.values(), set())
class PolicyError(ValueError):
def __init__(self, msg, index=None):
if index is not None:
msg += ', for index %r' % index
super(PolicyError, self).__init__(msg)
def _get_policy_string(base, policy_index):
if policy_index == 0 or policy_index is None:
return_string = base
else:
return_string = base + "-%d" % int(policy_index)
return return_string
def get_policy_string(base, policy_or_index):
"""
Helper function to construct a string from a base and the policy.
Used to encode the policy index into either a file name or a
directory name by various modules.
:param base: the base string
:param policy_or_index: StoragePolicy instance, or an index
(string or int), if None the legacy
storage Policy-0 is assumed.
:returns: base name with policy index added
:raises: PolicyError if no policy exists with the given policy_index
"""
if isinstance(policy_or_index, BaseStoragePolicy):
policy = policy_or_index
else:
policy = POLICIES.get_by_index(policy_or_index)
if policy is None:
raise PolicyError("Unknown policy", index=policy_or_index)
return _get_policy_string(base, int(policy))
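# Illustrative behaviour (added note): the helpers above and below map
# between names and indexes, e.g. get_policy_string('objects', 0) returns
# 'objects' while index 2 yields 'objects-2'; split_policy_string inverts
# this, returning the base string and the matching policy instance.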
def split_policy_string(policy_string):
"""
Helper function to convert a string representing a base and a
policy. Used to decode the policy from either a file name or
a directory name by various modules.
:param policy_string: base name with policy index added
:raises: PolicyError if given index does not map to a valid policy
:returns: a tuple, in the form (base, policy) where base is the base
              string and policy is the StoragePolicy instance for the
index encoded in the policy_string.
"""
if '-' in policy_string:
base, policy_index = policy_string.rsplit('-', 1)
    else:
base, policy_index = policy_string, None
policy = POLICIES.get_by_index(policy_index)
if get_policy_string(base, policy) != policy_string:
raise PolicyError("Unknown policy", index=policy_index)
return base, policy
class BaseStoragePolicy(object):
"""
Represents a storage policy. Not meant to be instantiated directly;
    implement a derived subclass (e.g. StoragePolicy, ECStoragePolicy, etc.)
or use :func:`~swift.common.storage_policy.reload_storage_policies` to
load POLICIES from ``swift.conf``.
The object_ring property is lazy loaded once the service's ``swift_dir``
is known via :meth:`~StoragePolicyCollection.get_object_ring`, but it may
be over-ridden via object_ring kwarg at create time for testing or
actively loaded with :meth:`~StoragePolicy.load_ring`.
"""
policy_type_to_policy_cls = {}
def __init__(self, idx, name='', is_default=False, is_deprecated=False,
object_ring=None, aliases=''):
# do not allow BaseStoragePolicy class to be instantiated directly
if type(self) == BaseStoragePolicy:
raise TypeError("Can't instantiate BaseStoragePolicy directly")
# policy parameter validation
try:
self.idx = int(idx)
except ValueError:
raise PolicyError('Invalid index', idx)
if self.idx < 0:
raise PolicyError('Invalid index', idx)
self.alias_list = []
if not name or not self._validate_policy_name(name):
raise PolicyError('Invalid name %r' % name, idx)
self.alias_list.append(name)
if aliases:
names_list = list_from_csv(aliases)
for alias in names_list:
if alias == name:
continue
self._validate_policy_name(alias)
self.alias_list.append(alias)
self.is_deprecated = config_true_value(is_deprecated)
self.is_default = config_true_value(is_default)
if self.policy_type not in BaseStoragePolicy.policy_type_to_policy_cls:
raise PolicyError('Invalid type', self.policy_type)
if self.is_deprecated and self.is_default:
raise PolicyError('Deprecated policy can not be default. '
'Invalid config', self.idx)
self.ring_name = _get_policy_string('object', self.idx)
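        # e.g. index 0 -> 'object', index 2 -> 'object-2'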
self.object_ring = object_ring
@property
def name(self):
return self.alias_list[0]
@name.setter
def name_setter(self, name):
self._validate_policy_name(name)
self.alias_list[0] = name
@property
def aliases(self):
return ", ".join(self.alias_list)
def __int__(self):
return self.idx
def __cmp__(self, other):
return cmp(self.idx, int(other))
def __repr__(self):
return ("%s(%
|
xod442/sample_scripts
|
get-xapi.py
|
Python
|
gpl-2.0
| 3,401 | 0.000588 |
#!/usr/bin/env python3
###
# (C) Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
###
import sys
if sys.version_info < (3, 2):
raise Exception('Must use Python 3.2 or later')
import hpOneView as hpov
from pprint import pprint
def acceptEULA(con):
# See if we need to accept the EULA before we try to log in
con.get_eula_status()
try:
if con.get_eula_status() is True:
print('EULA display needed')
con.set_eula('no')
except Exception as e:
print('EXCEPTION:')
print(e)
def login(con, credential):
    # Login with given credentials
try:
con.login(credential)
except:
print('Login failed')
def getapi(sts):
version = sts.get_version()
print('currentVersion: ', version['currentVersion'])
print('minimumVersion: ', version['minimumVersion'])
def main():
# Arguments supplied from command line
if args:
credential = {'userName': args.user, 'password': args.passwd}
con = hpov.connection(args.host)
sts = hpov.settings(con)
if args.proxy:
con.set_proxy(args.proxy.split(':')[0], args.proxy.split(':')[1])
if args.cert:
con.set_trusted_ssl_bundle(args.cert)
login(con, credential)
acceptEULA(con)
getapi(sts)
else:
credential = "#"
if __name__ == '__main__':
import sys
import argparse
parser = argparse.ArgumentParser(add_help=True, description='Usage')
parser.add_argument('-a', '--appliance', dest='host', required=True,
help='HP OneView Appliance hostname or IP')
parser.add_argument('-u', '--user', dest='user', required=False,
default='Administrator', help='HP OneView Username')
parser.add_argument('-p', '--pass', dest='passwd', required=False,
help='HP OneView Password')
parser.add_argument('-c', '--certificate', dest='cert', required=False,
help='Trusted SSL Certificate Bundle in PEM '
'(Base64 Encoded DER) Format')
parser.add_argument('-r', '--proxy', dest='proxy', required=False,
                        help='Proxy (host:port format)')
args = parser.parse_args()
sys.exit(main())
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
|
jvazquez/organization
|
organization/job_offers/constants.py
|
Python
|
unlicense
| 334 | 0 |
import os
# Application constants
APP_NAME = 'job_offers'
INSTALL_DIR = os.path.dirname(os.path.abspath(__file__))
LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
LOG_NAME = os.path.join(INSTALL_DIR, 'job_offers.log')
# Testing fixtures
JOB_OFFER_FIXTURES = os.path.join(INSTALL_DIR, "fixtures/job_offers.json")
|
saketkc/open-ehr-django
|
open-ehr-django-src/labs/forms.py
|
Python
|
lgpl-3.0
| 4,518 | 0.019478 |
from django import forms
from open_ehr.labs.models import PatientInfo
from open_ehr.registration.forms import *
from open_ehr.report_manager.models import *
from django.forms.widgets import CheckboxSelectMultiple, TextInput
class PatientInfoForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.created_by = kwargs.pop('created_by',None)
self.tests_json_list = kwargs.pop('tests_json_list',None)
self.total_test_count = kwargs.pop('total_test_count',None)
self.belongs_to_lab = kwargs.pop('belongs_to_lab',None)
self.status_by_technician_json_list = kwargs.pop('status_by_technician_json_list',None)
self.results_field = kwargs.pop('results_field',None)
self.status_by_doctor_json_list = kwargs.pop('status_by_doctor_json_list',None)
        super(PatientInfoForm, self).__init__(*args, **kwargs)
self.fields['gender'] = forms.ChoiceField(choices= (('male',('Male')),('female',('Female'))))
self.fields['patient_first_name'].label="Patient's First Name:"
        self.fields['patient_last_name'].label="Patient's Last Name:"
self.fields['patient_dob'].label="Date of Birth:"
self.fields['report_due_on'].label="Report due on:"
self.fields['reference_doctor_name'].label="Reference Doctor Name:"
self.fields['sample_id'].label="Sample Id:"
#report_element_categories = ReportElementCategories.objects.all()
#category_choices = ( (x.id,x.report_element_category_name) for x in report_element_categories)
#self.fields['tests_list'] = forms.MultipleChoiceField(widget=CheckboxSelectMultiple,choices=category_choices,label=("Tests to be done"))#(required=True,widget=CheckboxSelectMultiple, choices=category_choices,label=("Tests to be done"))
self.fields['tests_list-'+str(1)] = forms.CharField(widget=forms.TextInput(),label=("Test Name "+str(1)))
for i in range(2,6):
self.fields['tests_list-'+str(i)] = forms.CharField(widget=forms.TextInput(),label=("Test Name "+str(i)),required=False)
def save(self, commit=True):
instance = super(PatientInfoForm, self).save(commit=False)
if self.created_by:
instance.created_by = self.created_by
if self.tests_json_list:
instance.tests_json_list = self.tests_json_list
if self.total_test_count:
instance.total_test_count = self.total_test_count
if self.belongs_to_lab:
instance.belongs_to_lab = self.belongs_to_lab
if self.status_by_doctor_json_list:
instance.status_by_doctor_json_list = self.status_by_doctor_json_list
if self.status_by_technician_json_list:
instance.status_by_technician_json_list = self.status_by_technician_json_list
if self.results_field:
instance.results_field = self.results_field
instance.save()
class Meta:
model = PatientInfo
exclude =('tests_json_list','created_by','technician_assigned','is_complete_by_technician','is_complete_by_doctor', 'total_test_count','status_by_technician_json_list','status_by_doctor_json_list','is_verified_by_doctor','verified_by_doctor','share_count','shared_with_json_list','results_field')
class PatientResultsForm(PatientInfoForm):
def __init__(self,*args,**kwargs):
super(PatientResultsForm, self).__init__(*args, **kwargs)
        for i in range(1, 6):
            del self.fields['tests_list-' + str(i)]
def render_patient_result_form(test_name_fields,report_id):
attrs_dict ={}
attrs_dict["readonly"]=True
attrs_dict["value"] = report_id
attrs_dict["required"] = True
fields={"report_id":forms.CharField(widget=forms.TextInput(attrs=attrs_dict),required=True)}
for test_id in test_name_fields:
fields[test_name_fields[test_id]] = forms.CharField(widget= TextInput({ "required": "True","class": "test_input_field" }),required=True)
return type('PatientResultsForm', (forms.BaseForm,), { 'base_fields': fields })
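# Note: type('PatientResultsForm', (forms.BaseForm,), {'base_fields': fields})
# builds a Form class on the fly, with one CharField per requested test,
# because the set of tests is only known at runtime.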
def render_patient_completed_form(test_name_fields,test_name_values,report_id,test_number):
attrs_dict ={}
attrs_dict["readonly"]=True
attrs_dict["value"] = report_id
attrs_dict["required"] = True
fields={"report_id":forms.CharField(widget=forms.TextInput(attrs=attrs_dict),required=True)}
for test_id in test_name_fields:
fields[test_name_fields[test_id]] = forms.CharField(widget= TextInput(attrs=attrs_dict),required=True)
return type('PatientResultsForm', (forms.BaseForm,), { 'base_fields': fields })
|
moveone/dbmail
|
python/bin/autoreplier.py
|
Python
|
gpl-2.0
| 3,784 | 0.026163 |
#!/usr/bin/python
#
# Copyright: NFG, Paul Stevens <paul@nfg.nl>, 2005
# License: GPL
#
# $Id: autoreplier.py,v 1.4 2004/12/01 10:15:58 paul Exp $
#
# Reimplementation of the famous vacation tool for dbmail
#
#
import os,sys,string,email,getopt,shelve,time,re,smtplib
from dbmail.lib import DbmailAutoreply
def usage():
print """autoreplier: dbmail autoreply replacement
-u <username> --username=<username> specify recipient
-a <alias> --alias=<alias> specify matching destination address
"""
class error(Exception):
    """AutoReplyError"""
class AutoReplier:
CACHEDIR="/var/cache/dbmail"
TIMEOUT=3600*24*7
OUTHOST="localhost"
OUTPORT=25
_username=None
_messagefile=None
def __init__(self):
self.setMessage(email.message_from_file(sys.stdin))
def setUsername(self,_username): self._username=_username
def getUsername(self): return self._username
def setMessage(self,_message): self._message=_message
def getMessage(self): return self._message
def setReply(self): self._reply=DbmailAutoreply(self.getUsername()).getReply()
def getReply(self): return email.message_from_string(self._reply)
def setAlias(self,_alias): self._alias=_alias
def getAlias(self): return self._alias
def openCache(self):
file=os.path.join(self.CACHEDIR,"%s.db" % self.getUsername())
self.cache=shelve.open(file,writeback=True)
def closeCache(self): self.cache.close()
def checkSender(self,bounce_senders=[]):
for f in ('From',):
if self.getMessage().has_key(f):
header=string.lower(self.getMessage()[f])
for s in bounce_senders:
if string.find(header,s) >= 0:
return True
return False
def checkDestination(self):
for f in ('To','Cc'):
if self.getMessage().has_key(f):
header=string.lower(self.getMessage()[f])
if string.find(header,self.getAlias()) >=0:
return True
return False
def send_message(self,msg):
server=smtplib.SMTP(self.OUTHOST,self.OUTPORT)
server.sendmail(msg['From'],msg['To'],msg.as_string())
server.quit()
def do_reply(self):
m=self.getMessage()
u=self.getUsername()
if m.has_key('Reply-to'): to=m['Reply-to']
elif m.has_key('From'): to=m['From']
else: raise error, "No return address"
if self.checkSender(['daemon','mailer-daemon','postmaster']):
return
if not self.checkDestination():
return
if not self.cache.has_key(u):
self.cache[u]={}
if not self.cache[u].has_key(to) or self.cache[u][to] < int(time.time())-self.TIMEOUT:
replymsg=self.getReply()
print replymsg
replymsg['To']=to
            replymsg['From']=self.getAlias()
body=replymsg.get_payload()
body="%s\n---\n\n%s\n" % ( body, self.getAlias() )
            replymsg.set_payload(body)
self.send_message(replymsg)
self.cache[u][to]=int(time.time())
def reply(self):
self.openCache()
self.do_reply()
self.closeCache()
if __name__ == '__main__':
try:
opts,args = getopt.getopt(sys.argv[1:],"u:m:a:",
["username=","alias="])
except getopt.GetoptError:
usage()
sys.exit(0)
replier=AutoReplier()
for o,a in opts:
if o in ('-u','--username'):
replier.setUsername(a)
replier.setReply()
if o in ('-a','--alias'):
replier.setAlias(a)
replier.reply()
|
PRIArobotics/HedgehogServer
|
hedgehog/server/hardware/simulated.py
|
Python
|
agpl-3.0
| 2,565 | 0.00039 |
from typing import Dict
import random
from . import HardwareAdapter, POWER
from hedgehog.protocol.messages import io
class SimulatedHardwareAdapter(HardwareAdapter):
def __init__(self, *args, simulate_sensors=False, **kwargs):
super().__init__(*args, **kwargs)
self.simulate_sensors = simulate_sensors
self.io_configs: Dict[int, int] = {}
self.emergency: bool = False
async def get_version(self):
return bytes(12), 0, 0
    async def emergency_action(self, activate):
self.emergency = activate
async def get_emergency_state(self) -> bool:
return self.emergency
    async def set_io_config(self, port, flags):
self.io_configs[port] = flags
async def get_analog(self, port):
if not self.simulate_sensors:
return 0
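        # Simulated ADC: pick a mean/stddev for the pin's configured mode and
        # draw one Gaussian sample, clamped to the 12-bit range [0, 4095] below.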
mu, sigma = {
io.INPUT_FLOATING: (800, 60),
io.INPUT_PULLUP: (4030, 30),
io.INPUT_PULLDOWN: (80, 30),
io.OUTPUT_ON: (4050, 20),
io.OUTPUT_OFF: (50, 20),
}[self.io_configs.get(port, io.INPUT_FLOATING)]
num = int(random.gauss(mu, sigma))
if num < 0:
num = 0
if num >= 4096:
num = 4095
return num
async def get_imu_rate(self):
# TODO get_imu_rate
return 0, 0, 0
async def get_imu_acceleration(self):
# TODO get_imu_acceleration
return 0, 0, 0
async def get_imu_pose(self):
# TODO get_imu_pose
return 0, 0, 0
async def get_digital(self, port):
if not self.simulate_sensors:
return False
value = {
io.INPUT_FLOATING: False,
io.INPUT_PULLUP: True,
io.INPUT_PULLDOWN: False,
io.OUTPUT_ON: True,
io.OUTPUT_OFF: False,
}[self.io_configs.get(port, io.INPUT_FLOATING)]
return value
async def set_motor(self, port, mode, amount=0, reached_state=POWER, relative=None, absolute=None):
# TODO set motor action
pass
async def get_motor(self, port):
return 0, 0
async def set_motor_position(self, port, position):
# TODO set motor position
pass
async def set_motor_config(self, port, config):
# TODO set_motor_config
pass
async def set_servo(self, port, active, position):
# TODO set_servo
pass
async def send_uart(self, data):
# TODO send_uart
pass
async def set_speaker(self, frequency):
# TODO set_speaker
pass
|
rapilabs/django-shopfront
|
backend/django_shopfront/wsgi.py
|
Python
|
mit
| 410 | 0 |
"""
WSGI config for django_shopfront project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_shopfront.settings")
application = get_wsgi_application()
|
M4rtinK/anaconda
|
tests/unit_tests/pyanaconda_tests/test_installation_tasks.py
|
Python
|
gpl-2.0
| 15,098 | 0.000795 |
#
# Martin Kolman <mkolman@redhat.com>
#
# Copyright 2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
import unittest
from pyanaconda.installation_tasks import Task
from pyanaconda.installation_tasks import TaskQueue
class InstallTasksTestCase(unittest.TestCase):
def setUp(self):
self._test_variable1 = 0
self._test_variable2 = 0
self._test_variable3 = 0
self._test_variable4 = None
self._test_variable5 = None
self._test_variable6 = None
self._task_started_count = 0
self._task_completed_count = 0
self._queue_started_count = 0
self._queue_completed_count = 0
def _increment_var1(self):
self._test_variable1 += 1
def _increment_var2(self):
self._test_variable2 += 1
def _increment_var3(self):
self._test_variable3 += 1
def _set_var_4(self, value):
self._test_variable4 = value
def _set_var_5(self, value):
self._test_variable5 = value
def _set_var_6(self, value):
self._test_variable6 = value
def test_task(self):
"""Check that task works correctly."""
task = Task("foo", self._set_var_5, ("anaconda",))
assert task.name == "foo"
assert task.summary == "Task: foo"
assert task.parent is None
assert task.elapsed_time is None
# check initial state of the testing variables
assert self._test_variable4 is None
assert self._test_variable5 is None
assert self._test_variable6 is None
# check task state
assert not task.done
assert not task.running
# connect callbacks
task.started.connect(self._set_var_4)
task.completed.connect(self._set_var_6)
# check if the task is executed correctly
task.start()
assert task.done
assert not task.running
assert self._test_variable5 == "anaconda"
assert self._test_variable4 is task
assert self._test_variable6 is task
# it should be possible to execute the task only once
task.start()
assert task.done
assert not task.running
assert task.elapsed_time is not None
assert self._test_variable5 == "anaconda"
assert self._test_variable4 is task
assert self._test_variable6 is task
def test_task_kwargs(self):
"""Check that works correctly with kwargs."""
def custom_function(arg1, foo=None):
self._set_var_5((arg1, foo))
task = Task("foo", custom_function, task_args=("anaconda",), task_kwargs={"foo": "bar"})
assert task.name == "foo"
assert task.summary == "Task: foo"
assert task.parent is None
assert task.elapsed_time is None
# check initial state of the testing variables
assert self._test_variable4 is None
assert self._test_variable5 is None
assert self._test_variable6 is None
# check task state
assert not task.done
assert not task.running
# connect callbacks
task.started.connect(self._set_var_4)
task.completed.connect(self._set_var_6)
# check if the task is executed correctly
task.start()
assert task.done
assert not task.running
assert self._test_variable5 == ("anaconda", "bar")
assert self._test_variable4 is task
assert self._test_variable6 is task
# it should be possible to execute the task only once
task.start()
assert task.done
assert not task.running
assert task.elapsed_time is not None
assert self._test_variable5 == ("anaconda", "bar")
assert self._test_variable4 is task
assert self._test_variable6 is task
def test_task_no_args(self):
"""Check if task with no arguments works correctly."""
task = Task("foo", self._increment_var1)
assert task.name == "foo"
assert task.summary == "Task: foo"
assert task.parent is None
assert task.elapsed_time is None
# check initial state of the testing variables
assert self._test_variable1 == 0
assert self._test_variable4 is None
assert self._test_variable5 is None
assert self._test_variable6 is None
# check task state
assert not task.done
assert not task.running
# connect callbacks
task.started.connect(self._set_var_4)
task.completed.connect(self._set_var_6)
# check if the task is executed correctly
task.start()
assert task.done
assert not task.running
assert self._test_variable1 == 1
assert self._test_variable4 is task
assert self._test_variable6 is task
# it should be possible to execute the task only once
task.start()
assert task.done
assert not task.running
assert task.elapsed_time is not None
assert self._test_variable1 == 1
assert self._test_variable4 is task
assert self._test_variable6 is task
def test_task_subclass_light(self):
"""Check if a Task subclass with custom run_task() method works."""
class CustomPayloadTask(Task):
def __init__(self, name):
super(CustomPayloadTask, self).__init__(name, task=None, task_args=[])
self.var1 = 0
self.var2 = None
# We define a custom run_task method and override it with our own "payload",
# as this is more lightweight than overriding the full start() method and
# we get all the locking and signal triggering for free.
def run_task(self):
self.var1 += 1
self.var1 += 1
self.var2 = "anaconda"
task = CustomPayloadTask("custom payload task")
# connect callbacks
task.started.connect(self._set_var_4)
task.completed.connect(self._set_var_6)
# verify initial state
assert task.var1 == 0
assert task.var2 is None
# run the custom task
task.start()
# verify that the custom payload was run
assert task.var1 == 2
assert task.var2 == "anaconda"
# verify that the started/completed signals were triggered
assert self._test_variable4 is task
assert self._test_variable6 is task
def test_task_subclass_heavy(self):
"""Check if a Task subclass with custom start() method works."""
class CustomStartTask(Task):
def __init__(self, name):
super(CustomStartTask, self).__init__(name, task=None, task_args=[])
self.var1 = 0
self.var2 = None
# We define a custom start method and override it with our own "payload".
# This is more "heavy" than overriding just run_task() method and
# we generally need to implement all the locking and signal triggering.
# On the other hand it can potentially provide more fine-grained control
# over how the task is processed.
def start(self):
self.var1 += 1
self.var1 += 1
self.var2 = "anaconda"
task = CustomStartTask("custom payload task")
# connect callbacks
task.s
|
cloudtools/troposphere
|
troposphere/cloudtrail.py
|
Python
|
bsd-2-clause
| 2,138 | 0.001871 |
# Copyright (c) 2012-2022, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
from . import AWSObject, AWSProperty, PropsDictType, Tags
from .validators import boolean
class DataResource(AWSProperty):
"""
`DataResource <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudtrail-trail-dataresource.html>`__
"""
props: PropsDictType = {
"Type": (str, True),
"Values": ([str], False),
}
class EventSelector(AWSProperty):
"""
`EventSelector <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudtrail-trail-eventselector.html>`__
"""
props: PropsDictType = {
"DataResources": ([DataResource], False),
"ExcludeManagementEventSources": ([str], False),
"IncludeManagementEvents": (boolean, False),
"ReadWriteType": (str, False),
}
class InsightSelector(AWSProperty):
"""
`InsightSelector <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudtrail-trail-insightselector.html>`__
"""
props: PropsDictType = {
"InsightType": (str, False),
}
class Trail(AWSObject):
"""
`Trail <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudtrail-trail.html>`__
"""
resource_type = "AWS::CloudTrail::Trail"
props: PropsDictType = {
"CloudWatchLogsLogGroupArn": (str, False),
"CloudWatchLogsRoleArn": (str, False),
"EnableLogFileValidation": (boolean, False),
"EventSelectors": ([EventSelector], False),
"IncludeGlobalServiceEvents": (boolean, False),
"InsightSelectors": ([InsightSelector], False),
"IsLogging": (boolean, True),
"IsMultiRegionTrail": (boolean, False),
"IsOrganizationTrail": (boolean, False),
"KMSKeyId": (str, False),
"S3BucketName": (
|
str, True),
"S3KeyPrefix": (str, False),
"SnsTopicName": (str, False),
"Tags": (Tags, False),
"TrailName": (str, False),
}
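# Illustrative usage sketch (the bucket name and template are placeholders,
# not part of this generated module):
#
#   trail = Trail(
#       "ExampleTrail",
#       IsLogging=True,
#       S3BucketName="example-trail-bucket",
#   )
#   template.add_resource(trail)  # given an existing troposphere Template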
|
joshmoore/openmicroscopy
|
components/tools/OmeroPy/scripts/omero/import_scripts/Populate_ROI.py
|
Python
|
gpl-2.0
| 1,244 | 0.012862 |
"""
components/tools/OmeroPy/scripts/omero/import_scripts/Populate_Plate_Roi.py
Uses the omero.util.populate_roi functionality to parse all the
measurement files attached to a plate, and generate server-side
rois.
params:
Plate_ID: id of the plate which should be parsed.
Copyright 2009 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import omero.scripts as scripts
from omero.util.populate_roi import *
client = scripts.client('Populate_ROI.py',
scripts.Long("Plate_ID", optional = False,
description = "ID of a valid plate with attached results files"),
version = "4.2.0",
contact = "ome-users@lists.openmicroscopy.org.uk",
    description = """Generates regions of interest from the measurement files associated with a plate
This script is executed by the server on initial import, and should typically not need
to be run by users.""")
factory = PlateAnalysisCtxFactory(client.getSession())
analysis_ctx = factory.get_analysis_ctx(client.getInput("Plate_ID").val)
n_measurements = analysis_ctx.get_measurement_count()
for i in range(n_measurements):
measurement_ctx = analysis_ctx.get_measurement_ctx(i)
measurement_ctx.parse_and_populate()
|
erinxocon/Text-Parsing-Function-Dispatcher
|
tpfd/parser.py
|
Python
|
mit
| 2,830 | 0.00424 |
#coding=utf-8
"""
parser.py
This contains the main Parser class that can be instantiated to create rules.
"""
from .rules import RuleMap
from .compat import basestring
class Parser(object):
"""
Parser exposes a couple methods for reading in strings.
Currently only parse_file is working.
"""
def __init__(self):
"""Initalizer"""
self.debug = False
self._parse_rule_map = RuleMap(list)
self._find_rule_map = RuleMap(list)
def on_parse(self, eventname):
"""
        Decorator for rules. Calls the associated functions when the rule
        is matched during parsing.
"""
def parse_decorator(func):
"""Event decorator closure thing"""
self._parse_rule_map.add_rule(eventname, func)
return func
return parse_decorator
    def on_find(self, eventname):
"""
        Decorator for rules. Calls the associated functions when the rule
        is matched via find.
"""
def find_decorator(func):
"""Event decorator closure thing"""
self._find_rule_map.add_rule(eventname, func)
return func
return find_decorator
def parse_file(self, file):
"""Parses through a file"""
with open(file, 'r') as f:
for line in f:
self._parse_rule_map.query_parse(line)
def iter_parse(self, iterable):
"""Parses an interator/generator"""
for item in iterable:
self._parse_rule_map.query_parse(item)
def parse_string(self, string):
"""Parses and int or string"""
return self._parse_rule_map.query_parse(string)
def parse(self, item):
"""Magical method that automatically chooses parse string or iter parse"""
if isinstance(item, basestring):
return self.parse_string(item)
else:
self.iter_parse(item)
def find_string(self, string):
"""finds an int or string based on input pattern"""
return self._find_rule_map.query_find(string)
def iter_find(self, iterable):
"""Finds an string based on an input pattern and interable/generator"""
for item in iterable:
self._find_rule_map.query_find(item)
def find_file(self, file):
"""find a string based on an input pattern from a file"""
with open(file, 'r') as f:
for line in f:
                self._find_rule_map.query_find(line)
def find(self, item):
"""Magical method that chooses between iter_find and find_string"""
if isinstance(item, basestring):
return self.find_string(item)
else:
self.iter_find(item)
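# Illustrative usage sketch (the rule-pattern syntax and callback signature
# are defined by RuleMap; parse-style patterns are assumed here):
#
#   p = Parser()
#
#   @p.on_parse('Hello, {name}')
#   def greet(name):
#       print(name)
#
#   p.parse('Hello, Ada')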
|
ACJTeam/enigma2
|
lib/python/Components/InputDevice.py
|
Python
|
gpl-2.0
| 8,051 | 0.026581 |
from config import config, ConfigSlider, ConfigSubsection, ConfigYesNo, ConfigText, ConfigInteger
from SystemInfo import SystemInfo
from fcntl import ioctl
import os
import struct
# asm-generic/ioctl.h
IOC_NRBITS = 8L
IOC_TYPEBITS = 8L
IOC_SIZEBITS = 13L
IOC_DIRBITS = 3L
IOC_NRSHIFT = 0L
IOC_TYPESHIFT = IOC_NRSHIFT+IOC_NRBITS
IOC_SIZESHIFT = IOC_TYPESHIFT+IOC_TYPEBITS
IOC_DIRSHIFT = IOC_SIZESHIFT+IOC_SIZEBITS
IOC_READ = 2L
def EVIOCGNAME(length):
return (IOC_READ<<IOC_DIRSHIFT)|(length<<IOC_SIZESHIFT)|(0x45<<IOC_TYPESHIFT)|(0x06<<IOC_NRSHIFT)
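# Illustrative: EVIOCGNAME(256) ORs together the direction (read), the payload
# size (256), ioctl type 'E' (0x45) and number 0x06 -- i.e. the kernel's
# _IOR('E', 0x06, len).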
class inputDevices:
def __init__(self):
self.Devices = {}
self.currentDevice = ""
self.getInputDevices()
def getInputDevices(self):
devices = os.listdir("/dev/input/")
for evdev in devices:
try:
buffer = "\0"*512
self.fd = os.open("/dev/input/" + evdev, os.O_RDWR | os.O_NONBLOCK)
self.name = ioctl(self.fd, EVIOCGNAME(256), buffer)
self.name = self.name[:self.name.find("\0")]
os.close(self.fd)
except (IOError,OSError), err:
print '[iInputDevices] getInputDevices ' + evdev + ' <ERROR: ioctl(EVIOCGNAME): ' + str(err) + ' >'
self.name = None
if self.name:
self.Devices[evdev] = {'name': self.name, 'type': self.getInputDeviceType(self.name),'enabled': False, 'configuredName': None }
def getInputDeviceType(self,name):
if "remote control" in name:
return "remote"
elif "keyboard" in name:
return "keyboard"
elif "mouse" in name:
return "mouse"
else:
print "Unknown device type:",name
return None
def getDeviceName(self, x):
if x in self.Devices.keys():
return self.Devices[x].get("name", x)
else:
return "Unknown device name"
def getDeviceList(self):
return sorted(self.Devices.iterkeys())
def setDeviceAttribute(self, device, attribute, value):
#print "[iInputDevices] setting for device", device, "attribute", attribute, " to value", value
if device in self.Devices:
self.Devices[device][attribute] = value
def getDeviceAttribute(self, device, attribute):
if device in self.Devices:
if attribute in self.Devices[device]:
return self.Devices[device][attribute]
return None
def setEnabled(self, device, value):
oldval = self.getDeviceAttribute(device, 'enabled')
#print "[iInputDevices] setEnabled for device %s to %s from %s" % (device,value,oldval)
self.setDeviceAttribute(device, 'enabled', value)
if oldval is True and value is False:
self.setDefaults(device)
def setName(self, device, value):
#print "[iInputDevices] setName for device %s to %s" % (device,value)
self.setDeviceAttribute(device, 'configuredName', value)
#struct input_event {
# struct timeval time; -> ignored
# __u16 type; -> EV_REP (0x14)
# __u16 code; -> REP_DELAY (0x00) or REP_PERIOD (0x01)
# __s32 value; -> DEFAULTS: 700(REP_DELAY) or 100(REP_PERIOD)
#}; -> size = 16
def setDefaults(self, device):
print "[iInputDevices] setDefaults for device %s" % device
self.setDeviceAttribute(device, 'configuredName', None)
event_repeat = struct.pack('iihhi', 0, 0, 0x14, 0x01, 100)
event_delay = struct.pack('iihhi', 0, 0, 0x14, 0x00, 700)
fd = os.open("/dev/input/" + device, os.O_RDWR)
os.write(fd, event_repeat)
os.write(fd, event_delay)
os.close(fd)
def setRepeat(self, device, value): #REP_PERIOD
if self.getDeviceAttribute(device, 'enabled'):
print "[iInputDevices] setRepeat for device %s to %d ms" % (device,value)
event = struct.pack('iihhi', 0, 0, 0x14, 0x01, int(value))
fd = os.open("/dev/input/" + device, os.O_RDWR)
os.write(fd, event)
os.close(fd)
def setDelay(self, device, value): #REP_DELAY
if self.getDeviceAttribute(device, 'enabled'):
print "[iInputDevices] setDelay for device %s to %d ms" % (device,value)
event = struct.pack('iihhi', 0, 0, 0x14, 0x00, int(value))
fd = os.open("/dev/input/" + device, os.O_RDWR)
os.write(fd, event)
os.close(fd)
class InitInputDevices:
def __init__(self):
		self.currentDevice = ""
self.createConfig()
def createConfig(self, *args):
config.inputDevices = ConfigSubsection()
for device in sorted(iInputDevices.Devices.iterkeys()):
self.currentDevice = device
#print "[InitInputDevices] -> creating config entry for device: %s -> %s " % (self.currentDevice, iInputDevices.Devices[device]["name"])
			self.setupConfigEntries(self.currentDevice)
self.currentDevice = ""
def inputDevicesEnabledChanged(self,configElement):
if self.currentDevice != "" and iInputDevices.currentDevice == "":
iInputDevices.setEnabled(self.currentDevice, configElement.value)
elif iInputDevices.currentDevice != "":
iInputDevices.setEnabled(iInputDevices.currentDevice, configElement.value)
def inputDevicesNameChanged(self,configElement):
if self.currentDevice != "" and iInputDevices.currentDevice == "":
iInputDevices.setName(self.currentDevice, configElement.value)
if configElement.value != "":
devname = iInputDevices.getDeviceAttribute(self.currentDevice, 'name')
if devname != configElement.value:
cmd = "config.inputDevices." + self.currentDevice + ".enabled.value = False"
exec cmd
cmd = "config.inputDevices." + self.currentDevice + ".enabled.save()"
exec cmd
elif iInputDevices.currentDevice != "":
iInputDevices.setName(iInputDevices.currentDevice, configElement.value)
def inputDevicesRepeatChanged(self,configElement):
if self.currentDevice != "" and iInputDevices.currentDevice == "":
iInputDevices.setRepeat(self.currentDevice, configElement.value)
elif iInputDevices.currentDevice != "":
iInputDevices.setRepeat(iInputDevices.currentDevice, configElement.value)
def inputDevicesDelayChanged(self,configElement):
if self.currentDevice != "" and iInputDevices.currentDevice == "":
iInputDevices.setDelay(self.currentDevice, configElement.value)
elif iInputDevices.currentDevice != "":
iInputDevices.setDelay(iInputDevices.currentDevice, configElement.value)
def setupConfigEntries(self,device):
cmd = "config.inputDevices." + device + " = ConfigSubsection()"
exec cmd
cmd = "config.inputDevices." + device + ".enabled = ConfigYesNo(default = False)"
exec cmd
cmd = "config.inputDevices." + device + ".enabled.addNotifier(self.inputDevicesEnabledChanged,config.inputDevices." + device + ".enabled)"
exec cmd
cmd = "config.inputDevices." + device + '.name = ConfigText(default="")'
exec cmd
cmd = "config.inputDevices." + device + ".name.addNotifier(self.inputDevicesNameChanged,config.inputDevices." + device + ".name)"
exec cmd
cmd = "config.inputDevices." + device + ".repeat = ConfigSlider(default=100, increment = 10, limits=(0, 500))"
exec cmd
cmd = "config.inputDevices." + device + ".repeat.addNotifier(self.inputDevicesRepeatChanged,config.inputDevices." + device + ".repeat)"
exec cmd
cmd = "config.inputDevices." + device + ".delay = ConfigSlider(default=700, increment = 100, limits=(0, 5000))"
exec cmd
cmd = "config.inputDevices." + device + ".delay.addNotifier(self.inputDevicesDelayChanged,config.inputDevices." + device + ".delay)"
exec cmd
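# Note: setupConfigEntries builds one config subsection per input device via
# exec, because device names (event0, event1, ...) are only known at runtime.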
iInputDevices = inputDevices()
config.plugins.remotecontroltype = ConfigSubsection()
config.plugins.remotecontroltype.rctype = ConfigInteger(default = 0)
class RcTypeControl():
def __init__(self):
if SystemInfo["RcTypeChangable"] and os.path.exists('/proc/stb/info/boxtype'):
self.isSupported = True
self.boxType = open('/proc/stb/info/boxtype', 'r').read().strip()
if config.plugins.remotecontroltype.rctype.value != 0:
self.writeRcType(config.plugins.remotecontroltype.rctype.value)
else:
self.isSupported = False
def multipleRcSupported(self):
return self.isSupported
def getBoxType(self):
return self.boxType
def writeRcType(self, rctype):
if self.isSupported and rctype > 0:
open('/proc/stb/ir/rc/type', 'w').write('%d' % rctype)
def readRcType(self):
rc = 0
if self.isSupported:
rc = open('/proc/stb/ir/rc/type', 'r').read().strip()
return int(rc)
iRcTypeControl = RcTypeControl()
|
yaybu/touchdown
|
touchdown/aws/route53/alias_target.py
|
Python
|
apache-2.0
| 689 | 0 |
# Copyright 2015 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from touchdown.core.resource import Resource
class AliasTarget(Resource):
resource_name = "alias_target"
|
perryl/morph
|
cliapp/__init__.py
|
Python
|
gpl-2.0
| 1,188 | 0 |
# Copyright (C) 2011 Lars Wirzenius
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
__version__ = '1.20130808'
from fmt import TextFormat
from settings import (Settings, log_group_name, config_group_name,
perf_group_name)
from runcmd import runcmd, runcmd_unchecked, shell_quote, ssh_runcmd
from app import Application, AppException
# The plugin system
from hook import Hook, FilterHook
from hookmgr import HookManager
from plugin import Plugin
from pluginmgr import PluginManager
__all__ = locals()
|
valohai/minique
|
minique/cli.py
|
Python
|
mit
| 1,399 | 0.000715 |
import argparse
import logging
from typing import List, Optional
from redis import StrictRedis
from minique.compat import sentry_sdk
from minique.work.worker import Worker
def get_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser()
parser.add_argument("-u", "--redis-url", required=True)
parser.add_argument("-q", "--queues", nargs="+", required=True)
parser.add_argument("--allow-callable", nargs="+", required=True)
parser.add_argument("--si
|
ngle-tick", action="store_true")
return parser
def main(argv: Optional[List[str]] = None) -> None:
parser = get_parser()
args = parser.parse_args(argv)
logging.basicConfig(datefmt="%Y-%m-%d %H:%M:%S", level=logging.INFO)
redis = StrictRedis.from_url(args.redis_url)
worker = Worker.for_queue_names(redis=redis, queue_names=args.queues)
worker.allowed_callable_patterns = set(args.allow_callable)
    worker.log.info("Worker initialized")
if sentry_sdk:
try:
sentry_sdk.init()
except Exception as exc:
worker.log.warning("Failed to initialize Sentry: %s", exc, exc_info=True)
else:
hub = sentry_sdk.hub.Hub.current
if hub and hub.client and hub.client.dsn:
worker.log.info("Sentry configured with a valid DSN")
if args.single_tick:
worker.tick()
else:
worker.loop()
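# Example invocation (illustrative; assumes a console-script entry point wired
# to main(), with placeholder URL, queue and pattern values):
#
#   minique -u redis://localhost:6379/0 -q work --allow-callable 'myapp.jobs.*'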
|
rainforestapp/destimator
|
destimator/tests/test.py
|
Python
|
mit
| 7,733 | 0.001681 |
from __future__ import print_function, unicode_literals
import os
import shutil
import zipfile
import datetime
import tempfile
import subprocess
from copy import deepcopy
import pytest
import numpy as np
from numpy.testing import assert_almost_equal
from sklearn.dummy import DummyClassifier
from destimator import DescribedEstimator, utils
@pytest.fixture
def features():
return np.zeros([10, 3])
@pytest.fixture
def labels():
labels = np.zeros(10)
labels[5:] = 1.0
return labels
@pytest.fixture
def clf(features, labels):
clf = DummyClassifier(strategy='constant', constant=0.0)
clf.fit(features, labels)
return clf
@pytest.fixture
def clf_described(clf, features, labels, feature_names):
return DescribedEstimator(clf, features, labels, features, labels, feature_names)
@pytest.fixture
def feature_names():
return ['one', 'two', 'three']
@pytest.fixture
def metadata_v1():
return {
'metadata_version': 1,
'created_at': '2016-01-01-00-00-00',
'feature_names': ['f0', 'f1', 'f2'],
'vcs_hash': 'deadbeef',
'distribution_info': {
'python': 3.5,
'packages': [],
},
}
@pytest.fixture
def metadata_v2():
return {
'metadata_version': 2,
'created_at': '2016-02-01-00-00-00',
'feature_names': ['f0', 'f1', 'f2'],
'vcs_hash': 'deadbeef',
'distribution_info': {
'python': 3.5,
'packages': [],
},
'performance_scores': {
'precision': [0.7],
'recall': [0.8],
'fscore': [0.9],
'support': [100],
'roc_auc': 0.6,
'log_loss': 0.5,
}
}
class TestDescribedEstimator(object):
def test_init(self, clf_described):
assert clf_described.n_training_samples_ == 10
assert clf_described.n_features_ == 3
def test_init_error(self, clf, features, labels, feature_names):
with pytest.raises(ValueError):
wrong_labels = np.zeros([9, 1])
DescribedEstimator(clf, features, wrong_labels, features, labels, feature_names)
with pytest.raises(ValueError):
wrong_feature_names = ['']
DescribedEstimator(clf, features, labels, features, labels, wrong_feature_names)
def test_eq(self, clf, features, labels, feature_names, metadata_v1, metadata_v2):
d1 = DescribedEstimator(clf, features, labels, features, labels, compute_metadata=False, metadata=metadata_v1)
d1b = DescribedEstimator(clf, features, labels, features, labels, compute_metadata=False, metadata=metadata_v1)
assert d1 == d1b
d2 = DescribedEstimator(clf, features, labels, features, labels, compute_metadata=False, metadata=metadata_v2)
assert d1 != d2
        metadata_v1a = dict(metadata_v1)
metadata_v1a['metadata_version'] = 3
d1a = DescribedEstimator(clf, features, labels, features, labels, compute_metadata=False, metadata=metadata_v1a)
assert d1 != d1a
def test_from_file(self, clf_described):
save_dir = tempfile.mkdtemp()
try:
file_path = clf_described.save(save_dir)
destimator = DescribedEstimator.from_file(file_path)
assert destimator == clf_described
finally:
shutil.rmtree(save_dir)
def test_is_compatible(self, clf, clf_described, features, labels):
compatible = DescribedEstimator(clf, features, labels, features, labels, ['one', 'two', 'three'])
assert clf_described.is_compatible(compatible)
incompatible = DescribedEstimator(clf, features, labels, features, labels, ['one', 'two', 'boom'])
assert not clf_described.is_compatible(incompatible)
def test_metadata(self, clf, features, labels, feature_names):
clf_described = DescribedEstimator(clf, features, labels, features, labels, feature_names)
d = clf_described.metadata
assert d['feature_names'] == feature_names
# assert type(d['metadata_version']) == str
assert type(datetime.datetime.strptime(d['created_at'], '%Y-%m-%d-%H-%M-%S')) == datetime.datetime
# assert type(d['vcs_hash']) == str
assert type(d['distribution_info']) == dict
# assert type(d['distribution_info']['python']) == str
assert type(d['distribution_info']['packages']) == list
assert type(d['performance_scores']['precision']) == list
assert type(d['performance_scores']['precision'][0]) == float
assert type(d['performance_scores']['recall']) == list
assert type(d['performance_scores']['recall'][0]) == float
assert type(d['performance_scores']['fscore']) == list
assert type(d['performance_scores']['fscore'][0]) == float
assert type(d['performance_scores']['support']) == list
assert type(d['performance_scores']['support'][0]) == int
assert type(d['performance_scores']['roc_auc']) == float
assert type(d['performance_scores']['log_loss']) == float
def test_get_metric(self, clf_described):
assert clf_described.recall == [1.0, 0.0]
assert clf_described.roc_auc == 0.5
        # log_loss uses epsilon 1e-15, so -log(1e-15) / 2 is approximately 17.27
assert_almost_equal(clf_described.log_loss, 17.269, decimal=3)
def test_save_classifier(self, clf_described):
save_dir = tempfile.mkdtemp()
try:
saved_name = clf_described.save(save_dir)
assert os.path.dirname(saved_name) == save_dir
assert os.path.isfile(saved_name)
assert saved_name.endswith('.zip')
zf = zipfile.ZipFile(saved_name)
files_present = zf.namelist()
expected_files = [
'model.bin', 'features_train.bin', 'labels_train.bin',
'features_test.bin', 'labels_test.bin', 'metadata.json',
]
# could use a set, but this way errors are easier to read
for f in expected_files:
assert f in files_present
finally:
shutil.rmtree(save_dir)
def test_save_classifier_with_filename(self, clf_described):
save_dir = tempfile.mkdtemp()
try:
saved_name = clf_described.save(save_dir, filename='boom.pkl')
assert os.path.basename(saved_name) == 'boom.pkl.zip'
assert os.path.isfile(saved_name)
finally:
shutil.rmtree(save_dir)
def test_save_classifier_nonexistent_path(self, clf_described):
save_dir = tempfile.mkdtemp()
try:
saved_name = clf_described.save(os.path.join(save_dir, 'nope'))
os.path.dirname(saved_name) == save_dir
assert os.path.isfile(saved_name)
finally:
shutil.rmtree(save_dir)
class TestGetCurrentGitHash(object):
def test_get_current_vcs_hash(self, monkeypatch):
def fake_check_output(*args, **kwargs):
return b'thisisagithash'
monkeypatch.setattr(subprocess, 'check_output', fake_check_output)
assert utils.get_current_vcs_hash() == 'thisisagithash'
def test_get_current_vcs_hash_no_git(self, monkeypatch):
def fake_check_output(*args, **kwargs):
raise OSError()
monkeypatch.setattr(subprocess, 'check_output', fake_check_output)
assert utils.get_current_vcs_hash() == ''
def test_get_current_vcs_hash_git_error(self, monkeypatch):
def fake_check_output(*args, **kwargs):
raise subprocess.CalledProcessError(0, '', '')
monkeypatch.setattr(subprocess, 'check_output', fake_check_output)
assert utils.get_current_vcs_hash() == ''
|
planoAccess/clonedONOS
|
tools/test/scenarios/bin/verify-topo-devices.py
|
Python
|
apache-2.0
| 1,143 | 0.002625 |
#! /usr/bin/env python
import requests
import sys
import urllib
from requests.auth import HTTPBasicAuth
if len(sys.argv) != 5:
print "usage: verify-topo-links onos-node cluster-id first-index last-index"
sys.exit(1)
node = sys.argv[1]
cluster = sys.argv[2]
first = int(sys.argv[3])
last = int(sys.argv[4])
found = 0
topoRequest = requests.get('http://' + node + ':8181/onos/v1/topology/clusters/'
+ cluster
+ "/devices",
auth=HTTPBasicAuth('onos', 'rocks'))
if topoRequest.status_code != 200:
print topoRequest.text
sys.exit(1)
topoJson = topoRequest.json()
for deviceIndex in range(first, last+1):
lookingFor = "of:" + format(deviceIndex, '016x')
    print lookingFor
for arrayIndex in range(0, len(topoJson["devices"])):
device = topoJson["devices"][arrayIndex]
if device == lookingFor:
found = found + 1
print "Match found for " + device
break
if found == last - first:
sys.exit(0)
print "Found " + str(found) + " matches, need " + str(last - first)
sys.exit(2)
|
michellab/Sire
|
wrapper/Squire/__init__.py
|
Python
|
gpl-2.0
| 155 | 0.012903 |
#############################
##
## The Squire module
##
## (C) Christopher Woods
##
import Sire.MM
import Sire.System
from Sire.Squire._Squire import *
|
YAtOff/python0-reloaded
|
projects/hard/phonebook/phonebook.py
|
Python
|
mit
| 5,283 | 0.000934 |
"""
Phone book
The task is to write functions that work as a phone book.
The phone book must be stored in a file.
Phone entries are represented as a dictionary with two fields:
- `name` - a person's name
- `phone` - a phone number
For example:
{
    'name': 'Ivan',
    'phone': '0884-35-45-55'
}
The phone book is a list of such records:
[
    {
        'name': 'Ivan',
        'phone': '0884-35-45-55'
    },
    {
        'name': 'Pesho',
        'phone': '0887-33-44-55'
    }
]
The phone book is stored in a file in JSON format.
"""
import json
import os.path
import sys
def load_contacts_from_file(filename):
if os.path.exists(filename):
f = open(filename)
contacts = json.loads(f.read())
f.close()
else:
contacts = []
return contacts
def save_contacts_to_file(filename, contacts):
f = open(filename, 'w')
f.write(json.dumps(contacts))
f.close()
def set_phone(filename, name, phone):
"""
    Sets the phone number `phone` for the person named `name`.
    If a person named `name` exists, updates their phone number.
    If not - adds a new record to the phone book.
    The function must:
    - read the phone book from the file named `filename`
    - modify the phone book
    - write the phone book back to the file named `filename`
    Example:
>>> set_phone('phonebook.json', 'Ivan', '0895-11-11-11') # doctest: +SKIP
"""
contacts = load_contacts_from_file(filename)
index = find_contact_index(contacts, name)
if index != -1:
contacts[index]['phone'] = phone
else:
insert_contact(contacts, name, phone)
save_contacts_to_file(filename, contacts)
def remove_phone(filename, name):
"""
    Removes the record (if it exists) for the person named `name` from the phone book.
    The function must:
    - read the phone book from the file named `filename`
    - modify the phone book
    - write the phone book back to the file named `filename`
    Example:
>>> remove_phone('phonebook.json', 'Ivan') # doctest: +SKIP
"""
contacts = load_contacts_from_file(filename)
index = find_contact_index(contacts, name)
if index != -1:
contacts.pop(index)
save_contacts_to_file(filename, contacts)
def find_phone(filename, name):
"""
    Finds the phone number of the person named `name`.
    Returns the phone number, or `None` if it cannot be found.
    The function must:
    - read the phone book from the file named `filename`
    - look up and return the phone number
    Example:
>>> find_phone('phonebook.json', 'Ivan') # doctest: +SKIP
'0895-11-11-11'
"""
contacts = load_contacts_from_file(filename)
index = find_contact_index(contacts, name)
if index != -1:
return contacts[index]['phone']
def find_contact_index(contacts, name):
"""
    A function that finds the position of a contact in the list (by name).
    Returns the position, or `-1` if no contact with that name is found.
>>> contacts = [{'name': 'Pesho', 'phone': 1}, {'name': 'Gosho', 'phone': 2}]
>>> find_contact_index(contacts, 'Gosho')
1
>>> find_contact_index(contacts, 'Ivan')
-1
"""
pass
def insert_contact(contacts, name, phone):
"""
    A function that inserts a new contact into the contact list,
    keeping the list sorted by name.
>>> contacts = []
>>> insert_contact(contacts, 'Pesho', 1)
>>> insert_contact(contacts, 'Gosho', 2)
>>> contacts[0]['name']
'Gosho'
>>> contacts[1]['name']
'Pesho'
>>> insert_contact(contacts, 'Boby', 3)
>>> contacts[0]['name']
'Boby'
>>> contacts[1]['name']
'Gosho'
    >>> contacts[2]['name']
'Pesho'
>>> insert_contact(contacts, 'Tosho', 4)
>>> contacts[3]['name']
'Tosho'
"""
pass
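# One possible solution sketch for the two stubs above (kept as a comment so
# the exercise itself remains unsolved):
#
#   def find_contact_index(contacts, name):
#       for index, contact in enumerate(contacts):
#           if contact['name'] == name:
#               return index
#       return -1
#
#   def insert_contact(contacts, name, phone):
#       index = 0
#       while index < len(contacts) and contacts[index]['name'] < name:
#           index += 1
#       contacts.insert(index, {'name': name, 'phone': phone})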
def do_command(command, *args):
result = {
'set': set_phone,
'remove': remove_phone,
'find': find_phone
}[command](*args)
if result:
print(result)
if __name__ == '__main__':
command = sys.argv[1]
if command == 'test':
import doctest
doctest.testmod()
else:
do_command(*sys.argv[1:])
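# Example session (illustrative):
#   python phonebook.py set phonebook.json Ivan 0895-11-11-11
#   python phonebook.py find phonebook.json Ivan
#   python phonebook.py remove phonebook.json Ivan
#   python phonebook.py test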
|
anbangr/trusted-juju
|
juju/providers/orchestra/tests/test_files.py
|
Python
|
agpl-3.0
| 6,631 | 0 |
from cStringIO import StringIO
from twisted.internet.defer import fail, succeed
from twisted.web.error import Error
from juju.errors import FileNotFound, ProviderError, ProviderInteractionError
from juju.lib.testing import TestCase
from juju.providers.orchestra import MachineProvider
from .test_digestauth import GetPageAuthTestCase
class SomeError(Exception):
pass
def get_file_storage(custom_config=None):
config = {"orchestra-server": "somewhereel.se",
"orchestra-user": "fallback-user",
"orchestra-pass": "fallback-pass",
"acquired-mgmt-class": "acquired",
"available-mgmt-class": "available"}
if custom_config is None:
config["storage-url"] = "http://somewhe.re"
config["storage-user"] = "user"
config["storage-pass"] = "pass"
else:
config.update(custom_config)
provider = MachineProvider("blah", config)
return provider.get_file_storage()
class FileStorageGetTest(TestCase):
def setUp(self):
self.uuid4_m = self.mocker.replace("uuid.uuid4")
self.getPage = self.mocker.replace("twisted.web.client.getPage")
def test_get_url(self):
self.mocker.replay()
fs = get_file_storage()
self.assertEquals(fs.get_url("angry/birds"),
"http://somewhe.re/angry/birds")
def test_get_url_fallback(self):
self.mocker.replay()
fs = get_file_storage({})
self.assertEquals(fs.get_url("angry/birds"),
"http://somewhereel.se/webdav/angry/birds")
def test_get(self):
self.getPage("http://somewhe.re/rubber/chicken")
self.mocker.result(succeed("pulley"))
self.mocker.replay()
fs = get_file_storage()
d = fs.get("rubber/chicken")
def verify(result):
self.assertEquals(result.read(), "pulley")
d.addCallback(verify)
return d
def check_get_error(self, result, err_type, err_message):
self.getPage("http://somewhe.re/rubber/chicken")
self.mocker.result(result)
self.mocker.replay()
fs = get_file_storage()
d = fs.get("rubber/chicken")
self.assertFailure(d, err_type)
def verify(error):
self.assertEquals(str(error), err_message)
d.addCallback(verify)
return d
def test_get_error(self):
return self.check_get_error(
fail(SomeError("pow!")),
ProviderInteractionError,
"Unexpected SomeError interacting with provider: pow!")
def test_get_404(self):
return self.check_get_error(
fail(Error("404")),
FileNotFound,
"File was not found: 'http://somewhe.re/rubber/chicken'")
def test_get_bad_code(self):
return self.check_get_error(
fail(Error("999")),
ProviderError,
"Unexpected HTTP 999 trying to GET "
"http://somewhe.re/rubber/chicken")
class FileStoragePutTest(GetPageAuthTestCase):
def setup_mock(self):
self.uuid4_m = self.mocker.replace("uuid.uuid4")
def get_file_storage(self, with_user=True):
storage_url = self.get_base_url()
custom_config = {"storage-url": storage_url}
if with_user:
custom_config["storage-user"] = "user"
custom_config["storage-pass"] = "pass"
        return get_file_storage(custom_config)
def test_no_auth_error(self):
self.add_plain("peregrine", "PUT", "", "croissant", 999)
fs = self.get_file_storage()
d = fs.put("peregrine", StringIO("croissant"))
self.assertFailure(d, ProviderError)
def verify(error):
self.assertIn("Unexpected HTTP 999 trying to PUT ", str(error))
d.addCallback(verify)
return d
def test_no_auth_201(self):
        self.add_plain("peregrine", "PUT", "", "croissant", 201)
fs = self.get_file_storage()
d = fs.put("peregrine", StringIO("croissant"))
d.addCallback(self.assertEquals, True)
return d
def test_no_auth_204(self):
self.add_plain("peregrine", "PUT", "", "croissant", 204)
fs = self.get_file_storage()
d = fs.put("peregrine", StringIO("croissant"))
d.addCallback(self.assertEquals, True)
return d
def auth_common(self, username, status, with_user=True):
self.setup_mock()
self.uuid4_m()
self.mocker.result("dinner")
self.mocker.replay()
url = self.get_url("possum")
def check(response):
self.assertTrue(response.startswith(
'Digest username="%s", realm="sparta", nonce="meh", uri="%s"'
% (username, url)))
self.assertIn(
'qop="auth", nc="00000001", cnonce="dinner"', response)
self.add_auth(
"possum", "PUT", "", "Digest realm=sparta, nonce=meh, qop=auth",
check, expect_content="canabalt", status=status)
fs = self.get_file_storage(with_user)
return fs.put("possum", StringIO("canabalt"))
def test_auth_error(self):
d = self.auth_common("user", 808)
self.assertFailure(d, ProviderError)
def verify(error):
self.assertIn("Unexpected HTTP 808 trying to PUT", str(error))
d.addCallback(verify)
return d
def test_auth_bad_credentials(self):
d = self.auth_common("user", 401)
self.assertFailure(d, ProviderError)
def verify(error):
self.assertEquals(
str(error),
"The supplied storage credentials were not accepted by the "
"server")
d.addCallback(verify)
return d
def test_auth_201(self):
d = self.auth_common("user", 201)
d.addCallback(self.assertEquals, True)
return d
def test_auth_204(self):
d = self.auth_common("user", 204)
d.addCallback(self.assertEquals, True)
return d
def test_auth_fallback_error(self):
d = self.auth_common("fallback-user", 747, False)
self.assertFailure(d, ProviderError)
def verify(error):
self.assertIn("Unexpected HTTP 747 trying to PUT", str(error))
d.addCallback(verify)
return d
def test_auth_fallback_201(self):
d = self.auth_common("fallback-user", 201, False)
d.addCallback(self.assertEquals, True)
return d
def test_auth_fallback_204(self):
d = self.auth_common("fallback-user", 204, False)
d.addCallback(self.assertEquals, True)
return d
|
pagekite/PyPagekite
|
droiddemo.py
|
Python
|
agpl-3.0
| 4,921 | 0.009348 |
#!/usr/bin/python -u
from __future__ import absolute_import
from __future__ import print_function
#
# droiddemo.py, Copyright 2010-2013, The Beanstalks Project ehf.
# http://beanstalks-project.net/
#
# This is a proof-of-concept PageKite enabled HTTP server for Android.
# It has been developed and tested in the SL4A Python environment.
#
DOMAIN='phone.bre.pagekite.me'
SECRET='ba4e5430'
SOURCE='/sdcard/sl4a/scripts/droiddemo.py'
#
#############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
#
import six
from six.moves.urllib.parse import unquote, urlparse
import android
import pagekite
import os
class UiRequestHandler(pagekite.UiRequestHandler):
CAMERA_PATH = '/sdcard/dcim/.thumbnails'
HOME = ('<html><head>\n'
'<script type=text/javascript>'
'lastImage = "";'
'function getImage() {'
'xhr = new XMLHttpRequest();'
'xhr.open("GET", "/latest-image.txt", true);'
'xhr.onreadystatechange = function() {'
'if (xhr.readyState == 4) {'
'if (xhr.responseText && xhr.responseText != lastImage) {'
'document.getElementById("i").src = lastImage = xhr.responseText;'
'}'
'setTimeout("getImage()", 2000);'
'}'
'};'
'xhr.send(null);'
'}'
'</script>\n'
'</head><body onLoad="getImage();" style="text-align: center;">\n'
'<h1>Android photos!</h1>\n'
'<img id=i height=80% src="http://www.android.com/images/opensourceproject.gif">\n'
'<br><a href="/droiddemo.py">source code</a>'
'| <a href="/status.html">kite status</a>\n'
'</body></head>')
def listFiles(self):
mtimes = {}
for item in os.listdir(self.CAMERA_PATH):
iname = '%s/%s' % (self.CAMERA_PATH, item)
if iname.endswith('.jpg'):
mtimes[iname] = os.path.getmtime(iname)
files = list(six.iterkeys(mtimes))
files.sort(key=lambda iname: mtimes[iname])
return files
def do_GET(self):
(scheme, netloc, path, params, query, frag) = urlparse(self.path)
p = unquote(path)
if p.endswith('.jpg') and p.startswith(self.CAMERA_PATH) and ('..' not in p):
try:
jpgfile = open(p, 'rb')  # binary mode: the payload is a JPEG
self.send_response(200)
self.send_header('Content-Type', 'image/jpeg')
self.send_header('Content-Length', '%s' % os.path.getsize(p))
self.send_header('Cache-Control', 'max-age: 36000')
self.send_header('Expires', 'Sat, 1 Jan 2011 12:00:00 GMT')
self.send_header('Last-Modified', 'Wed, 1 Sep 2011 12:00:00 GMT')
self.end_headers()
data = jpgfile.read()
while data:
try:
sent = self.wfile.write(data[0:15000])
data = data[15000:]
except Exception:
pass
return
except Exception as e:
print('%s' % e)
pass
if path == '/latest-image.txt':
flist = self.listFiles()
self.begin_headers(200, 'text/plain')
self.end_headers()
self.wfile.write(flist[-1])
return
elif path == '/droiddemo.py':
try:
pyfile = open(SOURCE)
self.begin_headers(200, 'text/plain')
self.end_headers()
self.wfile.write(pyfile.read().replace(SECRET, 'mysecret'))
except IOError as e:
self.begin_headers(404, 'text/plain')
self.end_headers()
self.wfile.write('Could not read %s: %s' % (SOURCE, e))
return
elif path == '/':
self.begin_headers(200, 'text/html')
self.end_headers()
self.wfile.write(self.HOME)
return
return pagekite.UiRequestHandler.do_GET(self)
class DroidKite(pagekite.PageKite):
def __init__(self, droid):
pagekite.PageKite.__init__(self)
self.droid = droid
self.ui_request_handler = UiRequestHandler
def Start(host, secret):
ds = DroidKite(android.Android())
ds.Configure(['--defaults',
'--httpd=localhost:9999',
'--backend=http:%s:localhost:9999:%s' % (host, secret)])
ds.Start()
Start(DOMAIN, SECRET)
|
solanolabs/rply
|
tests/test_ztranslation.py
|
Python
|
bsd-3-clause
| 2,200 | 0.000455 |
import py
try:
|
from pypy.rpython.test.test_llinterp import interpret
except ImportError:
py.test.skip('Needs PyPy to be on the PYTHONPATH')
from rply import ParserGenerator, Token
from rply.errors import ParserGeneratorWarning
from .base import BaseTests
from .utils import FakeLexer, BoxInt, ParserState
class TestTran
|
slation(BaseTests):
def run(self, func, args):
return interpret(func, args)
def test_basic(self):
pg = ParserGenerator(["NUMBER", "PLUS"])
@pg.production("main : expr")
def main(p):
return p[0]
@pg.production("expr : expr PLUS expr")
def expr_op(p):
return BoxInt(p[0].getint() + p[2].getint())
@pg.production("expr : NUMBER")
def expr_num(p):
return BoxInt(int(p[0].getstr()))
with self.assert_warns(ParserGeneratorWarning, "1 shift/reduce conflict"):
parser = pg.build()
def f(n):
return parser.parse(FakeLexer([
Token("NUMBER", str(n)),
Token("PLUS", "+"),
Token("NUMBER", str(n))
])).getint()
assert self.run(f, [12]) == 24
def test_state(self):
pg = ParserGenerator(["NUMBER", "PLUS"], precedence=[
("left", ["PLUS"]),
])
@pg.production("main : expression")
def main(state, p):
state.count += 1
return p[0]
@pg.production("expression : expression PLUS expression")
def expression_plus(state, p):
state.count += 1
return BoxInt(p[0].getint() + p[2].getint())
@pg.production("expression : NUMBER")
def expression_number(state, p):
state.count += 1
return BoxInt(int(p[0].getstr()))
parser = pg.build()
def f():
state = ParserState()
return parser.parse(FakeLexer([
Token("NUMBER", "10"),
Token("PLUS", "+"),
Token("NUMBER", "12"),
Token("PLUS", "+"),
Token("NUMBER", "-2"),
]), state=state).getint() + state.count
assert self.run(f, []) == 26
|
jleclanche/pywow
|
game/currencies/__init__.py
|
Python
|
cc0-1.0
| 702 | 0.029915 |
# -*- coding: utf-8 -*-
"""
Enchants
- CurrencyTypes.dbc
"""
from .. import *
class Currency(Mod
|
el):
pass
class CurrencyTooltip(Tooltip):
def tooltip(self):
self.append("name", self.obj.getName())
self.append("description", self.obj.getDescription(), color=YELLOW)
return self.flush()
Currency.Tooltip = CurrencyTooltip
class CurrencyProxy(object):
"""
WDBC proxy for currencies
"""
def __init__(self, cls):
from pywow import wdbc
self.__file = wdbc.get("CurrencyT
|
ypes.dbc", build=-1)
def get(self, id):
return self.__file[id]
def getDescription(self, row):
return row.description_enus
def getName(self, row):
return row.name_enus
Currency.initProxy(CurrencyProxy)
|
tommy-u/chaco
|
chaco/tests/test_colormapped_scatterplot.py
|
Python
|
bsd-3-clause
| 2,802 | 0 |
import unittest
from unittest2 import skip
from numpy import alltrue, arange
from enable.compiled_path import CompiledPath
# Chaco imports
from chaco.api import (ArrayDataSource, ColormappedScatterPlot, DataRange1D,
LinearMapper, PlotGraphicsContext, jet)
class TestColormappedScatterplot(unittest.TestCase):
def setUp(self):
self.index = ArrayDataSource(arange(10))
self.value = ArrayDataSource(arange(10))
self.color_data = Array
|
DataSource(arange(10))
self.size_data = arange(10)
self.index_range = DataRange1D()
self.index_range.add(self.index)
self.index_mapper = LinearMapper(range=self.index_range)
self.value_range = DataRange1D()
self.value_range.add(self.value)
self.value_mapper = LinearMapper(range=self.value_range)
self.color_range = Da
|
taRange1D()
self.color_range.add(self.color_data)
self.color_mapper = jet(self.color_range)
self.scatterplot = ColormappedScatterPlot(
index=self.index,
value=self.value,
index_mapper=self.index_mapper,
value_mapper=self.value_mapper,
color_data=self.color_data,
marker_size=self.size_data,
color_mapper=self.color_mapper,
)
self.scatterplot.outer_bounds = [50, 50]
self.gc = PlotGraphicsContext((50, 50))
def test_scatter_render(self):
""" Coverage test to check basic case works """
self.gc.render_component(self.scatterplot)
actual = self.gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
def test_scatter_circle(self):
""" Coverage test to check circles work """
self.scatterplot.marker = 'circle'
self.gc.render_component(self.scatterplot)
actual = self.gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
@skip("custom markers are currently broken; see #232")
def test_scatter_custom(self):
""" Coverage test to check custom markers work...
XXX ...which apparently they currently don't. See #232.
"""
# build path
path = CompiledPath()
path.begin_path()
path.move_to(-5, -5)
path.line_to(-5, 5)
path.line_to(5, 5)
path.line_to(5, -5)
path.line_to(-5, -5)
self.scatterplot.marker = 'custom'
self.scatterplot.custom_symbol = path
self.gc.render_component(self.scatterplot)
actual = self.gc.bmp_array[:, :, :]
self.assertFalse(alltrue(actual == 255))
def test_colormap_updated(self):
""" If colormapper updated then we need to redraw """
self.color_mapper.updated = True
self.assertFalse(self.scatterplot.draw_valid)
if __name__ == "__main__":
unittest.main()
|
ManuelLR/Notmail_Bot
|
repository/repository.py
|
Python
|
gpl-3.0
| 4,697 | 0.00149 |
# Copyright 2017 by Notmail Bot contributors. All rights reserved.
#
# This file is part of Notmail Bot.
#
# Notmail Bot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Notmail Bot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Notmail Bot. If not, see <http:#www.gnu.org/licenses/>.
import os
from tinydb import TinyDB, Query
from repository.email_server import EmailServer
from repository.user import User
from repository.account import parse_accounts_to_json, parse_json_to_accounts
db = None
def get_dbc():
return db
def set_dbc(dbc):
global db
db = dbc
class DBC:
def __init__(self, path=None):
if path is None:
self.db = TinyDB(os.path.join('config', 'tmail-bot.json'))
else:
self.db = TinyDB(path)
def get_table(self, table_name):
return self.db.table(table_name)
def purge(self):
self.db.purge_tables()
def insert_email_server(self, name, host, port, protocol):
email_servers = self.db.table('EmailServers')
email_servers.insert({'name': name, 'host': host, 'port': port, 'protocol': protocol})
return EmailServer(name, host, port, protocol)
def search_email_server(self, name, protocol):
email_servers = self.db.table('EmailServers')
query = Query()
search = email_servers.search((query.name == name) & (query.protocol == protocol))
result = eval(str(search))[0] # We suppose that names + protocol will be unique
email_server = EmailServer(name, result['host'], result['port'], result['protocol'])
return email_server
def update_email_server(self, email_server):
email_servers = self.db.table('EmailServers')
query = Query()
email_servers.update({'host': email_server.host, 'port': email_server.port},
(query.name == email_server.name) & (query.protocol == email_server.protocol))
def remove_email_server(self, name, protocol):
email_servers = self.db.table('EmailServers')
query = Query()
email_servers.remove((query.name == name) & (query.protocol == protocol))
def insert_user(self, id, accounts):
users = self.db.table('Users')
users.insert({'id': id, 'accounts': parse_accounts_to_json(accounts)})
return User(id, accounts)
def search_user(self, id):
users = self.db.table('Users')
query = Query()
search = users.search(query.id == id)
result = eval(str(search))[0]
|
user = User(id, parse_json_to_accounts(result['accounts']))
return user
def update_user(self, user):
users = self.db.table('Users')
query = Query()
users.update({'id': user.id, 'accounts': parse_accounts_to_json(user.accounts)},
query.id == user.id)
def get_all_users(self):
users = self.db.table('Users')
res = []
for a in users.all():
res.append(User(a['id'], p
|
arse_json_to_accounts(a['accounts'])))
return res
def remove_user(self, user_id):
users = self.db.table('Users')
query = Query()
users.remove(query.id == user_id)
def get_accounts_of_user(self, user):
user = self.search_user(user.id)
return user.accounts
def get_account_of_user(self, user, username):
user = self.search_user(user.id)
result = None
for account in user.accounts:
if account.username == username:
result = account
break
return result
def add_account_to_user(self, user, account):
user = self.search_user(user.id)
user.add_account(account)
self.update_user(user)
def update_account_of_user(self, user, account):
user = self.search_user(user.id)
user.update_account(account)
self.update_user(user)
def remove_account_of_user(self, user, account):
user = self.search_user(user.id)
user.remove_account(account)
self.update_user(user)
def get_email_server_of_account(self, account, protocol):
email_server = self.search_email_server(account.name, protocol)
return email_server
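# Minimal usage sketch (hypothetical file path and values, not part of
# the original module): the DBC wrapper is built around a TinyDB file
# and shared through the module-level accessors.
#
#     dbc = DBC(os.path.join('config', 'test-db.json'))
#     set_dbc(dbc)
#     server = dbc.insert_email_server('example', 'imap.example.org', 993, 'IMAP')
#     assert get_dbc().search_email_server('example', 'IMAP').host == 'imap.example.org'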
|
haddocking/disvis
|
disvis/main.py
|
Python
|
apache-2.0
| 23,344 | 0.004112 |
#! usr/bin/python
from __future__ import print_function, division, absolute_import
from os import remove
from os.path import join, abspath
from sys import stdout, exit
from time import time
import multiprocessing as mp
from argparse import ArgumentParser
import logging
import numpy as np
from disvis import DisVis, PDB, Volume
from disvis.rotations import proportional_orientations, quat_to_rotmat
from disvis.helpers import mkdir_p
def parse_args():
"""Parse the command-line arguments."""
p = ArgumentParser()
p.add_argument('receptor', type=file,
help='PDB-file containin
|
g fixed chain.')
p.add_argument('ligand', type=file,
help='PDB-file containing scanning chain.')
p.add_argument('restraints', type=file,
help='File containing the distance restraints')
p.add_argument('-a', '
|
--angle', dest='angle', type=float, default=15, metavar='<float>',
help='Rotational sampling density in degrees. Default is 15 degrees.')
p.add_argument('-vs', '--voxelspacing', dest='voxelspacing', metavar='<float>',
type=float, default=1,
help='Voxel spacing of search grid in angstrom. Default is 1A.')
p.add_argument('-ir', '--interaction-radius',
dest='interaction_radius', type=float, default=3.0, metavar='<float>',
help='Radius of the interaction space for each atom in angstrom. '
'Atoms are thus considered interacting if the distance is '
'larger than the vdW radius and shorter than or equal to '
'vdW + interaction_radius. Default is 3A.')
p.add_argument('-cv', '--max-clash',
dest='max_clash', type=float, default=200, metavar='<float>',
help='Maximum allowed volume of clashes. Increasing this '
'number results in more allowed complexes. '
'Default is 200 A^3.')
p.add_argument('-iv', '--min-interaction',
dest='min_interaction', type=float, default=300, metavar='<float>',
help='Minimal required interaction volume for a '
'conformation to be considered a '
'complex. Increasing this number results in a '
'stricter counting of complexes. '
'Default is 300 A^3.')
p.add_argument('-d', '--directory', dest='directory', metavar='<dir>',
type=abspath, default='.',
help='Directory where results are written to. '
'Default is current directory.')
p.add_argument('-p', '--nproc', dest='nproc', type=int, default=1, metavar='<int>',
help='Number of processors used during search.')
p.add_argument('-g', '--gpu', dest='gpu', action='store_true',
help='Use GPU-acceleration for search. If not available '
'the CPU-version will be used with the given number '
'of processors.')
help_msg = ("File containing residue number for which interactions will be counted. "
"The first line holds the receptor residue, "
"and the second line the ligand residue numbers.")
p.add_argument('-is', '--interaction-selection', metavar='<file>',
dest='interaction_selection', type=file, default=None,
help=help_msg)
help_msg = ("Number of minimal consistent restraints for which an interaction "
"or occupancy analysis will be performed. "
"Default is number of restraints minus 1.")
p.add_argument('-ic', '--interaction-restraints-cutoff', metavar='<int>',
dest='interaction_restraints_cutoff', type=int, default=None,
help=help_msg)
p.add_argument('-oa', '--occupancy-analysis', dest='occupancy_analysis',
action='store_true',
help=("Perform an occupancy analysis, ultimately providing "
"a volume where each grid point represents the "
"normalized probability of that spot being occupied by the ligand."
)
)
return p.parse_args()
def parse_interaction_selection(fid, pdb1, pdb2):
"""Parse the interaction selection file, i.e. all residues for which an
interaction analysis is performed."""
resi1 = [int(x) for x in fid.readline().split()]
resi2 = [int(x) for x in fid.readline().split()]
pdb1_sel = pdb1.select('name', ('CA', "O3'")).select('resi', resi1)
pdb2_sel = pdb2.select('name', ('CA', "O3'")).select('resi', resi2)
if (len(resi1) != pdb1_sel.natoms) or (len(resi2) != pdb2_sel.natoms):
msg = ("Some selected interaction residues where either missing in the PDB file "
"or had alternate conformers. Please check your input residues and remove alternate conformers.")
raise ValueError(msg)
return pdb1_sel, pdb2_sel
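# Illustrative sketch of the selection-file layout this parser expects
# (residue numbers are hypothetical): two whitespace-separated lines,
# receptor residues first, ligand residues second.
#
#     12 45 78
#     3 9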
def parse_restraints(fid, pdb1, pdb2):
"""Parse the restraints file."""
dist_restraints = []
for line in fid:
# ignore comments and empty lines
line = line.strip()
if line.startswith('#') or not line:
continue
chain1, resi1, name1, chain2, resi2, name2, mindis, maxdis = line.split()
pdb1_sel = pdb1.select('chain', chain1).select('resi',
int(resi1)).select('name', name1).duplicate()
pdb2_sel = pdb2.select('chain', chain2).select('resi',
int(resi2)).select('name', name2).duplicate()
if pdb1_sel.natoms == 0 or pdb2_sel.natoms == 0:
raise ValueError("A restraint selection was not found in line:\n{:s}".format(str(line)))
dist_restraints.append([pdb1_sel, pdb2_sel, float(mindis), float(maxdis)])
fid.close()
return dist_restraints
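# Illustrative sketch of a restraints line, matching the split() above
# (identifiers and distances are hypothetical): chain, residue and atom
# name for both partners, then the minimum and maximum distance.
#
#     # chain1 resi1 name1 chain2 resi2 name2 mindis maxdis
#     A 12 CA B 87 CA 3.0 10.5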
class Joiner(object):
def __init__(self, directory):
self.directory = directory
def __call__(self, fname):
"""Join fname with set directory."""
return join(self.directory, fname)
class Results(object):
"""Simple container"""
pass
def run_disvis_instance(queue, receptor, ligand, distance_restraints, rotmat,
weights, n, pdb1_sel, pdb2_sel, args):
"""Run a single DisVis instance."""
dv = DisVis()
dv.receptor = receptor
dv.ligand = ligand
dv.distance_restraints = distance_restraints
dv.rotations = rotmat
dv.weights = weights
dv.voxelspacing = args.voxelspacing
dv.interaction_radius = args.interaction_radius
dv.max_clash = args.max_clash
dv.min_interaction = args.min_interaction
dv.interaction_restraints_cutoff = args.interaction_restraints_cutoff
if args.interaction_selection is not None:
dv.receptor_interaction_selection = pdb1_sel
dv.ligand_interaction_selection = pdb2_sel
dv.occupancy_analysis = args.occupancy_analysis
dv.search()
# Save results to file, to be combined later
joiner = Joiner(args.directory)
fname = joiner('accessible_interaction_space_{:d}.mrc').format(n)
dv.accessible_interaction_space.tofile(fname)
fname = joiner('violations_{:d}.npy').format(n)
np.save(fname, dv.violations)
if dv.interaction_matrix is not None:
fname = joiner('interaction_matrix_{:d}.npy'.format(n))
np.save(fname, dv.interaction_matrix)
if dv.occupancy_analysis:
for key, value in dv.occupancy_grids.iteritems():
fname = joiner('occupancy_{:d}_{:d}.mrc'.format(key, n))
value.tofile(fname)
queue.put(dv.accessible_complexes)
def mp_cpu_disvis(receptor, ligand, rotmat, weights, distance_restraints,
pdb1_sel, pdb2_sel, args):
"""Run several DisVis instances, each with a subset of all rotations."""
# multi-threaded CPU version
try:
max_cpu = mp.cpu_count()
jobs = min(max_cpu, args.nproc)
except NotImplementedError:
jobs = args.nproc
# in case more processes are requested than the number
# of rotations sampled
nrot = rotmat.shape[0]
if jobs > nrot:
jobs = nrot
nrot_per_job = nrot//jobs
write('Number of processors used: {:d}'.format(jobs))
write('Number of rotations per job: {:d}'.format(nrot_per_job))
wri
|
thouska/spotpy
|
spotpy/examples/getting_started.py
|
Python
|
mit
| 1,975 | 0.013671 |
# -*- coding: utf-8 -*-
'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This class holds the example code from the getting_started web-documention.
'''
from __future__ import print_function, division, absolute_import, unicode_literals
# Getting started
#To start your experience with SPOT you need to have SPOT installed. Please see the [Installation chapter](index.md) for further details.
#To use SPOT we have to import it and use one of the pre-build examples:
import spotpy # Load the SPOT package in
|
to your working storage
from spotpy.examples.spot_setup_rosenbrock import spot_setup # Import the two dimensional Rosenbrock example
#The example comes along with parameter boundaries, the Rosenbrock function, the optimal value of the function and RMSE as a likelihood.
#So we can directly start to analyse the Rosenbrock function with one of the algorithms. We start with a simple Monte Carlo sampling:
if __name__ == '__main__':
|
# Give Monte Carlo algorithm the example setup and saves results in a RosenMC.csv file
#spot_setup.slow = True
sampler = spotpy.algorithms.mc(spot_setup(), dbname='RosenMC', dbformat='ram')
#Now we can sample with the implemented Monte Carlo algortihm:
sampler.sample(10000) # Sample 10,000 parameter combinations
results=sampler.getdata()
#Now we want to have a look at the results. First we want to know, what the algorithm has done during the 10.000 iterations:
#spotpy.analyser.plot_parametertrace(results) # Use the analyser to show the parameter trace
spotpy.analyser.plot_parameterInteraction(results)
posterior=spotpy.analyser.get_posterior(results)
spotpy.analyser.plot_parameterInteraction(posterior)
#spotpy.analyser.plot_posterior_parametertrace(results, threshold=0.9)
print(spotpy.analyser.get_best_parameterset(results))
|
scalable-networks/ext
|
pybombs/app_store.py
|
Python
|
gpl-2.0
| 5,226 | 0.020666 |
#!/usr/bin/env python
#
# Copyright 2013 Tim O'Shea
#
# This file is part of PyBOMBS
#
# PyBOMBS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# PyBOMBS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyBOMBS; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from PyQt4.QtCore import Qt;
from PyQt4 import QtCore
import PyQt4.QtGui as QtGui
import sys
import os.path
from mod_pybombs import *;
recipe_loader.load_all();
class AppList(QtGui.QWidget):
def __init__(self, parent, name):
super(AppList, self).__init__()
self.parent = parent;
self.lay = QtGui.QGridLayout();
self.setLayout(self.lay);
self.width = 8;
self.idx = 0;
self.cbd = {};
def cb(self):
self._cb();
def addButton(self, name, callback):
self._cb = callback;
pkgimg = "img/" + name + ".png";
if os.path.exists(pkgimg):
pixmap = QtGui.QPixmap(pkgimg);
else:
defaultimg = "img/unknown.png";
pixmap = QtGui.QPixmap(defaultimg);
icon = QtGui.QIcon(pixmap);
button = QtGui.QToolButton();
action = QtGui.QAction( icon, str(name), self );
action.setStatusTip('Install App')
button.setDefaultAction(action);
button.setToolButtonStyle(Qt.ToolButtonTextUnderIcon);
button.setIconSize(QtCore.QSize(100,100
|
));
button.setAutoRaise(True);
self.connect(action, QtCore.SIGNAL("triggered()"), callback);
self.lay.addWidget(button, self.idx/self.width, self.idx%self.width);
self.idx =
|
self.idx + 1;
class Installer:
def __init__(self, parent, name):
self.parent = parent;
self.name = name;
def cb(self):
print "installing "+ self.name;
install(self.name);
self.parent.refresh();
class Remover:
def __init__(self, parent, name):
self.parent = parent;
self.name = name;
def cb(self):
print "removing "+ self.name;
remove(self.name);
self.parent.refresh();
class ASMain(QtGui.QWidget):
#class ASMain(QtGui.QMainWindow):
def __init__(self):
super(ASMain, self).__init__()
self.setWindowTitle("Python Build Overlay Managed Bundle System - APP STORE GUI");
self.layout = QtGui.QVBoxLayout(self);
self.setLayout(self.layout);
self.menu = QtGui.QMenuBar(self);
pixmap = QtGui.QPixmap("img/logo.png")
lbl = QtGui.QLabel(self)
lbl.setPixmap(pixmap)
l2 = QtGui.QHBoxLayout();
l2.addWidget(QtGui.QLabel(" "));
l2.addWidget(lbl);
l2.addWidget(QtGui.QLabel(" "));
self.tw = QtGui.QTabWidget(self);
self.layout.setMargin(0);
self.layout.addWidget(self.menu);
self.layout.addLayout(l2);
self.layout.addWidget(self.tw);
# Populate Apps
self.populate_tabs();
# Populate the menu
exitAction = QtGui.QAction(QtGui.QIcon('exit.png'), '&Exit', self)
exitAction.triggered.connect(QtGui.qApp.quit)
fileMenu = self.menu.addMenu('&File');
fileMenu.addAction(exitAction);
reloadAction = QtGui.QAction('&Refresh State', self)
reloadAction.triggered.connect(self.reload_op)
toolsMenu = self.menu.addMenu('&Tools');
toolsMenu.addAction(reloadAction);
self.show();
def reload_op(self):
inv.loadc();
recipe_loader.load_all();
self.refresh();
def refresh(self):
self.populate_tabs();
def populate_tabs(self):
self.tw.clear();
#categories = ["baseline", "common"]
categories = ["common"]
cbs = {};
pages = [];
for c in categories:
pages.append( "Available %s Apps"%(c) );
pages.append( "Installed %s Apps"%(c) );
#pages = ["Available Apps", "Installed Apps"];
tabw = [];
for p in pages:
pp = AppList(self, p);
tabw.append(pp);
self.tw.addTab(pp, p);
catpkg = get_catpkgs()
for c in categories:
cbs[c] = {};
cidx = categories.index(c);
pkgs = catpkg[c];
pkgs.sort();
for p in pkgs:
installed = global_recipes[p].satisfy();
if(installed):
cbs[c][p] = Remover(self, p);
pcidx = 2*cidx+1;
else:
cbs[c][p] = Installer(self, p);
pcidx = 2*cidx;
tabw[pcidx].addButton(p, cbs[c][p].cb);
self.cbs = cbs;
app = QtGui.QApplication(sys.argv)
mw = ASMain();
sys.exit(app.exec_());
|
navnorth/LR-Data
|
src/payload_schema/__init__.py
|
Python
|
apache-2.0
| 265 | 0 |
from .fe
|
tch import FetchParser
from .json_ld import JsonLdParser
from .lom import LomParser
from .lrmi import LrmiParser
from .nsdl_dc import NsdlDcParser
__all__ = [
'FetchParser',
'JsonLdParser',
'LomParse
|
r',
'LrmiParser',
'NsdlDcParser',
]
|
ravik/mapbox-baseSDK
|
tests/test_geocoder.py
|
Python
|
mit
| 12,916 | 0.004026 |
# coding=utf-8
import json
import re
import responses
import pytest
import mapbox
def test_geocoder_default_name():
"""Default name is set"""
geocoder = mapbox.Geocoder()
assert geocoder.name == 'mapbox.places'
def test_geocoder_name():
"""Named dataset name is set"""
geocoder = mapbox.Geocoder('mapbox.places-permanent')
assert geocoder.name == 'mapbox.places-permanent'
def _check_coordinate_precision(coord, precision):
"""Coordinate precision is <= specified number of digits"""
if '.' not in coord:
return True
else:
return len(coord.split('.')[-1]) <= precision
@responses.activate
def test_geocoder_forward():
"""Forward geocoding works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(access_token='pk.test').forward('1600 pennsylvania ave nw')
assert response.status_code == 200
assert response.json()['query'] == ["1600", "pennsylvania", "ave", "nw"]
@responses.activate
def test_geocoder_forward_geojson():
"""Forward geocoding .geojson method works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(access_token='pk.test').forward('1600 pennsylvania ave nw')
assert response.status_code == 200
assert response.geojson() == response.json()
@responses.activate
def test_geocoder_reverse():
"""Reverse geocoding works"""
lon, lat = -77.4371, 37.5227
body = json.dumps({"query": [lon, lat]})
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/{0},{1}.json?access_token=pk.test'.format(lon, lat),
match_querystring=True,
body=body,
status=200,
content_type='application/json')
response = mapbox.Geocoder(access_token='pk.test').reverse(lon=lon, lat=lat)
assert response.status_code == 200
assert response.json()['query'] == [lon, lat]
@responses.activate
def test_geocoder_reverse_geojson():
"""Reverse geocoding geojson works"""
lon, lat = -77.4371, 37.5227
body = json.dumps({"query": [lon, lat]})
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/{0},{1}.json?access_token=pk.test'.format(lon, lat),
match_querystring=True,
body=body,
status=200,
content_type='application/json')
response = mapbox.Geocoder(access_token='pk.test').reverse(lon=lon, lat=lat)
assert response.status_code == 200
assert response.geojson() == response.json()
def test_geocoder_place_types():
"""Place types are enumerated"""
assert sorted(mapbox.Geocoder().place_types.items()) == [
('address', "A street address with house number. Examples: 1600 Pennsylvania Ave NW, 1051 Market St, Oberbaumstrasse 7."),
('country', "Sovereign states and other political entities. Examples: United States, France, China, Russia."),
('district', "Second order administrative division. Only used when necessary. Examples: Tianjin, Beijing"),
('locality', "A smaller area within a place that possesses official status and boundaries. Examples: Oakleigh (Melbourne)"),
('neighborhood', 'A smaller area within a place, often without formal boundaries. Examples: Montparnasse, Downtown, Haight-Ashbury.'),
('place', "City, town, village or other municipality relevant to a country's address or postal system. Examples: Cleveland, Saratoga Springs, Berlin, Paris."),
('poi', "Places of interest including commercial venues, major landmarks, parks, and other features. Examples: Subway Restaurant, Yosemite National Park, Statue of Liberty."),
('poi.landmark', "Places of interest that are particularly notable or long-lived like parks, places of worship and museums. A strict subset of the poi place type. Examples: Yosemite National Park, Statue of Liberty."),
('postcode', "Postal code, varies by a country's postal system. Examples: 20009, CR0 3RL."),
('region', "First order administrative divisions within a country, usually provinces or states. Examples: California, Ontario, Essonne.")]
def test_validate_country_codes_err():
try:
mapbox.Geocoder()._validate_country_codes(('us', 'bogus'))
except mapbox.InvalidCountryCodeError as err:
assert str(err) == "bogus"
def test_validate_country():
assert mapbox.Geocoder()._validate_country_codes(
('us', 'br')) == {'country': 'us,br'}
def test_validate_place_types_err():
try:
mapbox.Geocoder()._validate_place_types(('address', 'bogus'))
except mapbox.InvalidPlaceTypeError as err:
assert str(err) == "bogus"
def test_validate_place_types():
assert mapbox.Geocoder()._validate_place_types(
('address', 'poi')) == {'types': 'address,poi'}
@responses.activate
def test_geocoder_forward_types():
"""Type filtering of forward geocoding works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?types=address,country,place,poi.landmark,postcode,region&access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward(
'1600 pennsylvania ave nw',
types=('address', 'country', 'place', 'poi.landmark', 'postcode', 'region'))
assert response.status_code == 200
assert response.json()['query'] == ["1600", "pennsylvania", "ave", "nw"]
@responses.activate
def test_geocoder_reverse_types():
"""Type filtering of reverse geocoding works"""
lon, lat = -77.4371, 37.5227
body = json.dumps({"query": [lon, lat]})
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/{0},{1}.json?types=address,country,place,poi.landmark,postcode,region&access_token=pk.test'.format(lon, lat),
match_querystring=True,
body=body,
status=200,
content_type='application/json'
|
)
response = mapbox.Geocoder(
access_token='pk.test').reverse(
lon=lon, lat=lat,
types=('address', 'country', 'place', 'poi.landmark', 'postcode', 'region'))
assert response.status_code == 200
assert response.json()['query'] == [lon, lat]
|
@responses.activate
def test_geocoder_forward_proximity():
"""Proximity parameter works"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json?proximity=0.0,0.0&access_token=pk.test',
match_querystring=True,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward(
'1600 pennsylvania ave nw', lon=0, lat=0)
assert response.status_code == 200
assert response.json()['query'] == ["1600", "pennsylvania", "ave", "nw"]
@responses.activate
def test_geocoder_proximity_rounding():
"""Proximity parameter is rounded to 3 decimal places"""
responses.add(
responses.GET,
'https://api.mapbox.com/geocoding/v5/mapbox.places/1600%20pennsylvania%20ave%20nw.json',
match_querystring=False,
body='{"query": ["1600", "pennsylvania", "ave", "nw"]}', status=200,
content_type='application/json')
response = mapbox.Geocoder(
access_token='pk.test').forward(
'1600 pennsylvania ave nw', lon=0.123456, lat=0.987654)
# check coordinate precision f
|
MarcAndreJean/PCONC
|
Modules/04-02-CPU.py
|
Python
|
mit
| 15,906 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Project : Editor, Compiler and Micro-Computer for
an assembly language.
File name : 04-02-CPU.py
Identification : 04-02-CPU
Title : CPU
Authors : Francis Emond, Malek Khattech,
Mamadou Dia, Marc-André Jean
Date : 16-04-2017
Description : CPU of the Micro-Computer.
The ``CPU`` module
================================
This module contains the CPU class, which represents the CPU of the
micro-computer. This class handles the computations, the memory
transfers and the execution of a program's instructions.
"""
__author__ = "Francis Emond, Malek Khattech, Mamadou Dia, Marc-Andre Jean"
__version__ = "1.0"
__status__ = "Production"
# Import the required modules.
try:
modEnum = __import__("05-Enum")
modBus = __import__("04-01-Bus")
modALU = __import__("04-03-ALU")
except ImportError:
import importlib
modEnum = importlib.import_module("Modules.05-Enum")
modBus = importlib.import_module("Modules.04-01-Bus")
modALU = importlib.import_module("Modules.04-03-ALU")
# Redefinitions (shorthand aliases).
OPCODE = modEnum.OPCODE
MODE = modEnum.MODE
REGISTRE = modEnum.REGISTRE
ADRESSAGE = modEnum.ADRESSAGE
# Starting from the least significant bit:
# - Parity
# - Sign
# - Carry (overflow)
# - Zero
# - CND
# Enumeration for the STATUS register.
STATUS = modEnum.enum(PARITY=0x0001, # 0b 0000 0000 0000 0001
SIGN=0x0002, # 0b 0000 0000 0000 0010
CARRY=0x0004, # 0b 0000 0000 0000 0100
ZERO=0x0008, # 0b 0000 0000 0000 1000
CND=0x0010) # 0b 0000 0000 0001 0000
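# Worked illustration (not part of the original file): the STATUS masks
# are meant to be combined and tested with bitwise operators, e.g.
#
#     regS = 0x0000
#     regS |= STATUS.ZERO | STATUS.PARITY   # set the Zero and Parity flags
#     if regS & STATUS.CARRY:               # test the Carry flag
#         pass
#     regS &= ~STATUS.ZERO                  # clear the Zero flag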
class CPU:
"""
class CPU
========================
This class implements the CPU. It represents the CPU as
presented in the specification
document.
:example:
>>> test = CPU(modBus.Bus())
"""
|
def __init__(self, bus):
"""
Constructor of the CPU class.
The constructor initializes the CPU components, including the ALU.
It also wires the CPU to the bus given as input.
:example:
>>> test = CPU(modBus.Bus())
:param bus: The bus of the micro-computer.
:type bus: Bus
|
"""
self.event = False
# Connection with the bus.
self.bus = bus
self.bus.register(self)
# Create the ALU.
self.alu = modALU.ALU()
# Create the registers.
self.regP = 0x0000 # Program counter.
self.regI = 0x0000 # Instruction register.
self.regS = 0x0000 # Status Register.
# Registers A, B, C, D.
self.regA = 0x0000
self.regB = 0x0000
self.regC = 0x0000
self.regD = 0x0000
# Done.
return
def _getReg(self, registre):
"""
Reads the register given as argument.
This function reads and returns the value of the given register.
:param registre: Register to read.
:type registre: int (16 bits)
:return: Value of that register.
:rtype: int (16 bits)
"""
if registre == REGISTRE.A:
return self.regA
elif registre == REGISTRE.B:
return self.regB
elif registre == REGISTRE.C:
return self.regC
elif registre == REGISTRE.D:
return self.regD
else:
raise Exception()
# Unreachable.
return
def _setReg(self, registre, valeur):
"""
Modifies the given register with the value.
This function sets the register given as argument to the value
given as argument.
:param registre: Register to modify.
:type registre: int (16 bits)
:param valeur: Value to assign.
:type valeur: int (16 bits)
"""
if registre == REGISTRE.A:
self.regA = valeur
elif registre == REGISTRE.B:
self.regB = valeur
elif registre == REGISTRE.C:
self.regC = valeur
elif registre == REGISTRE.D:
self.regD = valeur
else:
return None
return
def clock(self):
"""
Receiver for the clock signal.
This function is called when a clock tick is emitted on the
bus. It resets the CPU when the bus is in RESET mode;
otherwise the CPU fetches the next instruction.
:example:
>>> bus = modBus.Bus()
>>> test = CPU(bus)
>>> test.clock()
>>> bus.clock()
>>> bus.event()
"""
# Reset the CPU if the bus is in reset mode.
if self.bus.mode == MODE.RESET:
# Program registers.
self.regP = 0x0000 # Program counter.
self.regI = 0x0000 # Instruction register.
self.regS = 0x0000 # Status Register.
# Registers A, B, C, D.
self.regA = 0x0000
self.regB = 0x0000
self.regC = 0x0000
self.regD = 0x0000
# Fetch the next instruction if the bus is in INERTE (idle) mode.
elif self.bus.mode == MODE.INERTE:
self._fetch()
self._decode()
self._execute()
# End of the function.
return
def _readAddress(self):
"""
This function fetches a value from an address.
It retrieves the value at an address according to the
addressing mode.
"""
adressage = self.regI & 0x00F0
self.bus.mode = MODE.READ
# 1. The argument is the address of a register.
if adressage == ADRESSAGE.ADDR_OF_REG:
self.bus.data = self._getReg(self.bus.data)
return
# 2. The argument is the address of a register that points to an
# address.
elif adressage == ADRESSAGE.ADDR_FROM_REG:
# Fetch the address stored in that register.
self.bus.address = self._getReg(self.bus.data)
# 3. The argument is an address.
elif adressage == ADRESSAGE.ADDR:
# Return the address.
self.bus.address = self.bus.data
# 4. The argument is an address that points to an address.
elif adressage == ADRESSAGE.ADDR_FROM_ADDR:
# Double fetch.
self.bus.address = self.bus.data
self.bus.event()
self.bus.mode = MODE.READ
self.bus.address = self.bus.data
# Fetch the value at that address.
self.bus.event()
return
def _fetch(self):
"""
This function fetches the next instruction to execute.
It prepares the bus so that the memory reads the next
instruction to execute.
"""
# Put the bus in read mode for the next address.
self.bus.mode = MODE.READ
self.bus.address = self.regP
# Send the signal to the bus.
self.bus.event()
# Read the instruction from the bus.
self.regI = self.bus.data
# Done.
return
def _decode(self):
"""
This function decodes the current instruction.
It performs another fetch for the commands that need the
right-hand (16-bit) argument; otherwise the instruction can be
executed directly.
"""
# Check whether the operation needs no right-hand argument:
if {OPCODE.NOT: True,
OPCODE.EZ: True,
OPCODE.NZ: True,
OPCODE.NOP: True,
OPCODE.HLT: True}.get(self.regI, False):
return # Skip straight to the execution step.
# Otherwise, fetch the right-hand argument:
else:
# Put the bus in read mode.
self.bus.mode = MODE.READ
|
kkoci/orthosie
|
inventory/migrations/0002_auto_20151206_2111.py
|
Python
|
gpl-3.0
| 438 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-07 02:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migratio
|
n(migrations.Migration):
dependencies = [
('inventory', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='produce',
name='plu',
|
field=models.IntegerField(unique=True),
),
]
|
KhronosGroup/COLLADA-CTS
|
StandardDataSets/collada/library_visual_scenes/visual_scene/node/_reference/_reference_node_translate_xyz_cube/_reference_node_translate_xyz_cube.py
|
Python
|
mit
| 3,826 | 0.007057 |
# Copyright (c) 2012 The Khronos Group Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and /or associated documentation files (the "Materials "), to deal in the Materials without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Materials, and to permit persons to whom the Materials are furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Materials.
# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURP
|
OSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
# See Core.Logic.FJudgementContext for the information
# of the 'context' parameter.
# This sample judging object does the following:
#
# JudgeB
|
aseline: just verifies that the standard steps did not crash.
# JudgeSuperior: also verifies that the validation steps are not in error.
# JudgeExemplary: same as intermediate badge.
# We import an assistant script that includes the common verification
# methods. The assistant buffers its checks, so that running them again
# does not incur an unnecessary performance hit.
from StandardDataSets.scripts import JudgeAssistant
# Please feed your node list here:
tagLst = []
attrName = ''
attrVal = ''
dataToCheck = ''
class SimpleJudgingObject:
def __init__(self, _tagLst, _attrName, _attrVal, _data):
self.tagList = _tagLst
self.attrName = _attrName
self.attrVal = _attrVal
self.dataToCheck = _data
self.status_baseline = False
self.status_superior = False
self.status_exemplary = False
self.__assistant = JudgeAssistant.JudgeAssistant()
def JudgeBaseline(self, context):
# No step should crash
self.__assistant.CheckCrashes(context)
# Import/export/validate must exist and pass, while Render must only exist.
self.__assistant.CheckSteps(context, ["Import", "Export", "Validate"], ["Render"])
if (self.__assistant.GetResults() == False):
self.status_baseline = False
return False
# Compare the rendered images between import and export
# Then compare images against reference test to check for non-equivalence
self.__assistant.CompareRenderedImages(context)
self.status_baseline = self.__assistant.DeferJudgement(context)
return self.status_baseline
# To pass intermediate you need to pass basic, this object could also include additional
# tests that were specific to the intermediate badge.
def JudgeSuperior(self, context):
self.status_superior = self.status_baseline
return self.status_superior
# To pass advanced you need to pass intermediate, this object could also include additional
# tests that were specific to the advanced badge
def JudgeExemplary(self, context):
self.status_exemplary = self.status_superior
return self.status_exemplary
# This is where all the work occurs: "judgingObject" is an absolutely necessary token.
# The dynamic loader looks very specifically for a class instance named "judgingObject".
#
judgingObject = SimpleJudgingObject(tagLst, attrName, attrVal, dataToCheck);
|
MaxVanDeursen/tribler
|
Tribler/Test/Community/Multichain/__init__.py
|
Python
|
lgpl-3.0
| 77 | 0 |
"""
This pa
|
ckage conta
|
ins tests for the Multichain community in Tribler.
"""
|
zhangyubaka/tweepy_favbot
|
bot.py
|
Python
|
mit
| 923 | 0.006501 |
#!/usr/bin/env python3
# -*- coding: utf8 -*-
import tweepy
from multiprocessing.dummy import Pool  # thread pool; `pool` is used in main() but was never created
consumer_key = ""
consumer_secret = ""
access_token = ""
access_token_secret = ""
auth = tweepy.OAut
|
hHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
def tdata():
userid = str(input("Please input the id of the user you want to fav-attack\n"))
count = input("Input the number of statuses you want to fav!\n")
return userid, count
def main():
t = tdata()
tl = []
fav = api.user_timeline(id = t[0], count = t[1])
for status in fav:
tl.append(status.id_str)
pool = Pool(4)  # modest concurrency for the favourite calls
try:
pool.map(api.create_fav
|
orite, tl)
except tweepy.error.TweepError as e:
if e.args[0][0]['code'] == 139:
print("You have already favorited this status! \n")
else:
print(e.reason)
finally:
print("Done!")
if __name__ == "__main__":
main()
|
googleapis/python-datacatalog
|
samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py
|
Python
|
apache-2.0
| 1,496 | 0.000668 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ModifyEntryOverview
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-datacatalog
# [START datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_sync]
from google.cloud import datacatalog_v1
def sample_modify_entry_overview():
# Create a client
client = datacatalog_v1.DataCatalogClient()
# I
|
nitialize request argu
|
ment(s)
request = datacatalog_v1.ModifyEntryOverviewRequest(
name="name_value",
)
# Make the request
response = client.modify_entry_overview(request=request)
# Handle the response
print(response)
# [END datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_sync]
|
hiteshgarg14/Django-Social-Website
|
bookmarks/common/decorators.py
|
Python
|
mit
| 478 | 0.004184 |
from django.http import HttpResponseBadRequest, HttpResponse
"""
Build custom deco
|
rators for your views if you find that you are repeating
the same checks in multiple views.
"""
def ajax_required(f):
def wrap(request, *args, **kwargs):
if not request.is_ajax():
#return HttpResponse("hi")
return HttpRespo
|
nseBadRequest()
return f(request, *args, **kwargs)
wrap.__doc__ = f.__doc__
wrap.__name__ = f.__name__
return wrap
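# Minimal usage sketch (hypothetical view, not part of this module):
#
#     @ajax_required
#     def image_like(request):
#         ...  # reached only for AJAX requests; other clients get HTTP 400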
|
gforcada/jenkins-job-builder
|
jenkins_jobs/sphinx/yaml.py
|
Python
|
apache-2.0
| 4,991 | 0 |
# Copyright 2012 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Most of this code originated in sphinx.domains.python and
# sphinx.ext.autodoc and has been only slightly adapted for use in
# subclasses here.
# :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
# :license: BSD, see LICENSE for details.
import re
from sphinx import addnodes
from sphinx.domains.python import _pseudo_parse_arglist
from sphinx.domains.python import PyModulelevel
from sphinx.ext.autodoc import Documenter
from sphinx.ext.autodoc import FunctionDocumenter
from sphinx.locale import _
yaml_sig_re = re.compile(r'yaml:\s*(.*)')
class PyYAMLFunction(PyModulelevel):
def handle_signature(self, sig, signode):
"""Transform a Python signature into RST nodes.
Return (fully qualified name of the thing, classname if any).
If inside a class, the current class name is handled intelligently:
* it is stripped from the displayed name if present
* it is added to the full name (return value) if not present
"""
name_prefix = None
name = sig
arglist = None
retann = None
# determine module and class name (if applicable), as well as full name
modname = self.options.get(
'module', self.env.temp_data.get('py:module'))
classname = self.env.temp_data.get('py:class')
fullname = name
signode['module'] = modname
signode['class'] = classname
signode['fullname'] = fullname
sig_prefix = self.get_signature_prefix(sig)
if sig_prefix:
signode += addnodes.desc_annotation(sig_prefix, sig_prefix)
if name_prefix:
signode += addnodes.desc_addname(name_prefix, name_prefix)
anno = self.options.get('annotation')
signode += addnodes.desc_name(name, name)
if not arglist:
if self.needs_arglist():
# for callables, add an empty parameter list
signode += addnodes.desc_parameterlist()
if retann:
signode += addnodes.desc_returns(retann, retann)
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
return fullname, name_prefix
_pseudo_parse_arglist(signode, arglist)
if retann:
signode += addnodes.desc_returns(retann, retann)
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
return fullname, name_prefix
def get_index_text(self, modname, name_cls):
return _('%s (in module %s)') % (name_cls[0], modname)
class YAMLFunctionDocumenter(FunctionDocumenter):
priority = FunctionDocumenter.priority + 10
objtype = 'yamlfunction'
directivetype = 'yamlfunction'
@classmethod
def can_document_member(cls, member, membername, isattr, parent):
if not FunctionDocumenter.can_document_member(member, membername,
isattr, parent):
return False
if member.__doc__ is not None and yaml_sig_re.match(member.__doc__):
return True
return False
def _fin
|
d_signature(self, encoding=None):
docstrings = Documenter.get_doc(self, encoding, 2)
if len(docstrings) != 1:
return
doclines = docstring
|
s[0]
setattr(self, '__new_doclines', doclines)
if not doclines:
return
# match first line of docstring against signature RE
match = yaml_sig_re.match(doclines[0])
if not match:
return
name = match.group(1)
# ok, now jump over remaining empty lines and set the remaining
# lines as the new doclines
i = 1
while i < len(doclines) and not doclines[i].strip():
i += 1
setattr(self, '__new_doclines', doclines[i:])
return name
def get_doc(self, encoding=None, ignore=1):
lines = getattr(self, '__new_doclines', None)
if lines is not None:
return [lines]
return Documenter.get_doc(self, encoding, ignore)
def format_signature(self):
result = self._find_signature()
self._name = result
return ''
def format_name(self):
return self._name
def setup(app):
app.add_autodocumenter(YAMLFunctionDocumenter)
app.add_directive_to_domain('py', 'yamlfunction', PyYAMLFunction)
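# Illustrative sketch (assumed docstring convention): a function is picked
# up by YAMLFunctionDocumenter when the first line of its docstring matches
# yaml_sig_re, i.e. starts with "yaml:" followed by the name to document.
#
#     def copyartifact(registry, xml_parent, data):
#         """yaml: copyartifact
#         The remaining lines become the documented body.
#         """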
|
nwjs/chromium.src
|
third_party/android_deps/libs/com_google_firebase_firebase_messaging/3pp/fetch.py
|
Python
|
bsd-3-clause
| 1,389 | 0.00072 |
#!/usr/bin/env python
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This is generated, do not edit. Update BuildConfigGenerator.groovy and
# 3ppFetch.template instead.
from __future__ import print_function
import argparse
import json
import os
_FILE_URL = 'https://dl.google.com/dl/android/maven2/com/google/firebase/firebase-messaging/21.0.1/firebase-messaging-21.0.1.aar'
_FILE_NAME = 'firebase-messaging-21.0.1.aar'
_FILE_VERSION = '21.0.1'
def do_latest():
print(_FILE_VERSION)
def get_download_url(version):
if _FILE_URL.endswith('.jar'):
ext = '.jar'
elif _FILE_URL.endswith('.aar'):
ext = '.aar'
else:
raise Exception('Unsup
|
ported extension for %s' % _FILE_URL)
partial_manifest = {
'url': [_FILE_URL],
'name': [_FILE_NAME],
'ext': ext,
}
print(json.dumps(par
|
tial_manifest))
def main():
ap = argparse.ArgumentParser()
sub = ap.add_subparsers()
latest = sub.add_parser("latest")
latest.set_defaults(func=lambda _opts: do_latest())
download = sub.add_parser("get_url")
download.set_defaults(
func=lambda _opts: get_download_url(os.environ['_3PP_VERSION']))
opts = ap.parse_args()
opts.func(opts)
if __name__ == '__main__':
main()
|
sigma-random/pwnypack
|
tests/__init__.py
|
Python
|
mit
| 90 | 0 |
i
|
mport pwny
def setup():
pwny.target.assume(pwny.Target(arch=pwny.Target.Arch.x
|
86))
|
retoo/pystructure
|
tests/python/typeinference/import_star_definitions.py
|
Python
|
lgpl-2.1
| 74 | 0.027027 |
class Class(object):
pass
def func():
return 3.14
CONSTANT = 42
| ||
McSinyx/hsg
|
others/other/nuoc.py
|
Python
|
gpl-3.0
| 941 | 0.002125 |
#!/usr/bin/env python3
from heapq import heapify, heappop, heappush
with open('NUOC.INP') as f:
m, n = map(int, f.readline().split())
height = [[int(i) for i in line.split()] for line in f]
# Seed a min-heap with every border cell: water can only drain over the border.
queue = ([(h, 0, i) for i, h in enumerate(height[0])]
+ [(h, m - 1, i) for i, h in enumerate(height[-1])]
+ [(height[i][0], i, 0) for i i
|
n range(m)]
+ [(height[i][-1], i, n - 1) for i in range(m)])
heapify(queue)
# Border cells start out visited; the interior is unvisited.
visited = ([[True] * n]
+ [[True] + [False] * (n - 2) + [True] for _ in range(m - 2)]
+ [[True] * n])
result = 0
# Flood inwards from the lowest enclosing wall (trapped-rain-water algorithm).
while queue:
h, i, j = h
|
eappop(queue)
for x, y in (i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1):
if 0 <= x < m and 0 <= y < n and not visited[x][y]:
# Water held above a lower neighbour is bounded by the wall height h.
result += max(0, h - height[x][y])
heappush(queue, (max(height[x][y], h), x, y))
visited[x][y] = True
with open('NUOC.OUT', 'w') as f: print(result, file=f)
|
srijanrodo/telegram-bots-python-api
|
telegram.py
|
Python
|
gpl-2.0
| 8,175 | 0.050765 |
import requests
from tel_types import User, Message, Update, UserProfilePhotos
import time
base_url = 'https://api.telegram.org/bot'
class Telegram:
def __init__(self, token):
self.call_url = base_url + token + '/'
self.token = token
self.req_timeout = 5
self.text_limit = 4096
self.last_error = ''
self.me = self.getMe()
def __method_create__(self, method_name, files = None, data = None):
url = self.call_url + method_name
try:
if files is not None:
ret = requests.post(url, files = files, data = data, timeout = self.req_timeout)
else :
ret = requests.post(url, data = data, timeout = self.req_timeout)
except requests.exceptions.ConnectionError:
self.last_error = 'Error: Network Issue'
ret = None
except requests.exceptions.Timeout:
self.last_error = 'Error: Timeout Occured'
ret = None
except Exception:
self.last_error = 'Unknown Error'
ret = None
return ret
def __method_create_json__(self, method_name, files = None, data = None):
tmp = self.__method_create__(method_name, files = files, data = data)
if tmp == None:
ret = None
else:
try:
ret = tmp.json()
except ValueError:
self.last_error = "Error: Request Failed (JSON object not returned)"
ret = None
return ret
def getMe(self):
tmp = self.__method_create_json__('getMe')
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return User(tmp['result'])
def getUpdates(self, offset = None, limit = 100, timeout = 0):
data = {
'offset':offset,
'limit':limit,
'timeout':timeout
}
tmp = self.__method_create_json__('getUpdates', data = data)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return [Update(x) for x in tmp['result']]
def sendMessage(self, *args, **data):
if data == {}:
if len(args) != 1 or type(args[0])
|
!= dict:
return None
data = args[0]
if 'reply_markup' in data:
data['reply_markup'] = data['reply_markup'].json_str
tmp = self.__method_create_json__('sendMe
|
ssage', data = data)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return Message(tmp['result'])
def sendLargeMessage(self, **data):
if 'text' not in data:
return None
text = data['text']
while len(text) > self.text_limit:
send = self.split(text)
text = text[len(send):]
data['text'] = send
if self.sendMessage(data) is None:
return None
data['text'] = text
return self.sendMessage(data)
def forwardMessage(self, chat_id, from_chat_id, message_id):
data = {
'chat_id' : chat_id,
'from_chat_id' : from_chat_id,
'message_id' : message_id
}
tmp = self.__method_create_json__('forwardMessage', data = data)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return Message(tmp['result'])
def sendFiles(self, chat_id, input_file, file_type, caption = None, \
reply_to_message_id = None, reply_markup = None):
if input_file.file_id is None and input_file.file_o is None:
self.last_error = 'Error: No File Specified'
return None
data = {
'chat_id':chat_id,
'reply_to_message_id' : reply_to_message_id,
'reply_markup' : None if reply_markup is None else reply_markup.json_str
}
if caption is not None:
data['caption'] = caption
if input_file.file_id is not None:
files = {file_type:(None, input_file.file_id)}
else :
files = {file_type: input_file.file_o}
method_name = 'send' + file_type.title()
tmp = self.__method_create_json__(method_name, data = data, files = files)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return Message(tmp['result'])
def sendPhoto(self, chat_id, photo, caption = None, \
reply_to_message_id = None, reply_markup = None):
return self.sendFiles(chat_id, photo, 'photo', caption = caption, \
reply_to_message_id = reply_to_message_id, reply_markup = reply_markup)
def sendVideo(self, chat_id, photo, reply_to_message_id = None, reply_markup = None):
return self.sendFiles(chat_id, photo, 'video', \
reply_to_message_id = reply_to_message_id, reply_markup = reply_markup)
def sendAudio(self, chat_id, photo, reply_to_message_id = None, reply_markup = None):
return self.sendFiles(chat_id, photo, 'audio', \
reply_to_message_id = reply_to_message_id, reply_markup = reply_markup)
def sendDocument(self, chat_id, photo, reply_to_message_id = None, reply_markup = None):
return self.sendFiles(chat_id, photo, 'document',\
reply_to_message_id = reply_to_message_id, reply_markup = reply_markup)
def sendSticker(self, chat_id, photo, reply_to_message_id = None, reply_markup = None):
return self.sendFiles(chat_id, photo, 'sticker',\
reply_to_message_id = reply_to_message_id, reply_markup = reply_markup)
def sendLocation(self, chat_id, latitude, longitude, reply_to_message_id = None, \
reply_markup = None):
data = {
'chat_id': chat_id,
'latitude' : latitude,
'longitude' : longitude,
'reply_to_message_id' : reply_to_message_id,
'reply_markup' : None if reply_markup is None else reply_markup.json_str
}
tmp = self.__method_create_json__('sendLocation', data = data)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return Message(tmp['result'])
def sendChatAction(self, chat_id, action):
data = {
'chat_id' : chat_id,
'action' : action
}
tmp = self.__method_create_json__('sendChatAction', data = data)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return tmp['result']
def getUserProfilePhotos(self, user_id, offset = 0, limit = 100):
data = {
'user_id' : user_id,
'offset' : offset,
'limit' : limit
}
        tmp = self.__method_create_json__('getUserProfilePhotos', data = data)
if tmp is None:
return None
if tmp['ok'] is False:
self.last_error = tmp['description']
return None
return UserProfilePhotos(tmp['result'])
def split(self, text):
prev = ''
new = ''
for x in text.splitlines():
prev = new
new = new + '\n' + x
if len(new) > self.text_limit:
break
return prev
def setDefaultTimeout(self, timeout):
self.req_timeout = timeout
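# Usage sketch for the wrapper above (token and chat id are placeholder
# assumptions; each method returns None and sets self.last_error on failure):
#
#   bot = Telegram('123456:ABC-token')
#   me = bot.getMe()
#   if me is None:
#       print(bot.last_error)
#   else:
#       bot.sendMessage(chat_id=42, text='hello')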
class TelegramEventLoop(Telegram):
def __init__(self, token, confile = 'telegram.conf'):
super().__init__(token)
self.handlers = []
self.exit = False
self.nonText = None
self.confile = confile
def addHandler(self, check_msg, *funcs):
for x in funcs:
self.handlers.append((check_msg, x))
return check_msg
def mainLoop(self):
try:
f = open(self.confile, 'r')
last_update = int(f.read())
f.close()
except FileNotFoundError:
last_update = 0
if self.checkNetworkConnection() is False:
print('No Connection')
self.waitForNetworkConnection()
print('Connection Back')
while self.exit is False:
update = self.getUpdates(offset = last_update + 1)
if update == None:
update = []
print(self.last_error)
if self.checkNetworkConnection() is False:
print('No Connection')
self.waitForNetworkConnection()
print('Connection Back')
elif update != []:
last_update = update[0].update_id
for x in update:
last_update = max(last_update, x.update_id)
for (key,foo) in self.handlers:
if key(x) == True:
foo(x)
if update != []:
f = open(self.confile, 'w')
f.write(str(last_update))
f.close()
print('Exiting')
return
def setNonTextHandler(self, func):
self.nonText = func
def handleNonText(self, x):
print("Non-Text Message Arrived\n" + x.msg_type + "\nCalling default Handler")
if self.nonText is not None:
return self.nonText(x)
return
def doExit(self, *arg):
self.exit = True
def checkNetworkConnection(self):
try:
requests.get('https://www.example.com')
except requests.exceptions.ConnectionError:
return False
return True
def waitForNetworkConnection(self):
while self.checkNetworkConnection() is False:
time.sleep(1)
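# Wiring sketch for the event loop (the predicate and handler names are
# illustrative; the exact attributes depend on the Update class defined
# elsewhere in this module):
#
#   bot = TelegramEventLoop('123456:ABC-token')
#   def has_text(update):
#       return getattr(update, 'message', None) is not None
#   def echo(update):
#       bot.sendMessage(chat_id=update.message.chat.id, text=update.message.text)
#   bot.addHandler(has_text, echo)
#   bot.mainLoop()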
|
sayan801/indivo_server
|
indivo/tests/data/record.py
|
Python
|
gpl-3.0
| 1,535 | 0.013029 |
from indivo.models import Record, Demographics
from base import *
class TestRecord(TestModel):
model_fields = ['label', 'demographics', 'owner', 'external_id']
model_class = Record
def _setupargs(self, label, demographics=None, owner=None, external_id=None, extid_principal_key=None):
self.label = label
self.demographics = demographics
self.owner = owner
self.local_external_id = external_id
if extid_principal_key:
self.external_id = Record.prepare_external_id(external_id, extid_principal_key.to.raw_data['account_id'])
else:
self.external_id = None
_TEST_RECORDS = [
{'label':'testing_record_label',
'demographics':ForeignKey('demographics', 'TEST_DEMOGRAPHICS', 0),
'owner':ForeignKey('account', 'TEST_ACCOUNTS', 0),
},
    {'label':'test_record_label2',
'demographics':ForeignKey('demographics', 'TEST_DEMOGRAPHICS', 1),
'owner':ForeignKey('account', 'TEST_ACCOUNTS', 0),
},
{'label':'empty_record',
},
{'label':'bob',
'owner':ForeignKey('account', 'TEST_ACCOUNTS', 0),
},
{'label':'jane',
'owner':ForeignKey('account', 'TEST_ACCOUNTS', 0),
},
{'label':'test_record_extid',
'demographics':ForeignKey('demographics', 'TEST_DEMOGRAPHICS', 2),
'owner':ForeignKey('account', 'TEST_ACCOUNTS', 0),
'external_id':'RECORD5_EXTID',
'extid_principal_key':ForeignKey('account', 'TEST_ACCOUNTS', 4),
},
]
TEST_RECORDS = scope(_TEST_RECORDS, TestRecord)
|
paulfitz/sheetsite
|
sheetsite/tasks/notify.py
|
Python
|
mit
| 2,767 | 0.001084 |
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import json
import os
from sheetsite.site_queue import app
import smtplib
@app.task
def notify_one(email, subject, page, text):
print("send [%s] / %s / %s" % (email, subject, page))
server_ssl = smtplib.SMTP_SSL("smtp.gmail.com", 465)
server_ssl.ehlo() # optional, called by login()
me = os.environ['GMAIL_USERNAME']
server_ssl.login(me, os.environ['GMAIL_PASSWORD'])
msg = MIMEMultipart('alternative')
msg['Subject'] = subject
msg['From'] = me
msg['To'] = email
# Record the MIME types of both parts - text/plain and text/html.
part1 = MIMEText(text, 'plain')
part2 = MIMEText(page, 'html')
msg.attach(part1)
msg.attach(part2)
server_ssl.sendmail(me, email, msg.as_string())
server_ssl.close()
return True
@app.task
def notify_all(name, site_params, diff_html, diff_text):
print("NOTIFY_spreadsheet", site_params, name)
import daff
import jinja2
import premailer
root = os.environ['SHEETSITE_CACHE']
path = os.path.join(root, name)
print("Should look in", path)
notifications = None
for fname in ['private.json', 'public.json']:
full_fname = os.path.join(path, fname)
print("Look in", full_fname)
book = json.loads(open(full_fname).read())
if 'notifications' in book['tables']:
notifications = book['tables']['notifications']
break
if notifications is None:
print("No notifications requested")
return True
print("Notifications", notifications)
# make a html report
css = daff.DiffRender().sampleCss()
site_params = dict(site_params)
site_params['css'] = css
site_params['diff'] = diff_html
env = jinja2.Environment(loader=jinja2.PackageLoader('sheetsite', 'templates'))
template = env.get_template('update.html')
page = template.render(site_params)
page = premailer.transform(page)
    site_params['diff'] = diff_text
template = env.get_template('update.txt')
page_text = template.render(site_params)
for target in notifications['rows']:
email = target.get('EMAIL', None)
if email is None:
email = target.get('email', None)
if email is not None:
if site_params['no_notify']:
                print("skip email to {}".format(email))
else:
notify_one.delay(email=email,
subject="update to {}".format(site_params.get('name',
'directory')),
page=page,
text=page_text)
return True
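# Usage sketch for the mail task (address and payloads are illustrative
# assumptions; notify_all enqueues it the same way):
#   notify_one.delay(email='someone@example.com',
#                    subject='update to directory',
#                    page='<html>diff</html>', text='diff')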
|
ahmad88me/PyGithub
|
github/GithubApp.py
|
Python
|
lgpl-3.0
| 6,426 | 0.004669 |
############################ Copyrights and license ############################
# #
# Copyright 2020 Raju Subramanian <coder@mahesh.net> #
# #
# This file is part of PyGithub.                                               #
# http://pygithub.readthedocs.io/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option)    #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
import github.GithubObject
import github.NamedUser
class GithubApp(github.GithubObject.CompletableGithubObject):
"""
This class represents github apps. The reference can be found here https://docs.github.com/en/rest/reference/apps
"""
def __repr__(self):
return self.get__repr__({"id": self._id.value, "url": self._url.value})
@property
def created_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._created_at)
return self._created_at.value
@property
def description(self):
"""
:type: string
"""
self._completeIfNotSet(self._description)
return self._description.value
@property
def events(self):
"""
:type: list of string
"""
self._completeIfNotSet(self._events)
return self._events.value
@property
def external_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._external_url)
return self._external_url.value
@property
def html_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._html_url)
return self._html_url.value
@property
def id(self):
"""
:type: int
"""
self._completeIfNotSet(self._id)
return self._id.value
@property
def name(self):
"""
:type: string
"""
self._completeIfNotSet(self._name)
return self._name.value
@property
def owner(self):
"""
:type: :class:`Github.NamedUser.NamedUser`
"""
self._completeIfNotSet(self._owner)
return self._owner.value
@property
def permissions(self):
"""
:type: dict
"""
self._completeIfNotSet(self._permissions)
return self._permissions.value
@property
def slug(self):
"""
:type: string
"""
return self._slug.value
@property
def updated_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._updated_at)
return self._updated_at.value
@property
def url(self):
"""
:type: string
"""
return self._url.value
def _initAttributes(self):
self._created_at = github.GithubObject.NotSet
self._description = github.GithubObject.NotSet
self._events = github.GithubObject.NotSet
self._external_url = github.GithubObject.NotSet
self._html_url = github.GithubObject.NotSet
self._id = github.GithubObject.NotSet
self._name = github.GithubObject.NotSet
self._owner = github.GithubObject.NotSet
self._permissions = github.GithubObject.NotSet
self._slug = github.GithubObject.NotSet
self._updated_at = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "created_at" in attributes: # pragma no branch
self._created_at = self._makeDatetimeAttribute(attributes["created_at"])
if "description" in attributes: # pragma no branch
self._description = self._makeStringAttribute(attributes["description"])
if "events" in attributes: # pragma no branch
self._events = self._makeListOfStringsAttribute(attributes["events"])
if "external_url" in attributes: # pragma no branch
self._external_url = self._makeStringAttribute(attributes["external_url"])
if "html_url" in attributes: # pragma no branch
self._html_url = self._makeStringAttribute(attributes["html_url"])
if "id" in attributes: # pragma no branch
self._id = self._makeIntAttribute(attributes["id"])
if "name" in attributes: # pragma no branch
self._name = self._makeStringAttribute(attributes["name"])
if "owner" in attributes: # pragma no branch
self._owner = self._makeClassAttribute(
github.NamedUser.NamedUser, attributes["owner"]
)
if "permissions" in attributes: # pragma no branch
self._permissions = self._makeDictAttribute(attributes["permissions"])
if "slug" in attributes: # pragma no branch
self._slug = self._makeStringAttribute(attributes["slug"])
self._url = self._makeStringAttribute("/apps/" + attributes["slug"])
if "updated_at" in attributes: # pragma no branch
self._updated_at = self._makeDatetimeAttribute(attributes["updated_at"])
if "url" in attributes:
self._url = self._makeStringAttribute(attributes["url"])
|
proximate/proximate
|
messageboard.py
|
Python
|
bsd-3-clause
| 15,997 | 0.002688 |
#
# Proximate - Peer-to-peer social networking
#
# Copyright (c) 2008-2011 Nokia Corporation
#
# All rights reserved.
#
# This software is licensed under The Clear BSD license.
# See the LICENSE file for more details.
#
from gobject import timeout_add_seconds, source_remove
from random import random, randrange
from time import time
from filesharing import Share_Meta, Subscription
from plugins import Plugin, get_plugin_by_type
from support import warning
from proximateprotocol import PLUGIN_TYPE_FETCHER, PLUGIN_TYPE_FILE_SHARING, \
PLUGIN_TYPE_MESSAGE_BOARD, PLUGIN_TYPE_NOTIFICATION, \
PLUGIN_TYPE_COMMUNITY, valid_fs_gid, \
PLUGIN_TYPE_STATE, PLUGIN_TYPE_SCHEDULER
from proximatestate import normal_traffic_mode
from typevalidator import validate, ANY, OPTIONAL_KEY, ZERO_OR_MORE, ONE_OR_MORE
from utils import n_lists, remove_all
SEARCH_TIMEOUT = 15 # in seconds
CACHE_UTIL = 66 # percent of messages to spare on cleanup
MAX_MESSAGES_IN_CACHE = 256 # max number of messages in cache
CACHE_VALIDITY_PERIOD = 3600 # in seconds
SEND_VALIDITY_PERIOD = 30 # in seconds
AUTO_BROADCAST_PERIOD = 120 # in seconds
def satisfy_criteria(criteria, meta):
if criteria != None:
for (key, value) in criteria.items():
if meta.get(key) != value:
return False
return True
def search_metas(metas, criteria, keywords):
""" Note: storage may contain message from others """
msgs = []
for meta in metas:
if not satisfy_criteria(criteria, meta):
continue
if keywords == None or len(keywords) == 0:
msgs.append(meta)
continue
l = ['']
for name in ('from', 'subject', 'purpose', 'msg'):
l.append(meta[name])
l.append('')
s = '\n'.join(l).lower()
for keyword in keywords:
if s.find(keyword.lower()) >= 0:
msgs.append(meta)
break
return msgs
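# Quick illustration with a hypothetical plain-dict meta (real callers pass
# Share_Meta instances; search_metas only uses the mapping interface shown):
#   m = {'id': 1, 'from': 'alice', 'subject': 'lunch', 'purpose': '', 'msg': 'pizza?'}
#   search_metas([m], criteria=None, keywords=['pizza'])        # -> [m]
#   search_metas([m], criteria={'from': 'bob'}, keywords=None)  # -> []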
class Search_Context:
def __init__(self, callback, ctx=None, criteria=None, keywords=None):
self.callback = callback
self.ctx = ctx
self.criteria = criteria
self.keywords = keywords
self.checked = {}
def process(self, user, metas):
filteredmetas = search_metas(metas, self.criteria, self.keywords)
if len(filteredmetas) == 0:
return
newmetas = []
for meta in filteredmetas:
key = (user, meta['id'])
if not self.checked.has_key(key):
newmetas.append(meta)
self.checked[key] = None
if len(newmetas) > 0:
self.callback(user, newmetas, self.ctx)
class Message_Board(Plugin):
""" Notes on messages, see 'self.msgspec' below.
'replyid' is used to create a message that is related to an older
message. The 'replyid' is the old message's gid.
If 'src' and 'dst' exist, they are guaranteed in Share_Meta
validation to be strings.
If 'ttl' exists, it is guaranteed to be a non-negative integer.
"""
msgspec = {'subject': str,
'from': str,
'msg': str,
OPTIONAL_KEY('replygid'): valid_fs_gid,
OPTIONAL_KEY('url'): str,
}
queryspec = {'t': 'msgquery',
OPTIONAL_KEY('keywords'): [ONE_OR_MORE, str],
OPTIONAL_KEY('criteria'): {str: ANY},
}
queryresultspec = {'msgs': [ZERO_OR_MORE, {}]}
def __init__(self, options):
self.register_plugin(PLUGIN_TYPE_MESSAGE_BOARD)
self.community = None
self.fetcher = None
self.fs = None
self.state = None
self.notification = None
self.statusindicator = None
self.options = options
self.gui = None
self.queryidx = 0
self.keywords = []
self.searchctxs = []
self.notifications = {}
self.periodidx = 0
self.cache = {} # maps (uid, fsid) to (timestamp, meta)
def register_ui(self, ui):
self.gui = ui
def cleanup(self):
self.state.set_plugin_variable(self.name, 'watchkeywords', self.keywords)
savednotifications = {}
for (key, value) in self.notifications.items():
if value == 1:
savednotifications[key] = 1
self.state.set_plugin_variable(self.name, 'notifications', savednotifications)
def cancel_search(self, sctx):
self.searchctxs.remove(sctx)
return False
def msg_cache(self, user, metas):
t = int(time())
uid = user.get('uid')
for meta in metas:
self.cache[(uid, meta.get('id'))] = (t, meta)
if len(self.cache) <= MAX_MESSAGES_IN_CACHE:
return
timesorted = []
for (key, value) in self.cache.items():
timestamp = value[0]
timesorted.append((timestamp, key))
timesorted.sort()
ntodelete = len(timesorted) - (CACHE_UTIL * MAX_MESSAGES_IN_CACHE) / 100
for i in xrange(ntodelete):
key = timesorted[i][1]
self.cache.pop(key)
def process_results(self, reply):
metas = []
if reply == None:
return metas
if not validate(self.queryresultspec, reply):
warning('msgboard: Invalid results: %s\n' % str(reply))
return metas
for metadict in reply['msgs']:
meta = Share_Meta()
if not meta.unserialize(metadict):
warning('msgboard: Can not unserialize: %s\n' % str(metadict))
continue
            if not self.validate_message(meta):
                warning('msgboard: Invalid meta: %s\n' % str(meta))
continue
metas.append(meta)
return metas
def got_query_results(self, user, reply, ctx):
metas = self.process_results(reply)
self.msg_cache(user, metas)
for meta in metas:
if self.is_hot(meta):
self.notify_user(user, meta)
        for sctx in self.searchctxs:
sctx.process(user, metas)
def handle_message(self, user, sm):
""" Handle messages that were found from other users' fileshares """
if not self.validate_message(sm):
sm['ttl'] = 0
warning('msgboard: Invalid message: %s\n' % str(sm))
return
warning('New message: %s\n' % sm['subject'])
def get_state(self):
return self.keywords
def is_hot(self, meta):
if len(self.keywords) == 0 or meta.get_priv('mine'):
return False
return len(search_metas([meta], None, self.keywords)) > 0
def modify_state(self, add, keyword):
if add:
if keyword in self.keywords:
return
self.keywords.append(keyword)
else:
remove_all(self.keywords, keyword)
self.cleanup()
def notify_user(self, user, meta):
uid = user.get('uid')
key = (uid, meta['id'])
if key in self.notifications:
return
self.notifications[key] = 0
msg = 'User %s has a message titled: %s. View it?' % (user.tag(), meta['subject'])
self.notification.notify_with_response(msg, self.view_message, (key, meta))
def view_message(self, response, msg, ctx):
(key, meta) = ctx
self.notifications[key] = 1
if response == self.notification.RESPONSE_DELETED:
return True
self.gui.view_message(meta)
return True
def all_metas(self):
metas = []
for share in self.fs.get_shares(purpose=self.name):
if share.meta.get_priv('mine'):
metas.append(share.meta)
return metas
def read_state(self):
l = self.state.get_plugin_variable(self.name, 'watchkeywords')
if l != None:
self.keywords = l
notifications = self.state.get_plugin_variable(self.name, 'notifications')
if notifications != None:
self.notifications = notifications
def handle_msgpush(self, user, request):
|
openstack/python-muranoclient
|
muranoclient/tests/unit/osc/v1/fakes.py
|
Python
|
apache-2.0
| 823 | 0 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from osc_lib.tests import utils
from unittest import mock
class TestApplicationCatalog(utils.TestCommand):
def setUp(self):
super(TestApplicationCatalog, self).setUp()
self.app.client_manager.application_catalog = mock.Mock()
|
TorleifHensvold/ITGK3
|
Oving5/torleif/07_Forenkling_av_brøker.py
|
Python
|
mit
| 706 | 0.04416 |
def forenkling(a,b):
    while b!=0:
        gammel_b=b
b=a%b
a=gammel_b
#print(a,b)
return a
print(forenkling(30,20))
print(forenkling(10,2))
def gcd(a,b):
a=forenkling(a,b)
return a
def reduce_fraction(a,b):
divisor=forenkling(a,b)
a=int(a/divisor)
b=int(b/divisor)
return a,b
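# Worked example (sketch): forenkling(30, 20) walks 30,20 -> 20,10 -> 10,0 and
# returns the gcd 10, so reduce_fraction(30, 20) yields (3, 2).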
def main():
print('Dette forkorter brøker på formen a/b:')
a = int(input('Skriv inn et heltall a: '))
b = int(input('Skriv inn et heltall b: '))
    a,b=reduce_fraction(a,b)
    if a!=b:
print('Forkortningen av brøken gir: ',a,'/',b,sep='')
else:
print('Forkortningen av brøken gir: 1')
main()
|
LamCiuLoeng/vat
|
vatsystem/model/__init__.py
|
Python
|
mit
| 2,266 | 0.004854 |
# -*- coding: utf-8 -*-
"""The application's model objects"""
from zope.sqlalchemy import ZopeTransactionExtension
from sqlalchemy.orm import scoped_session, sessionmaker
#from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base
# Global session manager: DBSession() returns the Thread-local
# session object appropriate for the current web request.
maker = sessionmaker(autoflush=True, autocommit=False, extension=ZopeTransactionExtension())
maker2 = sessionmaker(autoflush=False, autocommit=False)
DBSession = scoped_session(maker)
DBSession2 = scoped_session(maker2)
# Base class for all of our model classes: By default, the data model is
# defined with SQLAlchemy's declarative extension, but if you need more
# control, you can switch to the traditional method.
DeclarativeBase = declarative_base()
DeclarativeBase2 = declarative_base()
# There are two convenient ways for you to spare some typing.
# You can have a query property on all your model classes by doing this:
# DeclarativeBase.query = DBSession.query_property()
# Or you can use a session-aware mapper as it was used in TurboGears 1:
# DeclarativeBase = declarative_base(mapper=DBSession.mapper)
# Global metadata.
# The default metadata is the one from the declarative base.
metadata = DeclarativeBase.metadata
metadata2 = DeclarativeBase2.metadata
# If you have multiple databases with overlapping table names, you'll need a
# metadata for each database. Feel free to rename 'metadata2'.
#metadata2 = MetaData()
#####
# Generally you will not want to define your table's mappers, and data objects
# here in __init__ but will want to create modules for them in the model directory
# and import them at the bottom of this file.
#
######
def init_model(engine1, engine2):
"""Call me before using any of the tables or classes in the model."""
# DBSession.configure(bind=engine)
DBSession.configure(bind=engine1)
DBSession2.configure(bind=engine2)
metadata.bind = engine1
metadata2.bind = engine2
# Import your model modules here.
from vatsystem.model.auth import User, Group, Permission
from vatsystem.model.base_erp import *
from vatsystem.model.base_vat import *
from vatsystem.model.erp import *
from vatsystem.model.systemutil import *
|
AntSharesSDK/antshares-python
|
sdk/AntShares/Core/IssueTransaction.py
|
Python
|
apache-2.0
| 922 | 0 |
# -*- coding:utf-8 -*-
"""
Description:
    IssueTransaction
Usage:
from AntShares.Core.IssueTransaction import IssueTransaction
"""
from AntShares.Core.AssetType import AssetType
from AntShares.Helper import *
from AntShares.Core.Transaction import Transaction
from AntShares.Core.TransactionType import TransactionType
from random import randint
class IssueTransaction(Transaction):
"""docstring for IssueTransaction"""
def __init__(self, inputs, outputs):
super(IssueTransaction, self).__init__(inputs, outputs)
        self.TransactionType = TransactionType.IssueTransaction  # 0x40
self.Nonce = self.genNonce()
def genNonce(self):
        return randint(268435456, 4294967295)
def getScriptHashesForVerifying(self):
"""Get ScriptHash From SignatureContract"""
pass
def serializeExclusiveData(self, writer):
writer.writeUInt32(self.Nonce)
|
rpatterson/instrumenting
|
src/instrumenting/profilehandler.py
|
Python
|
gpl-2.0
| 4,513 | 0.000886 |
import sys
import logging
import profile
import pstats
try:
import cStringIO as StringIO
StringIO # pyflakes
except ImportError:
import StringIO
from instrumenting import utils
class BaseProfilingHandler(utils.InstrumentingHandler):
"""
Python logging handler which profiles code.
It can also optionally log profiling stats and/or dump the raw
stats to a file.
"""
def __init__(self, start=False, stop=False, functions=None,
restriction=[50], strip_dirs=True,
sort_stats=['cumulative'], print_formats=['stats'],
level=logging.NOTSET):
        utils.InstrumentingHandler.__init__(self, level=level)
        self.start = start
        self.stop = stop
        self.functions = functions
        # Reporting options consumed by log_stats().
        self.restriction = restriction
        self.strip_dirs = strip_dirs
        self.sort_stats = sort_stats
        self.print_formats = print_formats
        # Set up the profiler last so setUpProfiler() can inspect the
        # options above (ProfileHandler.setUpProfiler checks self.functions).
        self.setUpProfiler()
def emit(self, record):
"""
Start or stop the configured profiler logging details.
        If the handler is configured to start the profiler and it is
already started, a warning message is logged and it is left
running. Similarly, if the handler is configured to stop the
profiler and it is already stopped, a warning message is
        logged and it is not started.
In order to avoid surprising performance impacts, if the
handler is configured such that it enables and disables the
profiler for the same single log message, an error message is
logged but the profiler is still disabled.
"""
started = False
if self.start:
if self.running():
self.log(logging.WARNING,
'Profiler %r already running, ignoring start'
% self.profiler)
else:
self.log(logging.INFO,
'Starting profiler %r' % self.profiler)
self.enable()
started = True
if self.stop:
if not self.running():
self.log(logging.WARNING,
'Profiler %r not running, ignoring stop'
% self.profiler)
else:
if started:
self.log(logging.ERROR,
'Handler for profiler %r configured to start '
'and stop for the same log message'
% self.profiler)
self.log(logging.INFO,
'Stopping profiler %r' % self.profiler)
self.disable()
if not started and self.print_formats:
self.log(logging.DEBUG, 'Printing profiler %r stats:\n%s'
% (self.profiler, self.log_stats()))
def log_stats(self):
stream = StringIO.StringIO()
stats = self.get_stats(stream)
if stats is None:
return
if self.strip_dirs:
stats.strip_dirs()
        if self.sort_stats:
            stats.sort_stats(*self.sort_stats)
for method in self.print_formats:
getattr(stats, 'print_'+method)(*self.restriction)
        return stream.getvalue()
# Profiler specific support
def setUpProfiler(self):
"""Set up the selected profiler."""
        raise NotImplementedError
    def enable(self):
        raise NotImplementedError
    def disable(self):
        raise NotImplementedError
def running(self):
return isinstance(sys.getprofile(), type(self.profiler))
def get_stats(self, stream):
if self.running():
self.log(logging.ERROR,
"Cannot get stats when the profiler from the "
"`profile` module is already running")
return None
stats = pstats.Stats(self.profiler, stream=stream)
return stats
class ProfileHandler(BaseProfilingHandler):
"""Use the pure-python `profile` module to profile on logging events."""
def setUpProfiler(self):
if not self.functions:
raise ValueError(
'The `profile` module does not support profiling '
'an already running stack')
self.profiler = profile.Profile()
def running(self):
hook = sys.getprofile()
return (hook is self.profiler.dispatcher
and isinstance(hook.im_self, type(self.profiler)))
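# Wiring sketch (logger name and profiled function are assumptions):
#   handler = ProfileHandler(functions=[my_func], start=True)
#   logging.getLogger('app.profiling').addHandler(handler)
# Any record logged through that logger then starts the profiler; a separate
# handler configured with stop=True disables it and prints the stats.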
|
Pyroseza/Random
|
test.py
|
Python
|
mit
| 23 | 0.043478 |
print("h
|
ello world!")
| |
unibet/unbound-ec2
|
unbound_ec2/config.py
|
Python
|
isc
| 3,425 | 0.00438 |
import ConfigParser
import os.path
import ast
DEFAULT_CONF_FILE = '/etc/unbound/unbound_ec2.conf'
DEFAULT_AWS_REGION = 'us-west-1'
DEFAULT_ZONE = 'zone.tld'
DEFAULT_REVERSE_ZONE = '127.in-addr.arpa'
DEFAULT_TTL = '300'
DEFAULT_CACHE_TTL = '30'
DEFAULT_SERVER_TYPE = 'caching'
DEFAULT_LOOKUP_TYPE = 'cache'
DEFAULT_LOOKUP_TAG_NAME_INCLUDE_DOMAIN = 'True'
DEFAULT_LOOKUP_FILTERS = "{'instance-state-name': 'running'}"
DEFAULT_IP_ORDER = 'private'
DEFAULT_FORWARDED_ZONES = ''
class UnboundEc2Conf(object):
"""Configuration parser for Unbound EC2 module.
"""
def __init__(self, conf_file=None):
self.config = ConfigParser.ConfigParser()
self.conf_file = conf_file if conf_file else os.environ.get('UNBOUND_EC2_CONF',
DEFAULT_CONF_FILE).encode('ascii')
self.ec2 = {}
self.main = {}
self.lookup = {}
self.lookup_filters = {}
self.server = {}
def set_defaults(self):
"""Sets default values for defined self instance attributes.
"""
self.ec2['aws_region'] = os.environ.get('AWS_DEFAULT_REGION', DEFAULT_AWS_REGION).encode('ascii')
self.main['zone'] = os.environ.get('UNBOUND_ZONE', DEFAULT_ZONE).encode('ascii')
self.main['reverse_zone'] = os.environ.get('UNBOUND_REVERSE_ZONE', DEFAULT_REVERSE_ZONE).encode('ascii')
self.main['ttl'] = self.__try_type(os.environ.get('UNBOUND_TTL', DEFAULT_TTL).encode('ascii'))
self.main['cache_ttl'] = self.__try_type(
os.environ.get('UNBOUND_CACHE_TTL', DEFAULT_CACHE_TTL).encode('ascii'))
self.server['type'] = os.environ.get('UNBOUND_SERVER_TYPE', DEFAULT_SERVER_TYPE).encode('ascii')
self.lookup['type'] = os.environ.get('UNBOUND_LOOKUP_TYPE', DEFAULT_LOOKUP_TYPE).encode('ascii')
self.lookup['tag_name_include_domain'] = self.__try_type(
os.environ.get('UNBOUND_LOOKUP_TAG_NAME_INCLUDE_DOMAIN',
DEFAULT_LOOKUP_TAG_NAME_INCLUDE_DOMAIN).encode('ascii'))
self.lookup_filters = self.__try_type(
os.environ.get('UNBOUND_LOOKUP_FILTERS', DEFAULT_LOOKUP_FILTERS).encode('ascii'))
self.main['ip_order'] = os.environ.get('UNBOUND_IP_ORDER', DEFAULT_IP_ORDER).encode('ascii')
self.main['forwarded_zones'] = os.environ.get('UNBOUND_FORWARDED_ZONES', DEFAULT_FORWARDED_ZONES)\
.encode('ascii')
def parse(self):
"""Tries to read defined configuration file and merge values with instance attributes.
"""
result = False
if os.path.isfile(self.conf_file):
self.config.read(self.conf_file)
for section in self.config.sections():
setattr(self, section, self.__get_merged_attribute(section, dict(self.config.items(section))))
result = True
return result
def __get_merged_attribute(self, name, value):
string_result = value
if getattr(self, name):
string_result = getattr(self, name).copy()
string_result.update(value)
result = {}
for key in string_result:
result[key] = self.__try_type(string_result[key])
return result
def __try_type(self, value):
try:
result = ast.literal_eval(value)
except (ValueError, SyntaxError):
result = value
return result
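# Usage sketch (values shown are the module defaults above, not live settings):
#   conf = UnboundEc2Conf()
#   conf.set_defaults()
#   conf.parse()                  # merges the conf file if it exists
#   conf.main['zone']             # e.g. 'zone.tld'
#   conf.lookup_filters           # e.g. {'instance-state-name': 'running'}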
|
clawpack/clawpack-4.x
|
doc/sphinx/example-acoustics-2d/1drad/setrun.py
|
Python
|
bsd-3-clause
| 5,754 | 0.012165 |
"""
Module to set up run time parameters for Clawpack.
The values set in the function setrun are then written out to data files
that will be read in by the Fortran code.
"""
import os
from pyclaw import data
#------------------------------
def setrun(claw_pkg='classic'):
#------------------------------
"""
Define the parameters used for running Clawpack.
INPUT:
claw_pkg expected to be "classic" for this setrun.
OUTPUT:
rundata - object of class ClawRunData
"""
assert claw_pkg.lower() == 'classic', "Expected claw_pkg = 'classic'"
ndim = 1
rundata = data.ClawRunData(claw_pkg, ndim)
#------------------------------------------------------------------
# Problem-specific parameters to be written to setprob.data:
#------------------------------------------------------------------
probdata = rundata.new_UserData(name='probdata',fname='setprob.data')
probdata.add_param('ndim', 2, 'ndim')
probdata.add_param('rho', 1., 'density of medium')
probdata.add_param('bulk', 4., 'bulk modulus')
probdata.add_param('width', 0.1, 'width used in qinit')
#------------------------------------------------------------------
# Standard Clawpack parameters to be written to claw.data:
#------------------------------------------------------------------
clawdata = rundata.clawdata # initialized when rundata instantiated
# ---------------
# Spatial domain:
# ---------------
# Number of space dimensions:
clawdata.ndim = ndim
# Lower and upper edge of computational domain:
clawdata.xlower = 0.0
clawdata.xupper = 1.5
# Number of grid cells:
clawdata.mx = 2000
# ---------------
# Size of system:
# ---------------
# Number of equations in the system:
clawdata.meqn = 2
# Number of auxiliary variables in the aux array (initialized in setaux)
clawdata.maux = 0
# Index of aux array corresponding to capacity function, if there is one:
clawdata.mcapa = 0
# -------------
# Initial time:
# -------------
clawdata.t0 = 0.0
# -------------
# Output times:
#--------------
# Specify at what times the results should be written to fort.q files.
# Note that the time integration stops after the final output time.
# The solution at initial time t0 is always written in addition.
clawdata.outstyle = 1
if clawdata.outstyle==1:
# Output nout frames at equally spaced times up to tfinal:
clawdata.nout = 4
clawdata.tfinal = 0.2
elif clawdata.outstyle == 2:
# Specify a list of output times.
clawdata.tout = [0.5, 1.0] # used if outstyle == 2
clawdata.nout = len(clawdata.tout)
elif clawdata.outstyle == 3:
# Output every iout timesteps with a total of ntot time steps:
iout = 1
ntot = 5
clawdata.iout = [iout, ntot]
# ---------------------------------------------------
# Verbosity of messages to screen during integration:
# ---------------------------------------------------
# The current t, dt, and cfl will be printed every time step
# at AMR levels <= verbosity. Set verbosity = 0 for no printing.
# (E.g. verbosity == 2 means print only on levels 1 and 2.)
clawdata.verbosity = 0
# --------------
# Time stepping:
    # --------------
# if dt_variable==1: variable time steps used based on cfl_desired,
# if dt_variable==0: fixed time steps dt = dt_initial will always be used.
clawdata.dt_variable = 1
# Initial time step for variable dt.
# If dt_variable==0 then dt=dt_initial for all steps:
    clawdata.dt_initial = 0.5
    # Max time step to be allowed if variable dt used:
clawdata.dt_max = 1e+99
# Desired Courant number if variable dt used, and max to allow without
# retaking step with a smaller dt:
clawdata.cfl_desired = 1.0
clawdata.cfl_max = 1.0
# Maximum number of time steps to allow between output times:
clawdata.max_steps = 5000
# ------------------
# Method to be used:
# ------------------
# Order of accuracy: 1 => Godunov, 2 => Lax-Wendroff plus limiters
clawdata.order = 1
# Transverse order for 2d or 3d (not used in 1d):
clawdata.order_trans = 0
# Number of waves in the Riemann solution:
clawdata.mwaves = 2
# List of limiters to use for each wave family:
# Required: len(mthlim) == mwaves
clawdata.mthlim = [2, 2]
# Source terms splitting:
# src_split == 0 => no source term (src routine never called)
# src_split == 1 => Godunov (1st order) splitting used,
# src_split == 2 => Strang (2nd order) splitting used, not recommended.
clawdata.src_split = 1
# --------------------
# Boundary conditions:
# --------------------
# Number of ghost cells (usually 2)
clawdata.mbc = 2
# Choice of BCs at xlower and xupper:
# 0 => user specified (must modify bcN.f to use this option)
# 1 => extrapolation (non-reflecting outflow)
# 2 => periodic (must specify this at both boundaries)
# 3 => solid wall for systems where q(2) is normal velocity
clawdata.mthbc_xlower = 3
clawdata.mthbc_xupper = 1
return rundata
# end of function setrun
# ----------------------
if __name__ == '__main__':
# Set up run-time parameters and write all data files.
import sys
if len(sys.argv) == 2:
rundata = setrun(sys.argv[1])
else:
rundata = setrun()
rundata.write()
|
golden1232004/webrtc_new
|
tools/python_charts/webrtc/main.py
|
Python
|
gpl-3.0
| 6,301 | 0.011903 |
#!/usr/bin/env python
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import os
import gviz_api
import webrtc.data_helper
def main():
"""
  This Python script displays a web page with test data created with the
  video_quality_measurement program, which is a tool in WebRTC.
  The script relies on two external files and one Python library:
- A HTML template file with layout and references to the json variables
defined in this script
- A data file in Python format, containing the following:
- test_configuration - a dictionary of test configuration names and values.
- frame_data_types - a dictionary that maps the different metrics to their
data types.
- frame_data - a list of dictionaries where each dictionary maps a metric to
it's value.
- The gviz_api.py of the Google Visualization Python API, available at
http://code.google.com/p/google-visualization-python/
The HTML file is shipped with the script, while the data file must be
generated by running video_quality_measurement with the --python flag
specified.
"""
print 'Content-type: text/html\n' # the newline is required!
page_template_filename = '../templates/chart_page_template.html'
# The data files must be located in the project tree for app engine being
# able to access them.
data_filenames = ['../data/vp8_sw.py', '../data/vp8_hw.py']
# Will contain info/error messages
|
to be displayed on the resulting page.
messages = []
# Load the page HTML template.
try:
f = open(page_template_filename)
page_template = f.read()
f.close()
except IOError as e:
    ShowErrorPage('Cannot open page template file: %s<br>Details: %s' %
                  (page_template_filename, e))
return
# Read data from external Python script files. First check that they exist.
for filename in data_filenames:
if not os.path.exists(filename):
messages.append('Cannot open data file: %s' % filename)
data_filenames.remove(filename)
# Read data from all existing input files.
data_list = []
test_configurations = []
names = []
for filename in data_filenames:
read_vars = {} # empty dictionary to load the data into.
execfile(filename, read_vars, read_vars)
test_configuration = read_vars['test_configuration']
table_description = read_vars['frame_data_types']
table_data = read_vars['frame_data']
# Verify the data in the file loaded properly.
if not table_description or not table_data:
messages.append('Invalid input file: %s. Missing description list or '
'data dictionary variables.' % filename)
continue
# Frame numbers appear as number type in the data, but Chart API requires
# values of the X-axis to be of string type.
# Change the frame_number column data type:
table_description['frame_number'] = ('string', 'Frame number')
# Convert all the values to string types:
for row in table_data:
row['frame_number'] = str(row['frame_number'])
# Store the unique data from this file in the high level lists.
test_configurations.append(test_configuration)
data_list.append(table_data)
# Name of the test run must be present.
test_name = FindConfiguration(test_configuration, 'name')
if not test_name:
messages.append('Invalid input file: %s. Missing configuration key '
'"name"', filename)
continue
names.append(test_name)
# Create data helper and build data tables for each graph.
helper = webrtc.data_helper.DataHelper(data_list, table_description,
names, messages)
# Loading it into gviz_api.DataTable objects and create JSON strings.
description, data = helper.CreateConfigurationTable(test_configurations)
configurations = gviz_api.DataTable(description, data)
json_configurations = configurations.ToJSon() # pylint: disable=W0612
description, data = helper.CreateData('ssim')
ssim = gviz_api.DataTable(description, data)
# pylint: disable=W0612
json_ssim_data = ssim.ToJSon(helper.GetOrdering(description))
description, data = helper.CreateData('psnr')
psnr = gviz_api.DataTable(description, data)
# pylint: disable=W0612
json_psnr_data = psnr.ToJSon(helper.GetOrdering(description))
description, data = helper.CreateData('packets_dropped')
packet_loss = gviz_api.DataTable(description, data)
# pylint: disable=W0612
json_packet_loss_data = packet_loss.ToJSon(helper.GetOrdering(description))
description, data = helper.CreateData('bit_rate')
# Add a column of data points for the desired bit rate to be plotted.
# (uses test configuration from the last data set, assuming it is the same
# for all of them)
desired_bit_rate = FindConfiguration(test_configuration, 'bit_rate_in_kbps')
if not desired_bit_rate:
    ShowErrorPage('Cannot find configuration field named "bit_rate_in_kbps"')
return
desired_bit_rate = int(desired_bit_rate)
# Add new column data type description.
description['desired_bit_rate'] = ('number', 'Desired bit rate (kbps)')
for row in data:
row['desired_bit_rate'] = desired_bit_rate
bit_rate = gviz_api.DataTable(description, data)
# pylint: disable=W0612
json_bit_rate_data = bit_rate.ToJSon(helper.GetOrdering(description))
# Format the messages list with newlines.
messages = '\n'.join(messages)
# Put the variables as JSon strings into the template.
print page_template % vars()
def FindConfiguration(configuration, name):
""" Finds a configuration value using it's name.
Returns the first configuration with a matching name. Returns None if no
matching configuration is found. """
return_value = None
for row in configuration:
if row['name'] == name:
return_value = row['value']
break
return return_value
def ShowErrorPage(error_message):
print '<html><body>%s</body></html>' % error_message
if __name__ == '__main__':
main()
|
potash/drain
|
bin/run_step.py
|
Python
|
mit
| 692 | 0.001445 |
import sys
from os.path import dirname
import drain.step, drain.serialize
from drain.drake import is_target_filename, is_step_filename
if len(sys.argv) == 1:
raise ValueError('Need at least one argument')
args = sys.argv[1:]
drain.PATH = dirname(dirname(dirname(args[0])))
if is_target_filename(args[0]):
output = drain.serialize.load(args[0])
args = args[1:]
else:
output = None
if not is_step_filename(args[0]):
raise ValueError('Need a step to run')
step = drain.serialize.load(args[0])
inputs = []
for i in args[1:]:
    if is_step_filename(i) or is_target_filename(i):
inputs.append(drain.serialize.load(i))
step.execute(output=output, inputs=inputs)
|
bit-trade-one/SoundModuleAP
|
lib-src/lv2/sratom/waflib/Tools/icc.py
|
Python
|
gpl-2.0
| 890 | 0.057303 |
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys
from waflib.Tools import ccroot,ar,gcc
from waflib.Configure import conf
@conf
def find_icc(conf):
if sys.platform=='cygwin':
conf.fatal('The Intel compiler does not work on Cygwin')
v=conf.env
cc=None
if v['CC']:cc=v['CC']
elif'CC'in conf.environ:cc=conf.environ['CC']
if not cc:cc=conf.find_program('icc',var='CC')
if not cc:cc=conf.find_program('ICL',var='CC')
if not cc:conf.fatal('Intel C Compiler (icc) was not found')
	cc=conf.cmd_to_list(cc)
	conf.get_cc_version(cc,icc=True)
v['CC']=cc
v['CC_NAME']='icc'
def configure(conf):
conf.find_icc()
conf.find_ar()
conf.gcc_common_flags()
conf.gcc_modifier_platform()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
|
sagark/tsdb-perf-test
|
tests/insertlongstream.py
|
Python
|
bsd-2-clause
| 1,776 | 0.013514 |
#Java/SQL stuff
from java.lang import *
#Grinder stuff
from net.grinder.script.Grinder import grinder
from net.grinder.script import Test
#misc
import time
import sys
#project specific
from framework import TSdata_w, TSdata_h, importstrs
#import relevant t_DATABASENAME depending on settings in grinder.properties
inp = grinder.getProperties()["grinder.inp"]
inp = importstrs(inp)
exec(inp)
class TestRunner:
def __init__(self):
self.testdb = DBAccess(*dbargs)
self.numstreams = 100
logstr = self.testdb.init_insert(100000, self.numstreams, False)
grinder.logger.info(logstr)
#this has a crazy amount of overhead in python, need to figure out
#what's up
def __call__(self):
try:
res = self.testdb.run_insert_h()
grinder.logger.info("Insertion Results as (start time, end time, "
"completion" +
" time): (" + str(res[0]) + ", " + str(res[1]) +
", " + str(res[2]) + ")")
print("done insert")
except StopIteration:
# the test is complete
            grinder.logger.info("Insertion finished at: " + str(time.time()))
self.testdb.close_all()
grinder.stopThisWorkerThread()
res = self.testdb.run_query_all()
grinder.logger.info("Query Results as (start time, end time, "
"completion" +
|
" time): (" + str(res[0]) + ", " + str(res[1]) +
", " + str(res[2]) + ")")
#log db size
size = self.testdb.get_db_size()
grinder.logger.info("The database size is now " + size + " bytes.")
self.testdb.reset_conn_state()
|
epawlowska/whylog
|
whylog/config/consts.py
|
Python
|
bsd-3-clause
| 247 | 0 |
class YamlFileNames(object):
rules = 'rules.yaml'
parsers = 'parsers.yaml'
    default_log_types = 'log_types.yaml'
unix_log_types = 'unix_log_types.yaml'
    windows_log_types = 'windows_log_types.yaml'
settings = 'settings.yaml'
|
scardine/image_size
|
setup.py
|
Python
|
mit
| 776 | 0.001289 |
import codecs
from setuptools import setup
VERSION = '0.2.0'
def read_long_description():
long_desc = []
with codecs.open('README.rst', 'r', 'utf8') as longdesc:
long_desc.append(longdesc.read())
with codecs.open('HISTORY.rst', 'r', 'utf8') as history:
        long_desc.append(history.read())
return u'\n\n'.join(long_desc)
LONG_DESCRIPTION = read_long_description()
setup(
name='get_image_size',
url='https://github.com/scardine/image_size',
version=VERSION,
long_description=LONG_DESCRIPTION,
author='github.com/scardine',
author_email=' ',
license='MIT',
py_modules=['get_image_size'],
entry_points={
        'console_scripts': [
'get-image-size = get_image_size:main',
],
},
)
|
bpsinc-native/src_third_party_chromite
|
scripts/pushimage.py
|
Python
|
bsd-3-clause
| 18,246 | 0.006577 |
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ChromeOS image pusher (from cbuildbot to signer).
This pushes files from the archive bucket to the signer bucket and marks
artifacts for signing (which a signing process will look for).
"""
from __future__ import print_function
import ConfigParser
import cStringIO
import errno
import getpass
import os
import re
import tempfile
import textwrap
from chromite.cbuildbot import constants
from chromite.lib import commandline
from chromite.lib import cros_build_lib
from chromite.lib import git
from chromite.lib import gs
from chromite.lib import osutils
from chromite.lib import signing
# This will split a fully qualified ChromeOS version string up.
# R34-5126.0.0 will break into "34" and "5126.0.0".
VERSION_REGEX = r'^R([0-9]+)-([^-]+)'
# The test signers will scan this dir looking for test work.
# Keep it in sync with the signer config files [gs_test_buckets].
TEST_SIGN_BUCKET_BASE = 'gs://chromeos-throw-away-bucket/signer-tests'
# Keysets that are only valid in the above test bucket.
TEST_KEYSETS = set(('test-keys-mp', 'test-keys-premp'))
class PushError(Exception):
"""When an (unknown) error happened while trying to push artifacts."""
class MissingBoardInstructions(Exception):
"""Raised when a board lacks any signer instructions."""
class InputInsns(object):
"""Object to hold settings for a signable board.
Note: The format of the instruction file pushimage outputs (and the signer
reads) is not exactly the same as the instruction file pushimage reads.
"""
def __init__(self, board):
self.board = board
config = ConfigParser.ConfigParser()
config.readfp(open(self.GetInsnFile('DEFAULT')))
try:
input_insn = self.GetInsnFile('recovery')
config.readfp(open(input_insn))
except IOError as e:
if e.errno == errno.ENOENT:
# This board doesn't have any signing instructions.
# This is normal for new or experimental boards.
raise MissingBoardInstructions(input_insn)
raise
self.cfg = config
def GetInsnFile(self, image_type):
"""Find the signer instruction files for this board/image type.
Args:
image_type: The type of instructions to load. It can be a common file
(like "DEFAULT"), or one of the --sign-types.
    Returns:
      Full path to the instruction file using |image_type| and |self.board|.
"""
if image_type == image_type.upper():
name = image_type
elif image_type == 'recovery':
name = self.board
else:
name = '%s.%s' % (self.board, image_type)
return os.path.join(signing.INPUT_INSN_DIR, '%s.instructions' % name)
@staticmethod
def SplitCfgField(val):
"""Split a string into multiple elements.
This centralizes our convention for multiple elements in the input files
being delimited by either a space or comma.
Args:
val: The string to split.
Returns:
The list of elements from having done split the string.
"""
return val.replace(',', ' ').split()
def GetChannels(self):
"""Return the list of channels to sign for this board.
If the board-specific config doesn't specify a preference, we'll use the
common settings.
"""
return self.SplitCfgField(self.cfg.get('insns', 'channel'))
def GetKeysets(self):
"""Return the list of keysets to sign for this board."""
return self.SplitCfgField(self.cfg.get('insns', 'keyset'))
def OutputInsns(self, image_type, output_file, sect_insns, sect_general):
"""Generate the output instruction file for sending to the signer.
Note: The format of the instruction file pushimage outputs (and the signer
reads) is not exactly the same as the instruction file pushimage reads.
Args:
image_type: The type of image we will be signing (see --sign-types).
output_file: The file to write the new instruction file to.
sect_insns: Items to set/override in the [insns] section.
sect_general: Items to set/override in the [general] section.
"""
config = ConfigParser.ConfigParser()
config.readfp(open(self.GetInsnFile(image_type)))
# Clear channel entry in instructions file, ensuring we only get
# one channel for the signer to look at. Then provide all the
# other details for this signing request to avoid any ambiguity
# and to avoid relying on encoding data into filenames.
for sect, fields in zip(('insns', 'general'), (sect_insns, sect_general)):
if not config.has_section(sect):
config.add_section(sect)
for k, v in fields.iteritems():
config.set(sect, k, v)
output = cStringIO.StringIO()
config.write(output)
data = output.getvalue()
osutils.WriteFile(output_file, data)
cros_build_lib.Debug('generated insns file for %s:\n%s', image_type, data)
def MarkImageToBeSigned(ctx, tbs_base, insns_path, priority):
"""Mark an instructions file for signing.
This will upload a file to the GS bucket flagging an image for signing by
the signers.
Args:
ctx: A viable gs.GSContext.
tbs_base: The full path to where the tobesigned directory lives.
insns_path: The path (relative to |tbs_base|) of the file to sign.
priority: Set the signing priority (lower == higher prio).
Returns:
The full path to the remote tobesigned file.
"""
if priority < 0 or priority > 99:
raise ValueError('priority must be [0, 99] inclusive')
if insns_path.startswith(tbs_base):
insns_path = insns_path[len(tbs_base):].lstrip('/')
tbs_path = '%s/tobesigned/%02i,%s' % (tbs_base, priority,
insns_path.replace('/', ','))
with tempfile.NamedTemporaryFile(
bufsize=0, prefix='pushimage.tbs.') as temp_tbs_file:
lines = [
'PROG=%s' % __file__,
'USER=%s' % getpass.getuser(),
'HOSTNAME=%s' % cros_build_lib.GetHostName(fully_qualified=True),
'GIT_REV=%s' % git.RunGit(constants.CHROMITE_DIR,
['rev-parse', 'HEAD']).output.rstrip(),
]
osutils.WriteFile(temp_tbs_file.name, '\n'.join(lines) + '\n')
# The caller will catch gs.GSContextException for us.
ctx.Copy(temp_tbs_file.name, tbs_path)
return tbs_path
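# Illustration (hypothetical values): with tbs_base 'gs://b/base', insns_path
# 'board/recovery.instructions' and priority 50, the marker file lands at
# 'gs://b/base/tobesigned/50,board,recovery.instructions'.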
def PushImage(src_path, board, versionrev=None, profile=None, priority=50,
sign_types=None, dry_run=False, mock=False, force_keysets=()):
"""Push the image from the archive bucket to the release bucket.
Args:
src_path: Where to copy the files from; can be a local path or gs:// URL.
Should be a full path to the artifacts in either case.
board: The board we're uploading artifacts for (e.g. $BOARD).
versionrev: The full Chromium OS version string (e.g. R34-5126.0.0).
profile: The board profile in use (e.g. "asan").
priority: Set the signing priority (lower == higher prio).
sign_types: If set, a set of types which we'll restrict ourselves to
signing. See the --sign-types option for more details.
dry_run: Show what would be done, but do not upload anything.
mock: Upload to a testing bucket rather than the real one.
force_keysets: Set of keysets to use rather than what the inputs say.
Returns:
A dictionary that maps 'channel' -> ['gs://signer_instruction_uri1',
'gs://signer_instruction_uri2',
...]
"""
# Whether we hit an unknown error. If so, we'll throw an error, but only
# at the end (so that we still upload as many files as possible).
unknown_error = False
if versionrev is None:
# Extract milestone/version from the directory name.
versionrev = os.path.basename(src_path)
# We only support the latest format here. Older releases can use pushimage
# from the respective branch which deals with legacy cruft.
m = re.match(VERSION_REGEX, versionrev)
if not m:
raise ValueError('version %s does not match %s' %
(versionrev, VERSION_REGEX))
milestone = m.group(1)
version = m.group(2)
|
shailcoolboy/Warp-Trinity
|
ResearchApps/Measurement/examples/TxPower_vs_BER/warpnet_experiment_structs.py
|
Python
|
bsd-2-clause
| 5,510 | 0.023775 |
# WARPnet Client<->Server Architecture
# WARPnet Parameter Definitions
#
# Author: Siddharth Gupta
import struct, time
from warpnet_common_params import *
from warpnet_client_definitions import *
from twisted.internet import reactor
import binascii
# Struct IDs
STRUCTID_CONTROL = 0x13
STRUCTID_CONTROL_ACK = 0x14
STRUCTID_COMMAND = 0x17
STRUCTID_COMMAND_ACK = 0x18
STRUCTID_OBSERVE_BER = 0x24
STRUCTID_OBSERVE_BER_REQ = 0x25
STRUCTID_OBSERVE_PER = 0x26
STRUCTID_OBSERVE_PER_REQ = 0x27
# Command IDs
COMMANDID_STARTTRIAL = 0x40
COMMANDID_STOPTRIAL = 0x41
COMMANDID_RESET_PER = 0x50
COMMANDID_ENABLE_BER_TESTING = 0x51
COMMANDID_DISABLE_BER_TESTING = 0x52
########################
## Struct Definitions ##
########################
# ControlStruct is a ClientStruct that stores some basic parameters to pass to the WARP board. The local variable can be accessed
# globally by calling ControlStruct.txPower etc. The struct must also understand the conversion from integer values to binary
# using the prepToSend function; it will be provided with the nodeID.
# typedef struct {
# char structID;
# char nodeID;
# char txPower;
# char channel;
# char modOrderHeader;
# char modOrderPayload;
# short reserved;
# int pktGen_period;
# int pktGen_length;
# } warpnetControl;
class ControlStruct(ClientStruct):
txPower = -1
channel = -1
modOrderHeader = -1
modOrderPayload = -1
reserved = 0
packetGeneratorPeriod = 0
packetGeneratorLength = 0
def __init__(self):
self.structID = STRUCTID_CONTROL
self.txPower = 63
self.channel = 4
self.modOrderHeader = 0
self.modOrderPayload = 2
self.packetGeneratorPeriod = 0
self.packetGeneratorLength = 1300
self.expectedReturnStructID = STRUCTID_CONTROL_ACK
def prepToSend(self, nodeID):
self.updateDone = False
return struct.pack('!6BHII', self.structID, nodeID, self.txPower, self.channel, self.modOrderHeader, self.modOrderPayload, self.reserved, self.packetGeneratorPeriod, self.packetGeneratorLength)
def updateFromNode(self, rawData, pcapts):
dataTuple = struct.unpack('!BBH', rawData[0:4])
#print "Control struct successfully applied at node %d" % dataTuple[1]
#CommandStruct is used to send commands or requests to the WARP nodes
# The cmdIDs are defined above
# Matching C code definition:
# typedef struct {
# char structID;
# char nodeID;
# char cmdID;
# char cmdParam;
# } warpnetCommand;
class CommandStruct(ClientStruct):
cmdID = -1
cmdParam = -1
def __init__(self, cmdID, cmdParam):
self.structID = STRUCTID_COMMAND
self.expectedReturnStructID = STRUCTID_COMMAND_ACK
self.cmdID = cmdID
self.cmdParam = cmdParam
def prepToSend(self, nodeID):
self.updateDone = False
return struct.pack('!4B', self.structID, nodeID, self.cmdID, self.cmdParam)
def updateFromNode(self, rawData, pcapts):
pass
#print "Successfully executed command %d" % self.cmdID
#ObservePERStruct collects packet error rate (PER) data from WARP nodes
# Matching C code definition:
# typedef struct {
# unsigned char structID;
# unsigned char nodeID;
# unsigned char reqNum;
# unsigned char reqType;
# unsigned int numPkts_tx;
# unsigned int numPkts_rx_good;
# unsigned int numPkts_rx_goodHdrBadPyld;
# unsigned int numPkts_rx_badHdr;
# } warpnetObservePER;
class ObservePERStruct(ClientStruct):
numPkts_tx = -1
numPkts_rx_good = -1
numPkts_rx_goodHdrBadPyld = -1
numPkts_rx_badHdr = -1
reqNum = -1
reqType = -1
def __init__(self, logger=None):
ClientStruct.__init__(self, logger)
self.structID = STRUCTID_OBSERVE_PER_REQ
self.expectedReturnStructID = STRUCTID_OBSERVE_PER
self.numPkts_tx = 0
self.numPkts_rx_good = 0
self.numPkts_rx_goodHdrBadPyld = 0
self.numPkts_rx_badHdr = 0
self.reqNum = 0
self.reqType = 0
def prepToSend(self, nodeID):
self.updateDone = False
return struct.pack('!4B', self.structID, nodeID, self.reqNum, self.reqType)
def updateFromNode(self, rawData, pcapts):
dataTuple = struct.unpack('!2B 2B 4I', rawData[0:20])
self.reqNum = dataTuple[2]
self.reqType = dataTuple[3]
self.numPkts_tx = dataTuple[4]
self.numPkts_rx_good = dataTuple[5]
self.numPkts_rx_goodHdrBadPyld = dataTuple[6]
self.numPkts_rx_badHdr = dataTuple[7]
#Client struct for collecting BER updates from the ber_processor program
# Matching C code struct:
# typedef struct {
# unsigned char structID;
# unsigned char nodeID;
# unsigned short sequenceNumber;
# unsigned char nodeID_tx;
# unsigned char nodeID_rx;
# unsigned short mac_seqNum;
# unsigned char mac_pktType;
# unsigned char reserved0;
# unsigned char reserved1;
# unsigned char reserved2;
# unsigned int bits_rx;
# unsigned int bits_errors;
# } warpnetObserveBER;
class ObserveBERStruct(ClientStruct):
totalBitsReceived = 0
totalBitErrors = 0
nodeID_tx = -1
nodeID_rx = -1
def __init__(self, logger=None):
ClientStruct.__init__(self, logger)
self.structID = STRUCTID_OBSERVE_BER_REQ
self.expectedReturnStructID = STRUCTID_OBSERVE_BER
self.totalBitsReceived = 0
self.totalBitErrors = 0
def prepToSend(self, nodeID):
self.updateDone = False
return struct.pack('!BBH', self.structID, nodeID, 0)
def updateFromNode(self, rawData, pcapts):
dataTuple = struct.unpack('!2B H 2B H 2I', rawData[0:16])
self.nodeID_tx = dataTuple[3]
self.nodeID_rx = dataTuple[4]
self.totalBitsReceived += dataTuple[6]
self.totalBitErrors += dataTuple[7]
def clearBitCounts(self):
self.totalBitsReceived = 0
self.totalBitErrors = 0
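# Illustrative round-trip sketch (comments only, not executed): it assumes the
# STRUCTID_* constants and the ClientStruct base class defined earlier in this
# file. A control struct packs into the 16-byte wire format documented above,
# and a PER reply can be replayed through updateFromNode by hand:
#
#   ctrl = ControlStruct()
#   payload = ctrl.prepToSend(nodeID=3)   # '!6BHII' -> 6 + 2 + 4 + 4 = 16 bytes
#   assert len(payload) == 16
#
#   per = ObservePERStruct()
#   reply = struct.pack('!2B 2B 4I', per.expectedReturnStructID, 3, 1, 0,
#                       100, 90, 6, 4)    # tx, good, goodHdrBadPyld, badHdr
#   per.updateFromNode(reply, pcapts=None)
#   assert per.numPkts_tx == 100 and per.numPkts_rx_badHdr == 4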
|
kasper190/SPAforum
|
accounts/forms.py
|
Python
|
mit
| 2,436 | 0.002053 |
from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from django.utils.translation import ugettext_lazy as _
User = get_user_model()
class AdminUserCreationForm(forms.ModelForm):
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)
class Meta:
model = User
fields = ['username', 'email']
def clean_password2(self):
password1 = self.cleaned_data.get('password1')
password2 = self.cleaned_data.get('password2')
if password1 and password2 and password1 != password2:
raise forms.ValidationError("Passwords don't match")
return password2
def save(self, commit=True):
user = super(AdminUserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
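# Illustrative sketch (comments only; the request data below is hypothetical):
# the creation form is bound like any ModelForm, and save() hashes the
# password via set_password() so the raw value is never stored.
#
#   form = AdminUserCreationForm(data={'username': 'alice',
#                                      'email': 'alice@example.com',
#                                      'password1': 's3cret!',
#                                      'password2': 's3cret!'})
#   if form.is_valid():
#       user = form.save()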
class AdminUserChangeForm(forms.ModelForm):
password = ReadOnlyPasswordHashField(label= ("Password"),
help_text= ("<a href=\"../password/\">Change password</a>"))
class Meta:
model = User
fields = ['username', 'email', 'password', 'is_banned', 'is_admin']
def clean_password(self):
return self.initial['password']
class AdminPasswordChangeForm(forms.Form):
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
password1 = forms.CharField(label=_("Password"),
widget=forms.PasswordInput)
password2 = forms.CharField(label=_("Password (again)"),
widget=forms.PasswordInput)
def __init__(self, user, *args, **kwargs):
self.user = user
super(AdminPasswordChangeForm, self).__init__(*args, **kwargs)
def clean_password2(self):
password1 = self.cleaned_data.get('password1')
password2 = self.cleaned_data.get('password2')
if password1 and password2:
if password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'])
return password2
def save(self, commit=True):
self.user.set_password(self.cleaned_data["password1"])
        if commit:
            self.user.save()
        return self.user
|
amenonsen/ansible
|
lib/ansible/modules/network/fortios/fortios_system_email_server.py
|
Python
|
gpl-3.0
| 12,462 | 0.001284 |
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_email_server
short_description: Configure the email server used by the FortiGate for various purposes, such as sending email messages to users to support user
   authentication features in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS device by allowing the
user to set and modify system feature and email_server category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
system_email_server:
description:
        - Configure the email server used by the FortiGate for various purposes, such as sending email messages to users to support user authentication
          features.
default: null
type: dict
suboptions:
authenticate:
description:
- Enable/disable authentication.
type: str
choices:
- enable
- disable
password:
description:
- SMTP server user password for authentication.
type: str
port:
description:
- SMTP server port.
type: int
reply_to:
description:
- Reply-To email address.
type: str
security:
description:
- Connection security used by the email server.
type: str
choices:
- none
- starttls
- smtps
server:
description:
- SMTP server IP address or hostname.
type: str
source_ip:
description:
- SMTP server IPv4 source IP.
type: str
source_ip6:
description:
- SMTP server IPv6 source IP.
type: str
ssl_min_proto_version:
description:
- Minimum supported protocol version for SSL/TLS connections (default is to follow system global setting).
type: str
choices:
- default
- SSLv3
- TLSv1
- TLSv1-1
- TLSv1-2
type:
description:
- Use FortiGuard Message service or custom email server.
type: str
choices:
- custom
username:
description:
- SMTP server user name for authentication.
type: str
validate_server:
description:
- Enable/disable validation of server certificate.
type: str
choices:
- enable
- disable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
  - name: Configure the email server used by the FortiGate for various purposes, such as sending email messages to users to support user authentication
      features.
fortios_system_email_server:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
system_email_server:
authenticate: "enable"
password: "<your_own_value>"
port: "5"
reply_to: "<your_own_value>"
security: "none"
server: "192.168.100.40"
source_ip: "84.230.14.43"
source_ip6: "<your_own_value>"
ssl_min_proto_version: "default"
type: "custom"
username: "<your_own_value>"
validate_server: "enable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
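# Illustrative sketch (comments only, assuming the fortiosapi package that
# this module lists as a requirement; the values are placeholders):
#
#   from fortiosapi import FortiOSAPI
#   fos = FortiOSAPI()
#   login({'host': '192.168.122.40', 'username': 'admin', 'password': '',
#          'https': True, 'ssl_verify': False}, fos)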
def filter_system_email_server_data(json):
option_list = ['authenticate', 'password', 'port',
'reply_to', 'security', 'server',
'source_ip', 'source_ip6', 'ssl_min_proto_versi
|
em92/quakelive-local-ratings
|
qllr/blueprints/player/methods.py
|
Python
|
agpl-3.0
| 6,371 | 0.000942 |
import json
from functools import reduce
from typing import Optional
from asyncpg import Connection
from qllr.common import DATETIME_FORMAT, clean_name, convert_timestamp_to_tuple
from qllr.db import cache
from qllr.exceptions import MatchNotFound, PlayerNotFound
from qllr.settings import MOVING_AVG_COUNT
async def get_player_info_mod_date(
con: Connection, steam_id: int, gametype_id: Optional[int] = None
):
query = """
SELECT MAX(last_played_timestamp)
FROM gametype_ratings
WHERE steam_id = $1
"""
params = [steam_id]
if gametype_id is not None:
query += " AND gametype_id = $2"
params.append(gametype_id)
return convert_timestamp_to_tuple(await con.fetchval(query, *params))
async def get_player_info(con: Connection, steam_id: int):
await con.set_type_codec(
"json", encoder=json.dumps, decoder=json.loads, schema="pg_catalog"
)
def choose_rating_values(item: dict):
if cache.USE_AVG_PERF[item["gametype_short"]]:
item["rating"] = item["r2_value"]
item["rating_d"] = 0
else:
item["rating"] = item["r1_mean"]
item["rating_d"] = item["r1_deviation"]
del item["r1_mean"]
del item["r1_deviation"]
del item["r2_value"]
return item
# player name, rating and games played
query = """
SELECT json_build_object(
'name', p.name,
'ratings', COALESCE(t.ratings, '{ }')
)
FROM players p
LEFT JOIN (
SELECT gr.steam_id, array_agg( json_build_object(
'r1_mean', CAST( ROUND( CAST(gr.r1_mean AS NUMERIC), 2) AS REAL ),
'r1_deviation', CAST( ROUND( CAST(gr.r1_deviation AS NUMERIC), 2) AS REAL ),
'r2_value', CAST( ROUND( CAST(gr.r2_value AS NUMERIC), 2) AS REAL ),
'n', gr.n,
'gametype_short', g.gametype_short,
'gametype', g.gametype_name
) ORDER by gr.n DESC ) AS ratings
FROM gametype_ratings gr
LEFT JOIN gametypes g ON g.gametype_id = gr.gametype_id
WHERE gr.steam_id = $1
GROUP BY gr.steam_id
    ) t ON p.steam_id = t.steam_id
WHERE p.steam_id = $1
"""
result = await con.fetchval(query, steam_id)
if result is None:
raise PlayerNotFound(steam_id)
result["ratings"] = list(map(choose_rating_values, result["ratings"]))
# weapon stats (frags + acc)
query = """
SELECT array_agg(json_build_object(
'name', w.weapon_name,
'short', w.weapon_short,
'frags', t2.frags,
'acc', t.accuracy
) ORDER BY t.weapon_id ASC)
FROM (
SELECT
            weapon_id,
CASE
WHEN SUM(shots) = 0 THEN 0
ELSE CAST(100. * SUM(hits) / SUM(shots) AS INT)
END AS accuracy
FROM (
SELECT weapon_id, frags, hits, shots
FROM scoreboards_weapons sw
LEFT JOIN ( -- TODO: need to change from LEFT JOIN to WHERE match_id IN
SELECT m.match_id
FROM matches m
LEFT JOIN scoreboards s ON s.match_id = m.match_id
WHERE steam_id = $2
ORDER BY timestamp DESC LIMIT $1
) m ON m.match_id = sw.match_id
WHERE sw.steam_id = $2
) sw
GROUP BY weapon_id
) t
LEFT JOIN weapons w ON t.weapon_id = w.weapon_id
LEFT JOIN (
SELECT
weapon_id,
SUM(frags) AS frags
FROM scoreboards_weapons sw
WHERE steam_id = $2
GROUP BY weapon_id
) t2 ON t2.weapon_id = t.weapon_id
"""
# TODO: cover case, where weapon_status is empty array
result["weapon_stats"] = await con.fetchval(query, MOVING_AVG_COUNT, steam_id) or []
# fav map
query = """
SELECT map_name
FROM (
SELECT map_id, COUNT(*) AS n
FROM matches m
WHERE match_id IN (SELECT match_id FROM scoreboards WHERE steam_id = $1)
GROUP BY map_id
) t
LEFT JOIN maps ON maps.map_id = t.map_id
ORDER BY n DESC, maps.map_id ASC
LIMIT 1
"""
row = await con.fetchval(query, steam_id)
fav_map = "None"
if row is not None:
fav_map = row
fav_gt = "None"
if len(result["ratings"]) > 0:
fav_gt = result["ratings"][0]["gametype"]
result["fav"] = {
"map": fav_map,
"gt": fav_gt,
"wpn": reduce(
lambda sum, x: sum if sum["frags"] > x["frags"] else x,
result["weapon_stats"],
{"frags": 0, "name": "None"},
)["name"],
}
# 10 last matches
query = """
SELECT
array_agg(json_build_object(
'match_id', m.match_id,
'datetime', to_char(to_timestamp(timestamp), '{DATETIME_FORMAT}'),
'timestamp', timestamp,
'gametype', g.gametype_short,
'result', CASE
WHEN m.team1_score > m.team2_score AND m.team = 1 THEN 'Win'
WHEN m.team1_score < m.team2_score AND m.team = 2 THEN 'Win'
ELSE 'Loss'
END,
'team1_score', m.team1_score,
'team2_score', m.team2_score,
'map', mm.map_name
) ORDER BY timestamp DESC) AS matches
FROM(
SELECT s.steam_id, s.team, m.*
FROM scoreboards s
LEFT JOIN matches m ON s.match_id = m.match_id
WHERE s.steam_id = $1
ORDER BY timestamp DESC
LIMIT 10
) m
LEFT JOIN gametypes g ON g.gametype_id = m.gametype_id
LEFT JOIN maps mm ON mm.map_id = m.map_id
""".format(
DATETIME_FORMAT=DATETIME_FORMAT
)
result["matches"] = await con.fetchval(query, steam_id)
return {"response": result, "title": clean_name(result["name"])}
async def get_best_match_of_player(
con: Connection, steam_id: int, gametype_id: int
) -> str:
query = """
SELECT s.match_id::text
FROM scoreboards s
WHERE match_id IN (
SELECT match_id
FROM matches
WHERE gametype_id = $1
) AND
match_perf IS NOT NULL AND
alive_time >= 1200 AND
steam_id = $2
ORDER BY match_perf DESC
LIMIT 1
"""
result = await con.fetchval(query, gametype_id, steam_id)
if result is None:
raise MatchNotFound("could not detect player's best match")
return result
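# Illustrative sketch (comments only; `dbpool` is a hypothetical asyncpg pool
# owned by the caller, it is not defined in this module):
#
#   async def example(dbpool, steam_id):
#       async with dbpool.acquire() as con:
#           info = await get_player_info(con, steam_id)
#           return info["response"]["fav"]["gt"]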
|
pombredanne/logbook
|
logbook/notifiers.py
|
Python
|
bsd-3-clause
| 11,853 | 0.000169 |
# -*- coding: utf-8 -*-
"""
logbook.notifiers
~~~~~~~~~~~~~~~~~
System notify handlers for OSX and Linux.
:copyright: (c) 2010 by Armin Ronacher, Christopher Grebs.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import base64
from time import time
from logbook.base import NOTSET, ERROR, WARNING
from logbook.handlers import Handler, LimitingHandlerMixin
from logbook.helpers import get_application_name, PY2, http_client, u
if PY2:
from urllib import urlencode
else:
from urllib.parse import urlencode
def create_notification_handler(application_name=None, level=NOTSET,
icon=None):
"""Creates a handler perfectly fit the current platform. On Linux
systems this creates a :class:`LibNotifyHandler`, on OS X systems it
will create a :class:`GrowlHandler`.
"""
if sys.platform == 'darwin':
return GrowlHandler(application_name, level=level, icon=icon)
return LibNotifyHandler(application_name, level=level, icon=icon)
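# Illustrative usage sketch (comments only; 'MyApp' is a placeholder name):
#
#   from logbook import Logger
#   handler = create_notification_handler('MyApp', level=ERROR)
#   with handler.applicationbound():
#       Logger('MyApp').error('disk almost full')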
class NotificationBaseHandler(Handler, LimitingHandlerMixin):
"""Baseclass for notification handlers."""
def __init__(self, application_name=None, record_limit=None,
record_delta=None, level=NOTSET, filter=None, bubble=False):
Handler.__init__(self, level, filter, bubble)
LimitingHandlerMixin.__init__(self, record_limit, record_delta)
if application_name is None:
application_name = get_application_name()
self.application_name = application_name
def make_title(self, record):
"""Called to get the title from the record."""
return u('%s: %s') % (record.channel, record.level_name.title())
def make_text(self, record):
"""Called to get the text of the record."""
return record.message
class GrowlHandler(NotificationBaseHandler):
"""A handler that dispatches to Growl. Requires that either growl-py or
py-Growl are installed.
"""
def __init__(self, application_name=None, icon=None, host=None,
password=None, record_limit=None, record_delta=None,
level=NOTSET, filter=None, bubble=False):
NotificationBaseHandler.__init__(self, application_name, record_limit,
record_delta, level, filter, bubble)
# growl is using the deprecated md5 module, but we really don't need
# to see that deprecation warning
from warnings import filterwarnings
filterwarnings(module='Growl', category=DeprecationWarning,
action='ignore')
try:
import Growl
self._growl = Growl
except ImportError:
raise RuntimeError('The growl module is not available. You have '
'to install either growl-py or py-Growl to '
'use the GrowlHandler.')
if icon is not None:
if not os.path.isfile(icon):
raise IOError('Filename to an icon expected.')
icon = self._growl.Image.imageFromPath(icon)
else:
try:
icon = self._growl.Image.imageWithIconForCurrentApplication()
except TypeError:
icon = None
self._notifier = self._growl.GrowlNotifier(
applicationName=self.application_name,
applicationIcon=icon,
notifications=['Notset', 'Debug', 'Info', 'Notice', 'Warning',
'Error', 'Critical'],
hostname=host,
password=password
)
self._notifier.register()
def is_sticky(self, record):
"""Returns `True` if the sticky flag should be set for this record.
The default implementation marks errors and criticals sticky.
"""
        return record.level >= ERROR
def get_priority(self, record):
"""Returns the priority flag for Gr
|
owl. Errors and criticals are
get highest priority (2), warnings get higher priority (1) and the
rest gets 0. Growl allows values between -2 and 2.
"""
if record.level >= ERROR:
return 2
elif record.level == WARNING:
return 1
return 0
def emit(self, record):
if not self.check_delivery(record)[1]:
return
self._notifier.notify(record.level_name.title(),
self.make_title(record),
self.make_text(record),
sticky=self.is_sticky(record),
priority=self.get_priority(record))
class LibNotifyHandler(NotificationBaseHandler):
"""A handler that dispatches to libnotify. Requires pynotify installed.
If `no_init` is set to `True` the initialization of libnotify is skipped.
"""
def __init__(self, application_name=None, icon=None, no_init=False,
record_limit=None, record_delta=None, level=NOTSET,
filter=None, bubble=False):
NotificationBaseHandler.__init__(self, application_name, record_limit,
record_delta, level, filter, bubble)
try:
import pynotify
self._pynotify = pynotify
except ImportError:
raise RuntimeError('The pynotify library is required for '
'the LibNotifyHandler.')
self.icon = icon
if not no_init:
pynotify.init(self.application_name)
def set_notifier_icon(self, notifier, icon):
"""Used to attach an icon on a notifier object."""
try:
from gtk import gdk
except ImportError:
# TODO: raise a warning?
raise RuntimeError('The gtk.gdk module is required to set an icon.')
if icon is not None:
if not isinstance(icon, gdk.Pixbuf):
icon = gdk.pixbuf_new_from_file(icon)
notifier.set_icon_from_pixbuf(icon)
def get_expires(self, record):
"""Returns either EXPIRES_DEFAULT or EXPIRES_NEVER for this record.
The default implementation marks errors and criticals as EXPIRES_NEVER.
"""
pn = self._pynotify
return pn.EXPIRES_NEVER if record.level >= ERROR else pn.EXPIRES_DEFAULT
def get_urgency(self, record):
"""Returns the urgency flag for pynotify. Errors and criticals are
get highest urgency (CRITICAL), warnings get higher priority (NORMAL)
and the rest gets LOW.
"""
pn = self._pynotify
if record.level >= ERROR:
return pn.URGENCY_CRITICAL
elif record.level == WARNING:
return pn.URGENCY_NORMAL
return pn.URGENCY_LOW
def emit(self, record):
if not self.check_delivery(record)[1]:
return
notifier = self._pynotify.Notification(self.make_title(record),
self.make_text(record))
notifier.set_urgency(self.get_urgency(record))
notifier.set_timeout(self.get_expires(record))
self.set_notifier_icon(notifier, self.icon)
notifier.show()
class BoxcarHandler(NotificationBaseHandler):
"""Sends notifications to boxcar.io. Can be forwarded to your iPhone or
other compatible device.
"""
api_url = 'https://boxcar.io/notifications/'
def __init__(self, email, password, record_limit=None, record_delta=None,
level=NOTSET, filter=None, bubble=False):
NotificationBaseHandler.__init__(self, None, record_limit,
record_delta, level, filter, bubble)
self.email = email
self.password = password
def get_screen_name(self, record):
"""Returns the value of the screen name field."""
return record.level_name.title()
def emit(self, record):
if not self.check_delivery(record)[1]:
return
body = urlencode({
'notification[from_screen_name]':
self.get_screen_name(record).encode('utf-8'),
'notification[message]':
|
SoftwareEngineeringToolDemos/ICSE-2011-Checker-Framework
|
release/release_push.py
|
Python
|
gpl-2.0
| 24,373 | 0.020843 |
#!/usr/bin/env python
# encoding: utf-8
"""
release_push.py
Created by Jonathan Burke on 2013-12-30.
Copyright (c) 2015 University of Washington. All rights reserved.
"""
#See README-maintainers.html for more information
from release_vars import *
from release_utils import *
from sanity_checks import *
import urllib
import zipfile
# ensure that the new release version is strictly greater than the previous release
def check_release_version( previous_release, new_release ):
if version_to_integer( previous_release ) >= version_to_integer( new_release ):
raise Exception( "Previous release version ( " + previous_release + " ) should be less than " +
"the new release version( " + new_release + " )" )
def copy_release_dir( path_to_dev, path_to_live, release_version ):
source_location = os.path.join( path_to_dev, release_version )
dest_location = os.path.join( path_to_live, release_version )
if os.path.exists( dest_location ):
prompt_to_delete( dest_location )
if os.path.exists( dest_location ):
raise Exception( "Destination location exists: " + dest_location )
cmd = "cp -r %s %s" % ( source_location, dest_location )
execute( cmd )
return dest_location
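# Illustrative sketch (comments only; the version string is a placeholder,
# the directory constants come from release_vars imported above):
#
#   dest = copy_release_dir(CHECKER_INTERM_RELEASES_DIR,
#                           CHECKER_LIVE_RELEASES_DIR, '1.8.11')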
def copy_htaccess():
execute("cp %s %s" % (DEV_HTACCESS, LIVE_HTACCESS))
ensure_group_access(LIVE_HTACCESS)
def copy_releases_to_live_site( checker_version, afu_version):
copy_release_dir( JSR308_INTERM_RELEASES_DIR, JSR308_LIVE_RELEASES_DIR, checker_version )
copy_release_dir( CHECKER_INTERM_RELEASES_DIR, CHECKER_LIVE_RELEASES_DIR, checker_version )
copy_release_dir( AFU_INTERM_RELEASES_DIR, AFU_LIVE_RELEASES_DIR, afu_version )
def update_release_symlinks( checker_version, afu_version ):
afu_latest_release_dir = os.path.join( AFU_LIVE_RELEASES_DIR, afu_version )
checker_latest_release_dir = os.path.join( CHECKER_LIVE_RELEASES_DIR, checker_version )
force_symlink( os.path.join( JSR308_LIVE_RELEASES_DIR, checker_version ), os.path.join( JSR308_LIVE_SITE, "current" ) )
force_symlink( checker_latest_release_dir, os.path.join( CHECKER_LIVE_SITE, "current" ) )
force_symlink( afu_latest_release_dir, os.path.join( AFU_LIVE_SITE, "current" ) )
#After the copy operations the index.htmls will point into the dev directory
force_symlink( os.path.join( afu_latest_release_dir, "annotation-file-utilities.html" ), os.path.join( afu_latest_release_dir, "index.html" ) )
force_symlink( os.path.join( checker_latest_release_dir, "checker-framework-webpage.html" ), os.path.join( checker_latest_release_dir, "index.html" ) )
def ensure_group_access_to_releases():
ensure_group_access( JSR308_LIVE_RELEASES_DIR )
ensure_group_access( AFU_LIVE_RELEASES_DIR )
ensure_group_access( CHECKER_LIVE_RELEASES_DIR )
def push_maven_artifacts_to_release_repo( version ):
mvn_deploy_mvn_plugin( MAVEN_PLUGIN_DIR, MAVEN_PLUGIN_POM, version, MAVEN_LIVE_REPO )
# Deploy jsr308 and checker-qual jars to maven repo
mvn_deploy( CHECKER_BINARY, CHECKER_BINARY_POM, MAVEN_LIVE_REPO )
mvn_deploy( CHECKER_QUAL, CHECKER_QUAL_POM, MAVEN_LIVE_REPO )
mvn_deploy( JAVAC_BINARY, JAVAC_BINARY_POM, MAVEN_LIVE_REPO )
mvn_deploy( JDK7_BINARY, JDK7_BINARY_POM, MAVEN_LIVE_REPO )
mvn_deploy( JDK8_BINARY, JDK8_BINARY_POM, MAVEN_LIVE_REPO )
def stage_maven_artifacts_in_maven_central( new_checker_version ):
pgp_user="checker-framework-dev@googlegroups.com"
pgp_passphrase = read_first_line( PGP_PASSPHRASE_FILE )
mvn_dist = os.path.join(MAVEN_PLUGIN_DIR, "dist" )
execute( "mkdir -p " + mvn_dist )
#build Jar files with only readmes for artifacts that don't have sources/javadocs
ant_cmd = "ant -f release.xml -Ddest.dir=%s -Dmaven.plugin.dir=%s jar-maven-extras" % (mvn_dist, MAVEN_PLUGIN_DIR)
execute(ant_cmd, True, False, CHECKER_FRAMEWORK_RELEASE)
#At the moment, checker.jar is the only artifact with legitimate accompanying source/javadoc jars
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, CHECKER_BINARY_RELEASE_POM, CHECKER_BINARY,
CHECKER_SOURCE, CHECKER_JAVADOC,
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, CHECKER_QUAL_RELEASE_POM, CHECKER_QUAL,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-qual-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-qual-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, CHECKER_COMPAT_QUAL_RELEASE_POM,
CHECKER_COMPAT_QUAL,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-compat-qual-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "checker-compat-qual-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JAVAC_BINARY_RELEASE_POM, JAVAC_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "compiler-source.jar" ),
                             os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "compiler-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JDK7_BINARY_RELEASE_POM, JDK7_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk7-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk7-javadoc.jar" ),
pgp_user, pgp_passphrase )
    mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JDK8_BINARY_RELEASE_POM, JDK8_BINARY,
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk8-source.jar" ),
os.path.join(MAVEN_RELEASE_DIR, mvn_dist, "jdk8-javadoc.jar" ),
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, JAVACUTIL_BINARY_RELEASE_POM, JAVACUTIL_BINARY,
JAVACUTIL_SOURCE_JAR, JAVACUTIL_JAVADOC_JAR,
pgp_user, pgp_passphrase )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, DATAFLOW_BINARY_RELEASE_POM, DATAFLOW_BINARY,
DATAFLOW_SOURCE_JAR, DATAFLOW_JAVADOC_JAR,
pgp_user, pgp_passphrase )
plugin_jar = find_mvn_plugin_jar( MAVEN_PLUGIN_DIR, new_checker_version )
plugin_source_jar = find_mvn_plugin_jar( MAVEN_PLUGIN_DIR, new_checker_version, "sources" )
plugin_javadoc_jar = os.path.join( MAVEN_RELEASE_DIR, mvn_dist, "checkerframework-maven-plugin-javadoc.jar" )
mvn_sign_and_deploy_all( SONATYPE_OSS_URL, SONATYPE_STAGING_REPO_ID, MAVEN_PLUGIN_RELEASE_POM, plugin_jar,
plugin_source_jar, plugin_javadoc_jar, pgp_user, pgp_passphrase )
delete_path( mvn_dist )
def run_link_checker( site, output ):
check_links_script = os.path.join(CHECKER_FRAMEWORK_RELEASE, "checkLinks.sh")
cmd = ["sh", check_links_script, site]
out_file = open( output, 'w+' )
print("Executing: " + " ".join(cmd) )
process = subprocess.Popen(cmd, stdout=out_file, stderr=out_file)
process.communicate()
process.wait()
out_file.close()
if process.returncode != 0:
raise Exception('Non-zero return code( %s ) while executing %s' % (process.returncode, cmd))
return output
def check_all_links( jsr308_website, afu_website, checker_website, suffix ):
jsr308Check = run_link_checker( jsr308_website, "/tmp/jsr308." + suffix + ".check" )
afuCheck = run_link_checker( afu_website, "/tmp/afu." + suffix + ".check" )
checkerCheck = run_link_checker( checker_website, "/tmp/checker-framework." + suffix + ".check" )
print( "Link checker results can be found at:\n" +
"\t" + jsr308Check + "\n" +
"\t" + afuCheck + "\n" +
"\
|
qedsoftware/commcare-hq
|
corehq/apps/export/tests/test_table_configuration.py
|
Python
|
bsd-3-clause
| 10,403 | 0.000481 |
from django.test import SimpleTestCase
from corehq.apps.export.const import USERNAME_TRANSFORM
from corehq.apps.export.models import (
DocRow,
RowNumberColumn,
PathNode,
ExportRow,
ScalarItem,
ExportColumn,
TableConfiguration,
)
class TableConfigurationTest(SimpleTestCase):
def test_get_column(self):
table_configuration = TableConfiguration(
path=[PathNode(name='form', is_repeat=False), PathNode(name="repeat1", is_repeat=True)],
columns=[
ExportColumn(
item=ScalarItem(
path=[
PathNode(name='form'),
PathNode(name='repeat1', is_repeat=True),
PathNode(name='q1')
],
)
),
ExportColumn(
item=ScalarItem(
path=[
PathNode(name="form"),
PathNode(name="user_id"),
],
transform=USERNAME_TRANSFORM
)
),
ExportColumn(
item=ScalarItem(
path=[
PathNode(name='form'),
PathNode(name='repeat1', is_repeat=True),
PathNode(name='q2')
],
)
),
]
)
index, column = table_configuration.get_column(
[
PathNode(name='form'),
PathNode(name='repeat1', is_repeat=True),
PathNode(name='q1')
],
'ScalarItem',
None,
)
self.assertEqual(
column.item.path,
[
PathNode(name='form'),
PathNode(name='repeat1', is_repeat=True),
PathNode(name='q1')
]
)
self.assertEqual(index, 0)
index, column = table_configuration.get_column(
[
PathNode(name='form'),
PathNode(name='repeat1', is_repeat=True),
PathNode(name='DoesNotExist')
],
'ScalarItem',
None,
)
self.assertIsNone(column)
# Verify that get_column ignores deid transforms
index, column = table_configuration.get_column(
[PathNode(name="form"), PathNode(name="user_id")],
'ScalarItem',
USERNAME_TRANSFORM
)
self.assertIsNotNone(column)
self.assertEqual(index, 1)
class TableConfigurationGetSubDocumentsTest(SimpleTestCase):
def test_basic(self):
        table = TableConfiguration(path=[])
self.assertEqual(
table._get_sub_documents(
{'foo': 'a'},
0
),
[
DocRow(row=(0,), doc={'foo': 'a'})
]
)
def test_simple_repeat(self):
table = TableConfiguration(
path=[PathNode(name="foo", is_repeat=True)]
)
self.assertEqual(
table._get_sub_documents(
{
'foo': [
{'bar': 'a'},
{'bar': 'b'},
]
},
0
),
[
DocRow(row=(0, 0), doc={'bar': 'a'}),
DocRow(row=(0, 1), doc={'bar': 'b'})
]
)
def test_nested_repeat(self):
table = TableConfiguration(
path=[PathNode(name='foo', is_repeat=True), PathNode(name='bar', is_repeat=True)],
)
self.assertEqual(
table._get_sub_documents(
{
'foo': [
{
'bar': [
{'baz': 'a'},
{'baz': 'b'}
],
},
{
'bar': [
{'baz': 'c'}
],
},
],
},
0
),
[
DocRow(row=(0, 0, 0), doc={'baz': 'a'}),
DocRow(row=(0, 0, 1), doc={'baz': 'b'}),
DocRow(row=(0, 1, 0), doc={'baz': 'c'}),
]
)
def test_single_iteration_repeat(self):
table = TableConfiguration(
path=[PathNode(name='group1', is_repeat=False), PathNode(name='repeat1', is_repeat=True)],
)
self.assertEqual(
table._get_sub_documents(
{
'group1': {
'repeat1': {
'baz': 'a'
},
}
},
0
),
[
DocRow(row=(0, 0), doc={'baz': 'a'}),
]
)
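# Illustrative note (not a test): each PathNode with is_repeat=True fans out
# one row per list element while non-repeat nodes simply descend, so e.g.
#
#   table = TableConfiguration(path=[PathNode(name='form', is_repeat=False),
#                                    PathNode(name='repeat1', is_repeat=True)])
#
# applied to {'form': {'repeat1': [{'q1': 'a'}, {'q1': 'b'}]}} yields two
# DocRows, one per repeat entry.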
class TableConfigurationGetRowsTest(SimpleTestCase):
def test_simple(self):
table_configuration = TableConfiguration(
path=[],
columns=[
ExportColumn(
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q3')],
),
selected=True,
),
ExportColumn(
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q1')],
),
selected=True,
),
ExportColumn(
item=ScalarItem(
path=[PathNode(name='form'), PathNode(name='q2')],
),
selected=False,
),
]
)
submission = {
'domain': 'my-domain',
'_id': '1234',
"form": {
"q1": "foo",
"q2": "bar",
"q3": "baz"
}
}
self.assertEqual(
[row.data for row in table_configuration.get_rows(submission, 0)],
[['baz', 'foo']]
)
def test_repeat(self):
table_configuration = TableConfiguration(
path=[PathNode(name="form", is_repeat=False), PathNode(name="repeat1", is_repeat=True)],
columns=[
ExportColumn(
item=ScalarItem(
path=[
PathNode(name="form"),
PathNode(name="repeat1", is_repeat=True),
PathNode(name="q1")
],
),
selected=True,
),
]
)
submission = {
'domain': 'my-domain',
'_id': '1234',
'form': {
'repeat1': [
{'q1': 'foo'},
{'q1': 'bar'}
]
}
}
self.assertEqual(
[row.data for row in table_configuration.get_rows(submission, 0)],
[ExportRow(['foo']).data, ExportRow(['bar']).data]
)
def test_double_repeat(self):
table_configuration = TableConfiguration(
path=[
PathNode(name="form", is_repeat=False),
PathNode(name="repeat1", is_repeat=True),
PathNode(name="group1", is_repeat=False),
PathNode(name="repeat2", is_repeat=True),
],
columns=[
RowNumberColumn(
selected=True
),
ExportColumn(
item=ScalarItem(
path=[
PathNode(name='form'),
PathNode(name='repeat1', is_repeat=True),
PathNode(name='group1'),
PathNode(name='repeat2', is_repeat=True),
|
somini/gpodder
|
src/gpodder/query.py
|
Python
|
gpl-3.0
| 5,258 | 0.001331 |
# -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2014 Thomas Perl and the gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# gpodder.query - Episode Query Language (EQL) implementation (2010-11-29)
#
import gpodder
import re
import datetime
class Matcher(object):
"""Match implementation for EQL
This class implements the low-level matching of
EQL statements against episode objects.
"""
def __init__(self, episode):
self._episode = episode
def match(self, term):
try:
return bool(eval(term, {'__builtins__': None}, self))
except Exception, e:
print e
return False
def __getitem__(self, k):
episode = self._episode
# Adjectives (for direct usage)
if k == 'new':
return (episode.state == gpodder.STATE_NORMAL and episode.is_new)
elif k in ('downloaded', 'dl'):
return episode.was_downloaded(and_exists=True)
elif k in ('deleted', 'rm'):
return episode.state == gpodder.STATE_DELETED
elif k == 'played':
return not episode.is_new
elif k == 'downloading':
return episode.downloading
elif k == 'archive':
return episode.archive
elif k in ('finished', 'fin'):
return episode.is_finished()
elif k in ('video', 'audio'):
return episode.file_type() == k
elif k == 'torrent':
return episode.url.endswith('.torrent') or 'torrent' in episode.mime_type
# Nouns (for comparisons)
if k in ('megabytes', 'mb'):
return float(episode.file_size) / (1024*1024)
elif k == 'title':
return episode.title
elif k == 'description':
return episode.description
elif k == 'since':
return (datetime.datetime.now() - datetime.datetime.fromtimestamp(episode.published)).days
elif k == 'age':
return episode.age_in_days()
elif k in ('minutes', 'min'):
return float(episode.total_time) / 60
elif k in ('remaining', 'rem'):
return float(episode.total_time - episode.current_position) / 60
raise KeyError(k)
class EQL(object):
"""A Query in EQL
Objects of this class represent a query on episodes
using EQL. Example usage:
>>> q = EQL('downloaded and megabytes > 10')
>>> q.filter(channel.get_all_episodes())
>>> EQL('new and video').match(episode)
Regular expression queries are also supported:
>>> q = EQL('/^The.*/')
>>> q = EQL('/community/i')
Normal string matches are also supported:
>>> q = EQL('"S04"')
>>> q = EQL("'linux'")
Normal EQL queries cannot be mixed with RegEx
or string matching yet, so this does NOT work:
>>> EQL('downloaded and /The.*/i')
"""
def __init__(self, query):
self._query = query
self._flags = 0
self._regex = False
self._string = False
# Regular expression based query
match = re.match(r'^/(.*)/(i?)$', query)
if match is not None:
self._regex = True
self._query, flags = match.groups()
if flags == 'i':
self._flags |= re.I
# String based query
match = re.match("^([\"'])(.*)(\\1)$", query)
if match is not None:
self._string = True
a, query, b = match.groups()
self._query = query.lower()
# For everything else, compile the expression
if not self._regex and not self._string:
try:
self._query = compile(query, '<eql-string>', 'eval')
except Exception, e:
print e
self._query = None
def match(self, episode):
if self._query is None:
return False
if self._regex:
return re.search(self._query, episode.title, self._flags) is not None
elif self._string:
return self._query in episode.title.lower() or self._query in episode.description.lower()
        return Matcher(episode).match(self._query)
def filter(self, episodes):
return filter(self.match, episodes)
def UserEQL(query):
"""EQL wrapper for user input
Automatically adds missing quotes around a
    non-EQL string for user-based input. In this
case, EQL queries need to be enclosed in ().
"""
if query is None:
return None
if query == '' or (query and query[0] not in "(/'\""):
return EQL("'%s'" % query)
else:
return EQL(query)
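# Illustrative examples (comments only, in line with the EQL docstring above):
#
# >>> UserEQL('linux') # becomes EQL("'linux'"), a title/description match
# >>> UserEQL('(downloaded and min > 30)') # parenthesised input stays raw EQL
# >>> UserEQL('/^Linux.*/i') # regular expressions pass through unchanged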
|
trun/redux
|
redux/internal/scheduler.py
|
Python
|
mit
| 5,911 | 0.00203 |
from greenlet import greenlet
import os
from sandbox import Sandbox, SandboxConfig
from redux.internal.exceptions import RobotDeathException
import traceback
# this is just for testing
LOADER_CODE_TEST = """
import time
while True:
try:
print 'running... %s' % str(rc)
time.sleep(1)
print 'done'
defer()
except Exception:
print 'caught exception... ignoring'
"""
# This will be the real player execution code
LOADER_CODE = """
from {team}.player import RobotPlayer
player = RobotPlayer(rc)
player.run()
"""
# TODO paramaterize
BYTECODE_LIMIT = 8000
def passthrough(thread, throw=False):
"""A one shot greenlet that simply resumes an existing greenlet and then
returns. This allows greenlets to be resumed without a presistent parent.
"""
def _run():
retval = thread.resume(throw)
if retval is None:
raise Exception('robot run method returned')
return retval
g = greenlet(run=_run)
thread.parent = g
return g.switch()
class Scheduler():
_instance = None
def __init__(self, game_world):
self.game_world = game_world
self.current_thread = None
self.threads = {}
self.threads_to_kill = set()
@classmethod
def create(cls, game_world=None):
cls._instance = Scheduler(game_world)
@classmethod
def destroy(cls):
cls._instance = None
@classmethod
def instance(cls):
return cls._instance
def spawn_thread(self, robot):
"""Spawn a new player"""
player = Player(RobotController(robot, self.game_world).interface())
thread = PlayerThread(player)
self.threads[robot.id] = thread
def run_thread(self, id):
"""Run a player thread for the given robot id"""
print '[SCHEDULER] running thread', id
self.current_thread = self.threads.get(id)
assert not self.current_thread is None, 'null thread?'
# check if the robot is scheduled to be killed
throw = id in self.threads_to_kill
# check if the robot is over the bytecode limit
if self.get_bytecode_left() < 0 and not throw:
self.current_thread.bytecode_used -= BYTECODE_LIMIT
return
# resume robot execution
try:
passthrough(self.current_thread.player, throw)
except Exception as e:
if not isinstance(e, RobotDeathException):
traceback.print_exc()
del self.threads[id]
self.current_thread = None
def end_thread(self):
self.current_thread.bytecode_used -= min(8000, self.current_thread.bytecode_used)
self.current_thread.player.pause()
def current_robot(self):
return self.current_thread.player.robot_controller.robot
def kill_robot(self, id):
self.threads_to_kill.add(id)
def increment_bytecode(self, amt):
assert amt >= 0, 'negative bytecode increments not allowed'
self.current_thread.bytecode_used += amt
if self.current_thread.bytecode_used > BYTECODE_LIMIT:
self.end_thread()
def get_bytecode_left(self):
return BYTECODE_LIMIT - self.current_thread.bytecode_used
def get_bytecode_used(self):
return self.current_thread.bytecode_used
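# Illustrative lifecycle sketch (comments only; `game_world` and `robot` are
# hypothetical objects supplied by the engine, not defined in this module):
#
#   Scheduler.create(game_world)
#   sched = Scheduler.instance()
#   sched.spawn_thread(robot)     # wraps the player code in a sandbox
#   sched.run_thread(robot.id)    # resumes it until it defers or dies
#   Scheduler.destroy()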
class Player(greenlet):
def __init__(self, robot_controller):
super(Player, self).__init__()
self.robot_controller = robot_controller
config = SandboxConfig(use_subprocess=False)
config.enable('traceback')
config.enable('stdout')
config.enable('stderr')
config.enable('time')
# TODO need to allow *all* imports from team package
config.allowModule(robot_controller.robot.team + '.player', 'RobotPlayer')
# TODO need a better method for override the sys_path
config.sys_path = config.sys_path + (os.getcwd(),)
# add additional builtins to the config
# - increment_clock
this = self
def increment_clock(amt):
Scheduler.instance().increment_bytecode(amt)
# TODO need a better method to add builtins additions
config._builtins_additions = {
'increment_clock': increment_clock,
}
self.sandbox = Sandbox(config)
self.running = False
def resume(self, throw=False):
return self.switch(throw)
def pause(self):
# break out of the sandbox
self.sandbox.disable_protections()
# return execution to the scheduler
throw = self.parent.switch()
if throw:
raise RobotDeathException('killed by engine')
# re-enable sandbox protections
self.sandbox.enable_protections()
def run(self, *args):
statement = LOADER_CODE.format(team=self.robot_controller.robot.team)
safelocals = { 'rc': self.robot_controller }
self.running = True
self.sandbox.execute(statement, globals={}, locals=safelocals)
class PlayerThread(object):
def __init__(self, player):
self.bytecode_used = 0
self.player = player
class RobotController(object):
def __init__(self, robot, game_world):
self.robot = robot
        self.game_world = game_world
def yield_execution(self):
# TODO yield bonus
Scheduler.instance().end_thread()
def interface(self):
"""
Returns an encapsulated version of the controller that can safely be
passed to the sandboxed player code.
"""
this = self
class _interface(object):
def __init__(self):
self._robot = this.robot.interface() # TODO robot should cache its own interface
def yield_execution(self):
this.yield_execution()
@property
def robot(self):
return self._robot
return _interface()
|
Azure/azure-sdk-for-python
|
sdk/containerregistry/azure-containerregistry/tests/test_container_registry_client.py
|
Python
|
mit
| 23,434 | 0.004139 |
# coding=utf-8
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from datetime import datetime
from azure.core import credentials
import pytest
import six
import time
from azure.containerregistry import (
RepositoryProperties,
ArtifactManifestProperties,
ArtifactManifestOrder,
ArtifactTagProperties,
ArtifactTagOrder,
ContainerRegistryClient,
)
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
from azure.core.paging import ItemPaged
from testcase import ContainerRegistryTestClass, get_authority
from constants import TO_BE_DELETED, HELLO_WORLD, ALPINE, BUSYBOX, DOES_NOT_EXIST
from preparer import acr_preparer
class TestContainerRegistryClient(ContainerRegistryTestClass):
@acr_preparer()
def test_list_repository_names(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
        repositories = client.list_repository_names()
assert isinstance(repositories, ItemPaged)
count = 0
prev = None
for repo in repositories:
count += 1
assert isinstance(repo, six.string_types)
assert prev != repo
prev = repo
assert count > 0
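    # Illustrative sketch (comments only, not part of the test run): building
    # a client directly, assuming azure-identity is available; the endpoint is
    # a placeholder.
    #
    #   from azure.identity import DefaultAzureCredential
    #   client = ContainerRegistryClient("https://myregistry.azurecr.io",
    #                                    DefaultAzureCredential())
    #   for name in client.list_repository_names():
    #       print(name)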
    @acr_preparer()
def test_list_repository_names_by_page(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
results_per_page = 2
total_pages = 0
repository_pages = client.list_repository_names(results_per_page=results_per_page)
prev = None
for page in repository_pages.by_page():
page_count = 0
for repo in page:
assert isinstance(repo, six.string_types)
assert prev != repo
prev = repo
page_count += 1
assert page_count <= results_per_page
total_pages += 1
assert total_pages >= 1
@acr_preparer()
def test_delete_repository(self, containerregistry_endpoint, containerregistry_resource_group):
self.import_image(containerregistry_endpoint, HELLO_WORLD, [TO_BE_DELETED])
client = self.create_registry_client(containerregistry_endpoint)
client.delete_repository(TO_BE_DELETED)
for repo in client.list_repository_names():
if repo == TO_BE_DELETED:
raise ValueError("Repository not deleted")
@acr_preparer()
def test_delete_repository_does_not_exist(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
client.delete_repository("not_real_repo")
@acr_preparer()
def test_get_repository_properties(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_repository_properties(ALPINE)
assert isinstance(properties, RepositoryProperties)
assert properties.name == ALPINE
@acr_preparer()
def test_update_repository_properties(self, containerregistry_endpoint):
repository = self.get_resource_name("repo")
tag_identifier = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repository, tag_identifier)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_repository_properties(repository)
properties.can_delete = False
properties.can_read = False
properties.can_list = False
properties.can_write = False
new_properties = client.update_repository_properties(repository, properties)
assert properties.can_delete == new_properties.can_delete
assert properties.can_read == new_properties.can_read
assert properties.can_list == new_properties.can_list
assert properties.can_write == new_properties.can_write
new_properties.can_delete = True
new_properties.can_read = True
new_properties.can_list = True
new_properties.can_write = True
new_properties = client.update_repository_properties(repository, new_properties)
assert new_properties.can_delete == True
assert new_properties.can_read == True
assert new_properties.can_list == True
assert new_properties.can_write == True
@acr_preparer()
def test_update_repository_properties_kwargs(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_repository_properties(repo)
properties = self.set_all_properties(properties, True)
received = client.update_repository_properties(repo, properties)
self.assert_all_properties(properties, True)
received = client.update_repository_properties(repo, can_delete=False)
assert received.can_delete == False
assert received.can_list == True
assert received.can_read == True
assert received.can_write == True
received = client.update_repository_properties(repo, can_read=False)
assert received.can_delete == False
assert received.can_list == True
assert received.can_read == False
assert received.can_write == True
received = client.update_repository_properties(repo, can_write=False)
assert received.can_delete == False
assert received.can_list == True
assert received.can_read == False
assert received.can_write == False
received = client.update_repository_properties(repo, can_list=False)
assert received.can_delete == False
assert received.can_list == False
assert received.can_read == False
assert received.can_write == False
received = client.update_repository_properties(
repo,
can_delete=True,
can_read=True,
can_write=True,
can_list=True,
)
self.assert_all_properties(received, True)
@acr_preparer()
def test_list_registry_artifacts(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
count = 0
for artifact in client.list_manifest_properties(BUSYBOX):
assert isinstance(artifact, ArtifactManifestProperties)
assert isinstance(artifact.created_on, datetime)
assert isinstance(artifact.last_updated_on, datetime)
assert artifact.repository_name == BUSYBOX
assert artifact.fully_qualified_reference in self.create_fully_qualified_reference(containerregistry_endpoint, BUSYBOX, artifact.digest)
count += 1
assert count > 0
@acr_preparer()
def test_list_registry_artifacts_by_page(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
results_per_page = 2
pages = client.list_manifest_properties(BUSYBOX, results_per_page=results_per_page)
page_count = 0
for page in pages.by_page():
reg_count = 0
for tag in page:
reg_count += 1
assert reg_count <= results_per_page
page_count += 1
assert page_count >= 1
@acr_preparer()
def test_list_registry_artifacts_descending(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
prev_last_updated_on = None
count = 0
for artifact in client.list_manifest_properties(BUSYBOX, order_by=ArtifactManifestOrder.LAST_UPDATED_ON_DESCENDING):
if prev_last_updated_on:
assert artifact.last_updated_on < prev_last_updated_on
prev_last_updated_on = artifact.last_updated_on
count += 1
assert count > 0
prev_last_updated_
|
vandorjw/django-template-project
|
project/project_name/urls.py
|
Python
|
mit
| 1,221 | 0.005733 |
from django.conf.urls import patterns, include, url
from django.contrib.sitemaps import Sitemap
from django.views.generic import TemplateView
from django.contrib import admin
from {{ project_name }}.views import HomePageView, ContactPageView, RobotPageView, HumanPageView
from {{ project_name }}.sitemap import BlogSitemap, HardCodedSitemap
admin.autodiscover()
sitemaps = {
    'blog': BlogSitemap,
'hardcodedpages': HardCodedSitemap,
}
urlpatterns = patterns('',
url(
regex=r"^$",
view=HomePageView.as_view(),
name="homepage",
),
url(
regex=r"^contact/$",
view=ContactPageView.as_view(),
name="contactpage",
),
url(
regex=r"^robots\.txt$",
view=RobotPageView.as_view(),
name="robots",
),
url(
regex=r"^humans\.txt$",
view=HumanPageView.as_view(),
name="humans",
),
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
url(r'^grappelli/', include('grappelli.urls')),
url(r'^tinymce/', include('tinymce.urls')),
url(r'^admin/', include(admin.site.urls)),
url("^blog/", include('blog.urls', namespace='blog', app_name='blog')),
)
|
CARMinesDouai/MultiRobotExplorationPackages
|
inria_demo/scripts/autodock_client.py
|
Python
|
mit
| 3,861 | 0.019684 |
#!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2012, Yujin Robot
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the Yujin Robot nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Author: Younghun Ju <yhju@yujinrobot.com> <yhju83@gmail.com>
import roslib; roslib.load_manifest('kobuki_auto_docking')
import rospy
import actionlib
from kobuki_msgs.msg import AutoDockingAction, AutoDockingGoal
from actionlib_msgs.msg import GoalStatus
from std_msgs.msg import Bool
def doneCb(status, result):
if 0: print ''
elif status == GoalStatus.PENDING : state='PENDING'
elif status == GoalStatus.ACTIVE : state='ACTIVE'
elif status == GoalStatus.PREEMPTED : state='PREEMPTED'
elif status == GoalStatus.SUCCEEDED : state='SUCCEEDED'
elif status == GoalStatus.ABORTED : state='ABORTED'
elif status == GoalStatus.REJECTED : state='REJECTED'
elif status == GoalStatus.PREEMPTING: state='PREEMPTING'
elif status == GoalStatus.RECALLING : state='RECALLING'
elif status == GoalStatus.RECALLED : state='RECALLED'
elif status == GoalStatus.LOST : state='LOST'
# Print state of action server
print 'Result - [ActionServer: ' + state + ']: ' + result.text
def activeCb():
if 0: print 'Action server went active.'
def feedbackCb(feedback):
# Print state of dock_drive module (or node.)
print 'Feedback: [DockDrive: ' + feedback.state + ']: ' + feedback.text
def callback(data):
if data.data == True:
dock_drive_client()
def dock_drive_client():
# add timeout setting
client = actionlib.SimpleActionClient('dock_drive_action', AutoDockingAction)
while not client.wait_for_server(rospy.Duration(5.0)):
if rospy.is_shutdown(): return
print 'Action server is not connected yet. still waiting...'
goal = AutoDockingGoal();
client.send_goal(goal, doneCb, activeCb, feedbackCb)
print 'Goal: Sent.'
rospy.on_shutdown(client.cancel_goal)
client.wait_for_result()
#print ' - status:', client.get_goal_status_text()
return client.get_result()
if __name__ == '__main__':
try:
rospy.init_node('dock_drive_client_py', anonymous=True)
topic = "/robotcmd/dock"
if rospy.has_param("~dock_active_topic"):
topic = rospy.get_param("~dock_active_topic")
rospy.Subscriber(topic,Bool, callback)
rospy.spin()
#print ''
#print "Result: ", result
except rospy.ROSInterruptException:
print "program interrupted before completion"
|
cgstudiomap/cgstudiomap
|
main/parts/web/web_widget_x2many_2d_matrix/__openerp__.py
|
Python
|
agpl-3.0
| 1,516 | 0 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2015 Therp BV <http://therp.nl>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "2D matrix for x2many fields",
"version": "1.0",
"author": "Therp BV",
"license": "AGPL-3",
"category": "Hidden/Dependency",
"summary": "Show list fields as a matrix",
"depends": [
'web',
],
"data": [
'views/templates.xml',
],
"qweb": [
'static/src/xml/web_widget_x2many_2d_matrix.xml',
],
"test": [
],
"auto_install": False,
"installable": True,
"application": False,
"external_dependencies": {
'python': [],
},
}
|
richardliaw/ray
|
rllib/agents/ars/ars_tf_policy.py
|
Python
|
apache-2.0
| 4,456 | 0 |
# Code in this file is copied and adapted from
# https://github.com/openai/evolution-strategies-starter.
import gym
import numpy as np
import tree
import ray
import ray.experimental.tf_utils
from ray.rllib.agents.es.es_tf_policy import make_session
from ray.rllib.models import ModelCatalog
from ray.rllib.policy.policy import Policy
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils.filter import get_filter
from ray.rllib.utils.framework import try_import_tf
from ray.rllib.utils.spaces.space_utils import unbatch
tf1, tf, tfv = try_import_tf()
class ARSTFPolicy(Policy):
def __init__(self, obs_space, action_space, config):
super().__init__(obs_space, action_space, config)
self.action_noise_std = self.config["action_noise_std"]
self.preprocessor = ModelCatalog.get_preprocessor_for_space(
self.observation_space)
self.observation_filter = get_filter(self.config["observation_filter"],
self.preprocessor.shape)
self.single_threaded = self.config.get("single_threaded", False)
if self.config["framework"] == "tf":
self.sess = make_session(single_threaded=self.single_threaded)
self.inputs = tf1.placeholder(
                tf.float32, [None] + list(self.preprocessor.shape))
        else:
            if not tf1.executing_eagerly():
tf1.enable_eager_execution()
self.sess = self.inputs = None
# Policy network.
self.dist_class, dist_dim = ModelCatalog.get_action_dist(
self.action_space, self.config["model"], dist_type="deterministic")
self.model = ModelCatalog.get_model_v2(
obs_space=self.preprocessor.observation_space,
action_space=self.action_space,
num_outputs=dist_dim,
model_config=self.config["model"])
self.sampler = None
if self.sess:
dist_inputs, _ = self.model({SampleBatch.CUR_OBS: self.inputs})
dist = self.dist_class(dist_inputs, self.model)
self.sampler = dist.sample()
self.variables = ray.experimental.tf_utils.TensorFlowVariables(
dist_inputs, self.sess)
self.sess.run(tf1.global_variables_initializer())
else:
self.variables = ray.experimental.tf_utils.TensorFlowVariables(
[], None, self.model.variables())
self.num_params = sum(
np.prod(variable.shape.as_list())
for _, variable in self.variables.variables.items())
def compute_actions(self,
observation,
add_noise=False,
update=True,
**kwargs):
# Squeeze batch dimension (we always calculate actions for only a
# single obs).
observation = observation[0]
observation = self.preprocessor.transform(observation)
observation = self.observation_filter(observation[None], update=update)
# `actions` is a list of (component) batches.
# Eager mode.
if not self.sess:
dist_inputs, _ = self.model({SampleBatch.CUR_OBS: observation})
dist = self.dist_class(dist_inputs, self.model)
actions = dist.sample()
actions = tree.map_structure(lambda a: a.numpy(), actions)
# Graph mode.
else:
actions = self.sess.run(
self.sampler, feed_dict={self.inputs: observation})
actions = unbatch(actions)
if add_noise and isinstance(self.action_space, gym.spaces.Box):
actions += np.random.randn(*actions.shape) * self.action_noise_std
return actions, [], {}
def compute_single_action(self,
observation,
add_noise=False,
update=True,
**kwargs):
action, state_outs, extra_fetches = self.compute_actions(
[observation], add_noise=add_noise, update=update, **kwargs)
return action[0], state_outs, extra_fetches
def get_state(self):
return {"state": self.get_flat_weights()}
def set_state(self, state):
return self.set_flat_weights(state["state"])
def set_flat_weights(self, x):
self.variables.set_flat(x)
def get_flat_weights(self):
return self.variables.get_flat()
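
A minimal sketch, not part of the file, of how ARS-style exploration might use the flat-weight API above; `policy` is assumed to be an already-constructed ARSTFPolicy, and the noise scale is a placeholder.

import numpy as np

def mirrored_perturbations(policy, noise_std=0.02):
    # ARS evaluates mirrored (+/-) perturbations around the current flat weights.
    theta = policy.get_flat_weights()
    delta = np.random.randn(theta.size)
    return theta + noise_std * delta, theta - noise_std * delta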
|
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/waterdemand/docs/latex/build_docs.py
|
Python
|
gpl-2.0
| 1,657 | 0.007846 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
import os
import waterdemand
basepath = waterdemand.__path__[0]
path = os.path.join(basepath, "docs", "latex")
cwd = os.getcwd()
os.chdir(path)
modules = ["userguide"]
for module in modules:
# hack - make a fake index file to prime the pump, so that latex doesn't give an error the first time it is run
    # (There's probably a better way to do this, but this works.)
    index_file = open(module + ".ind", 'w')
index_file.write(r"\begin{theindex} \end{theindex}")
    index_file.close()
    # run latex, make the index, then run latex again to resolve cross-references correctly and include the index
if os.system("pdflatex -interaction=nonstopmode " + module + ".tex") > 0:
raise Exception("pdflatex failed")
# The makeindex command will fail if the module doesn't have an index - so it's important NOT to check
# if the result of the system call succeeded. (The advantage of calling it anyway is that we can just
# process all of the files with a loop, rather than having separate processing for modules with and without indices.)
os.system("makeindex " + module + ".idx")
if os.system("pdflatex -interaction=nonstopmode " + module + ".tex") > 0:
raise Exception("Latex failed")
os.chdir(cwd)
# The old script called latex (rather than pdflatex), followed by dvips and ps2pdf
# - pdflatex works better that latex followed by dvips and ps2pdf for producing pdf files if there are no figures
# (gets links right in table of contents)
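
For comparison, a hedged sketch of the same pdflatex -> makeindex -> pdflatex sequence using subprocess instead of os.system; the module name handling mirrors the loop above.

import subprocess

def build_pdf(module):
    # Run pdflatex twice around makeindex so the index and cross-references resolve.
    subprocess.check_call(["pdflatex", "-interaction=nonstopmode", module + ".tex"])
    subprocess.call(["makeindex", module + ".idx"])  # may fail harmlessly when there is no index
    subprocess.check_call(["pdflatex", "-interaction=nonstopmode", module + ".tex"])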
|
jeremiedecock/snippets
|
python/pyqt/pyqt5/widget_QTableView_share_selection_in_two_views.py
|
Python
|
mit
| 1,608 | 0.003109 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Ref: http://doc.qt.io/qt-5/modelview.html#2-1-a-read-only-table
import sys
from PyQt5.QtCore import Qt, QAbstractTableModel, QVariant
from PyQt5.QtWidgets import QApplication, QTableView, QVBoxLayout, QWidget
class MyModel(QAbstractTableModel):
    def __init__(self, parent):
        super().__init__(parent)
def rowCount(self, parent):
return 2
def columnCount(self, parent):
return 3
def data(self, index, role):
if role == Qt.DisplayRole:
            return "({},{})".format(index.row(), index.column())
return QVariant()
if __name__ == '__main__':
app = QApplication(sys.argv)
window = QWidget()
table_view1 = QTableView()
table_view2 = QTableView()
my_model = MyModel(None)
table_view1.setModel(my_model)
table_view2.setModel(my_model)
# http://doc.qt.io/qt-5/model-view-programming.html#sharing-selections-among-views
table_view2.setSelectionModel(table_view1.selectionModel()) # <- Share the SelectionModel
# Set the layout
vbox = QVBoxLayout()
vbox.addWidget(table_view1)
vbox.addWidget(table_view2)
window.setLayout(vbox)
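    # Added sketch (not in the original snippet): because both views share one
    # QItemSelectionModel, a single selectionChanged connection observes
    # selections made in either view.
    table_view1.selectionModel().selectionChanged.connect(
        lambda selected, deselected: print(
            "selected:", [(i.row(), i.column()) for i in selected.indexes()]))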
# Show
window.show()
# The mainloop of the application. The event handling starts from this point.
    # The exec_() method has a trailing underscore because exec is a reserved Python keyword, so exec_() is used instead.
exit_code = app.exec_()
    # The sys.exit() method ensures a clean exit; the environment is informed how the application ended.
sys.exit(exit_code)
|
MauricioDinki/hatefull
|
hatefull/apps/answers/migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 1,380 | 0.003623 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-07 20:55
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('questions', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('tests', '0003_test_user'),
]
operations = [
migrations.CreateModel(
name='Answer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('answer', models.CharField(max_length=50)),
('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='questions.Question', verbose_name=b'Question')),
('question_owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='owner', to=settings.AUTH_USER_MODEL, verbose_name=b'Question owner')),
('tests', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tests.Test', verbose_name=b'Test')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL, verbose_name=b'User')),
],
),
]
|