text (stringlengths 6-947k) | repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses 1) | license (stringclasses 15) | size (int64 6-947k) | score (float64 0-0.34)
---|---|---|---|---|---|---|
# Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields, get_model
from netforce.database import get_connection
from datetime import datetime
import time
from pprint import pprint
def js_time(s):
d=datetime.strptime(s,"%Y-%m-%d %H:%M:%S")
return time.mktime(d.timetuple()) * 1000
def js_date(s):
d=datetime.strptime(s,"%Y-%m-%d")
return time.mktime(d.timetuple()) * 1000
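# --- Illustrative sketch (not part of the original module): the two helpers
# --- above turn Netforce's string timestamps into JavaScript-style epoch
# --- milliseconds (local time), as expected by charting libraries.
def _demo_js_time():
    noon = js_time("2015-01-31 12:00:00")   # ms since the epoch, local time
    midnight = js_date("2015-01-31")        # the same date at 00:00:00
    return noon - midnight                  # 43,200,000 ms in most time zones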
class ReportIssue(Model):
_name = "report.issue"
_store = False
def get_issue_chart(self, context={}):
actions=[]
for issue in get_model("issue").search_browse([]):
if issue.date_created:
actions.append((issue.date_created,"open"))
if issue.state=="closed" and issue.date_closed:
actions.append((issue.date_closed,"close"))
actions.sort()
values=[]
num_issues=0
for d,action in actions:
if action=="open":
num_issues+=1
elif action=="close":
num_issues-=1
values.append((js_time(d), num_issues))
data = {
"value": values,
}
return data
def get_issue_close_chart(self, context={}):
closed={}
for issue in get_model("issue").search_browse([["state","=","closed"],["date_closed","!=",None]]):
d=issue.date_closed[:10]
closed.setdefault(d,0)
closed[d]+=1
values=[]
for d,n in sorted(closed.items()):
values.append((js_date(d), n))
data = {
"value": [{
"key": "Closed",
"values": values,
}]
}
pprint(data)
return data
ReportIssue.register()
| sidzan/netforce | netforce_support/netforce_support/models/report_issue.py | Python | mit | 2,805 | 0.012834 |
#!/usr/bin/python
import os
path = os.path.join(os.path.dirname(__file__), "../")
path = os.path.abspath(path)
regex = r'-regex ".*\.[cChH]\(pp\)?"'  # raw string keeps the backslashes intact for find
exclude = '-not -path "*/external/*" -not -name "*#*"'
cmd = 'find {p} {r} {e} -print | xargs etags '.format(p=path, e=exclude, r=regex)
print cmd
os.system(cmd)
| bailey-lab/graphSourceCode | scripts/etags.py | Python | gpl-3.0 | 316 | 0.012658 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
OTBTester.py
---------------------
Copyright : (C) 2013 by CS Systemes d'information (CS SI)
Email : otb at c-s dot fr (CS SI)
Contributors : Julien Malik (CS SI)
Oscar Picas (CS SI)
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from future import standard_library
standard_library.install_aliases()
from builtins import zip
from builtins import str
from builtins import range
from builtins import object
__author__ = 'Julien Malik, Oscar Picas'
__copyright__ = '(C) 2013, CS Systemes d\'information (CS SI)'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from parsing import parse
from string import Template
import os
import traceback
from configparser import SafeConfigParser
from processing.otb.OTBHelper import get_OTB_log
class LowerTemplate(Template):
def safe_substitute(self, param):
ret = super(LowerTemplate, self).safe_substitute(param).lower()
return ret
class MakefileParser(object):
def __init__(self):
self.maxDiff = None
        self.parser = SafeConfigParser()
        if not os.path.exists('otbcfg.ini'):
            raise Exception("OTB_SOURCE_DIR and OTB_BINARY_DIR must be specified in the file otbcfg.ini")
        self.parser.read('otbcfg.ini')
self.root_dir = self.parser.get('otb', 'checkout_dir')
if not os.path.exists(self.root_dir):
raise Exception("Check otbcfg.ini : OTB_SOURCE_DIR and OTB_BINARY_DIR must be specified there")
self.build_dir = self.parser.get('otb', 'build_dir')
if not os.path.exists(self.build_dir):
raise Exception("Check otbcfg.ini : OTB_SOURCE_DIR and OTB_BINARY_DIR must be specified there")
self.logger = get_OTB_log()
def test_CMakelists(self):
provided = {}
provided["OTB_SOURCE_DIR"] = self.root_dir
provided["OTB_BINARY_DIR"] = self.build_dir
provided["OTB_DATA_LARGEINPUT_ROOT"] = os.path.normpath(os.path.join(self.root_dir, "../OTB-Data/Input"))
try:
with open(os.path.join(self.root_dir, "CMakeLists.txt")) as file_input:
content = file_input.read()
output = parse(content)
defined_paths = [each for each in output if 'Command' in str(type(each)) and "FIND_PATH" in each.name]
the_paths = {key.body[0].contents: [thing.contents for thing in key.body[1:]] for key in defined_paths}
the_sets = [each for each in output if 'Command' in str(type(each)) and "SET" in each.name.upper()]
the_sets = {key.body[0].contents: [thing.contents for thing in key.body[1:]] for key in the_sets}
the_sets = {key: " ".join(the_sets[key]) for key in the_sets}
the_strings = set([each.body[-1].contents for each in output if 'Command' in str(type(each)) and "STRING" in each.name.upper()])
def mini_clean(item):
if item.startswith('"') and item.endswith('"') and " " not in item:
return item[1:-1]
return item
the_sets = {key: mini_clean(the_sets[key]) for key in the_sets}
def templatize(item):
if "$" in item:
return Template(item)
return item
for key in the_sets:
if key in the_strings:
the_sets[key] = the_sets[key].lower()
the_sets = {key: templatize(the_sets[key]) for key in the_sets}
for path in the_paths:
target_file = the_paths[path][1]
suggested_paths = []
if len(the_paths[path]) > 2:
suggested_paths = the_paths[path][2:]
try:
provided[path] = find_file(target_file)
except Exception as e:
for each in suggested_paths:
st = Template(each)
pac = os.path.abspath(st.safe_substitute(provided))
if os.path.exists(pac):
provided[path] = pac
break
resolve_dict(provided, the_sets)
provided.update(the_sets)
return provided
except Exception as e:
            traceback.print_exc()
            raise Exception(str(e))  # MakefileParser is not a TestCase, so self.fail() would raise AttributeError
def add_make(self, previous_context, new_file):
with open(new_file) as f:
input = f.read()
output = parse(input)
apps = [each for each in output if 'Command' in str(type(each))]
setcommands = [each for each in apps if 'SET' in each.name.upper()]
stringcommands = [each for each in apps if 'STRING' in each.name.upper()]
environment = previous_context
def mini_clean(item):
if item.startswith('"') and item.endswith('"') and " " not in item:
return item[1:-1]
return item
new_env = {}
for command in setcommands:
key = command.body[0].contents
ct = " ".join([item.contents for item in command.body[1:]])
ct = mini_clean(ct)
if "$" in ct:
values = Template(ct)
else:
values = ct
new_env[key] = values
for stringcommand in stringcommands:
key = stringcommand.body[-1].contents
ct = stringcommand.body[-2].contents
ct = mini_clean(ct.lower())
if "$" in ct:
values = LowerTemplate(ct)
else:
values = ct
new_env[key] = values
resolve_dict(environment, new_env)
environment.update(new_env)
return environment
def get_apps(self, the_makefile, the_dict):
with open(the_makefile) as f:
input = f.read()
output = parse(input)
apps = [each for each in output if 'Command' in str(type(each))]
otb_apps = [each for each in apps if 'OTB_TEST_APPLICATION' in each.name.upper()]
return otb_apps
def get_tests(self, the_makefile, the_dict):
with open(the_makefile) as f:
input = f.read()
output = parse(input)
apps = [each for each in output if 'Command' in str(type(each))]
otb_tests = [each for each in apps if 'ADD_TEST' in each.name.upper()]
return otb_tests
def get_apps_with_context(self, the_makefile, the_dict):
with open(the_makefile) as f:
input = f.read()
output = parse(input)
def is_a_command(item):
return 'Command' in str(type(item))
appz = []
context = []
for each in output:
if is_a_command(each):
if 'FOREACH' in each.name and 'ENDFOREACH' not in each.name:
args = [item.contents for item in each.body]
context.append(args)
elif 'ENDFOREACH' in each.name:
context.pop()
elif 'OTB_TEST_APPLICATION' in each.name.upper():
appz.append((each, context[:]))
return appz
def get_name_line(self, the_list, the_dict):
items = ('NAME', 'APP', 'OPTIONS', 'TESTENVOPTIONS', 'VALID')
itemz = [[], [], [], [], []]
last_index = 0
for each in the_list:
if each.contents in items:
last_index = items.index(each.contents)
else:
itemz[last_index].append(each.contents)
result = itemz[0][0]
the_string = Template(result).safe_substitute(the_dict)
if '$' in the_string:
neo_dict = the_dict
the_string = Template(the_string).safe_substitute(neo_dict)
while '$' in the_string:
try:
the_string = Template(the_string).substitute(neo_dict)
except KeyError as e:
self.logger.warning("Key %s is not found in makefiles" % str(e))
neo_dict[str(e)] = ""
if 'string.Template' in the_string:
raise Exception("Unexpected toString call in %s" % the_string)
return the_string
def get_command_line(self, the_list, the_dict):
items = ('NAME', 'APP', 'OPTIONS', 'TESTENVOPTIONS', 'VALID')
itemz = [[], [], [], [], []]
last_index = 0
for each in the_list:
if each.contents in items:
last_index = items.index(each.contents)
else:
itemz[last_index].append(each.contents)
result = []
result.extend(["otbcli_%s" % each for each in itemz[1]])
        if len(result[0]) == len("otbcli_"):  # only the prefix is left, so the app name is empty
raise Exception("App name is empty!")
result.extend(itemz[2])
result.append("-testenv")
result.extend(itemz[3])
the_string = Template(" ".join(result)).safe_substitute(the_dict)
if '$' in the_string:
neo_dict = the_dict
the_string = Template(" ".join(result)).safe_substitute(neo_dict)
while '$' in the_string:
try:
the_string = Template(the_string).substitute(neo_dict)
except KeyError as e:
self.logger.warning("Key %s is not found in makefiles" % str(e))
neo_dict[str(e)] = ""
if 'string.Template' in the_string:
raise Exception("Unexpected toString call in %s" % the_string)
return the_string
def get_test(self, the_list, the_dict):
items = ('NAME', 'APP', 'OPTIONS', 'TESTENVOPTIONS', 'VALID')
itemz = [[], [], [], [], []]
last_index = 0
for each in the_list:
if each.contents in items:
last_index = items.index(each.contents)
else:
itemz[last_index].append(each.contents)
result = ["otbTestDriver"]
result.extend(itemz[4])
if len(result) == 1:
return ""
the_string = Template(" ".join(result)).safe_substitute(the_dict)
if '$' in the_string:
neo_dict = the_dict
the_string = Template(" ".join(result)).safe_substitute(neo_dict)
while '$' in the_string:
try:
the_string = Template(the_string).substitute(neo_dict)
except KeyError as e:
self.logger.warning("Key %s is not found in makefiles" % str(e))
neo_dict[str(e)] = ""
if 'string.Template' in the_string:
raise Exception("Unexpected toString call in %s" % the_string)
return the_string
def test_algos(self):
tests = {}
algos_dir = os.path.join(self.root_dir, "Testing/Applications")
makefiles = find_files("CMakeLists.txt", algos_dir)
to_be_excluded = os.path.join(self.root_dir, "Testing/Applications/CMakeLists.txt")
if to_be_excluded in makefiles:
makefiles.remove(to_be_excluded)
resolve_algos = {}
for makefile in makefiles:
intermediate_makefiles = []
path = makefile.split(os.sep)[len(self.root_dir.split(os.sep)):-1]
for ind in range(len(path)):
tmp_path = path[:ind + 1]
tmp_path.append("CMakeLists.txt")
tmp_path = os.sep.join(tmp_path)
candidate_makefile = os.path.join(self.root_dir, tmp_path)
if os.path.exists(candidate_makefile):
intermediate_makefiles.append(candidate_makefile)
resolve_algos[makefile] = intermediate_makefiles
dict_for_algo = {}
for makefile in makefiles:
basic = self.test_CMakelists()
last_context = self.add_make(basic, os.path.join(self.root_dir, "Testing/Utilities/CMakeLists.txt"))
for intermediate_makefile in resolve_algos[makefile]:
last_context = self.add_make(last_context, intermediate_makefile)
dict_for_algo[makefile] = last_context
for makefile in makefiles:
appz = self.get_apps_with_context(makefile, dict_for_algo[makefile])
for app, context in appz:
if len(context) == 0:
import copy
ddi = copy.deepcopy(dict_for_algo[makefile])
tk_dict = autoresolve(ddi)
tk_dict = autoresolve(tk_dict)
name_line = self.get_name_line(app.body, tk_dict)
command_line = self.get_command_line(app.body, tk_dict)
test_line = self.get_test(app.body, tk_dict)
if '$' in test_line or '$' in command_line:
if '$' in command_line:
self.logger.error(command_line)
if '$' in test_line:
self.logger.warning(test_line)
else:
tests[name_line] = (command_line, test_line)
else:
contexts = {}
for iteration in context:
key = iteration[0]
values = [each[1:-1].lower() for each in iteration[1:]]
contexts[key] = values
keyorder = list(contexts.keys())
import itertools
pool = [each for each in itertools.product(*list(contexts.values()))]
import copy
for poolinstance in pool:
neo_dict = copy.deepcopy(dict_for_algo[makefile])
zipped = list(zip(keyorder, poolinstance))
for each in zipped:
neo_dict[each[0]] = each[1]
ak_dict = autoresolve(neo_dict)
ak_dict = autoresolve(ak_dict)
ak_dict = autoresolve(ak_dict)
ddi = ak_dict
name_line = self.get_name_line(app.body, ddi)
command_line = self.get_command_line(app.body, ddi)
test_line = self.get_test(app.body, ddi)
                        if '$' in command_line or '$' in test_line:  # same check as the len(context) == 0 branch above
if '$' in command_line:
self.logger.error(command_line)
if '$' in test_line:
self.logger.warning(test_line)
else:
tests[name_line] = (command_line, test_line)
return tests
def autoresolve(a_dict):
def as_template(item, b_dict):
if hasattr(item, 'safe_substitute'):
return item.safe_substitute(b_dict)
ate = Template(item)
return ate.safe_substitute(b_dict)
templatized = {key: as_template(a_dict[key], a_dict) for key in list(a_dict.keys())}
return templatized
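# --- Illustrative sketch (not in the original file): autoresolve() does one
# --- pass of string.Template substitution over a dict, using the dict itself
# --- as the namespace; values that are already Templates are honoured.
def _demo_autoresolve():
    env = {'OTB_SOURCE_DIR': '/src/otb', 'DATA': '${OTB_SOURCE_DIR}/data'}
    resolved = autoresolve(env)
    return resolved['DATA']   # '/src/otb/data'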
def find_file(file_name, base_dir=os.curdir):
import os
for root, dirs, files in os.walk(base_dir, topdown=False):
for name in files:
if name == file_name:
return os.path.join(root, name)
raise Exception("File not found %s" % file_name)
def find_files(file_name, base_dir=os.curdir):
import os
result = []
for root, dirs, files in os.walk(base_dir, topdown=False):
for name in files:
if name == file_name:
result.append(os.path.join(root, name))
return result
def resolve_dict(adia, adib):
init = len(adia)
fin = len(adia) + 1
def _resolve_dict(dia, dib):
for key in dib:
cand_value = dib[key]
if hasattr(cand_value, 'safe_substitute'):
value = cand_value.safe_substitute(dia)
if isinstance(value, str) and "$" not in value:
dia[key] = value
else:
dia[key] = cand_value
for key in dia:
if key in dib:
del dib[key]
while(init != fin):
init = len(adia)
_resolve_dict(adia, adib)
fin = len(adia)
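# --- Illustrative sketch (not in the original file): resolve_dict() keeps
# --- substituting entries of the second dict against the first until no new
# --- variables can be resolved; it mutates both dicts in place.
def _demo_resolve_dict():
    known = {'ROOT': '/src/otb'}
    pending = {'DATA': Template('${ROOT}/data'), 'NAME': 'otb'}
    resolve_dict(known, pending)
    return known   # now also holds 'DATA': '/src/otb/data' and 'NAME': 'otb'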
| drnextgis/QGIS | python/plugins/processing/algs/otb/maintenance/OTBTester.py | Python | gpl-2.0 | 17,027 | 0.001879 |
# -*- coding: utf-8 -*-
""" Friendly Dates and Times """
# Disable pylint's invalid name warning. 'tz' is used in a few places and it
# should be the only thing causing pylint to include the warning.
# pylint: disable-msg=C0103
import calendar
import datetime
import locale
import os
import pytz
import random
# Some functions may take a parameter to designate a return value in UTC
# instead of local time. This will be used to force them to return UTC
# regardless of the parameter's value.
_FORCE_UTC = False
class _FormatsMetaClass(type):
"""Allows the formats class to be treated as an iterable.
    It is important to understand how this class works.
    ``hasattr(formats, 'DATE')`` is true. ``'DATE' in formats`` is false.
    ``hasattr(formats, 'D_FMT')`` is false. ``'D_FMT' in formats`` is true.
    This is made possible through the ``__contains__`` and ``__getitem__``
    methods. ``__getitem__`` checks for the name of the attribute within
    the ``formats`` class. ``__contains__``, on the other hand, checks for
    the specified value assigned to an attribute of the class.
    """
DATE = 'D_FMT'
DATETIME = 'D_T_FMT'
TIME = 'T_FMT'
TIME_AMPM = 'T_FMT_AMPM'
def __contains__(self, value):
index = 0
for attr in dir(_FormatsMetaClass):
if not attr.startswith('__') and attr != 'mro' and \
getattr(_FormatsMetaClass, attr) == value:
index = attr
break
return index
def __getitem__(self, attr):
return getattr(_FormatsMetaClass, attr)
def __iter__(self):
for attr in dir(_FormatsMetaClass):
if not attr.startswith('__') and attr != 'mro':
yield attr
formats = _FormatsMetaClass('formats', (object,), {})
formats.__doc__ = """A set of predefined datetime formats.
.. versionadded:: 0.3.0
"""
def _add_time(value, years=0, months=0, weeks=0, days=0,
hours=0, minutes=0, seconds=0, milliseconds=0, microseconds=0):
assert _is_date_type(value)
# If any of the standard timedelta values are used, use timedelta for them.
if seconds or minutes or hours or days or weeks:
delta = datetime.timedelta(weeks=weeks, days=days, hours=hours,
minutes=minutes, seconds=seconds,
milliseconds=milliseconds,
microseconds=microseconds)
value += delta
# Months are tricky. If the current month plus the requested number of
# months is greater than 12 (or less than 1), we'll get a ValueError. After
# figuring out the number of years and months from the number of months,
# shift the values so that we get a valid month.
if months:
more_years, months = divmod(months, 12)
years += more_years
if not (1 <= months + value.month <= 12):
more_years, months = divmod(months + value.month, 12)
months -= value.month
years += more_years
if months or years:
year = value.year + years
month = value.month + months
        # When converting from a day in a month that doesn't exist in the
# ending month, a ValueError will be raised. What follows is an ugly,
# ugly hack to get around this.
try:
value = value.replace(year=year, month=month)
except ValueError:
# When the day in the origin month isn't in the destination month,
# the total number of days in the destination month is needed.
# calendar.mdays would be a nice way to do this except it doesn't
# account for leap years at all; February always has 28 days.
_, destination_days = calendar.monthrange(year, month)
# I am reluctantly writing this comment as I fear putting the
# craziness of the hack into writing, but I don't want to forget
# what I was doing here so I can fix it later.
#
# The new day will either be 1, 2, or 3. It will be determined by
# the difference in days between the day value of the datetime
# being altered and the number of days in the destination month.
# After that, month needs to be incremented. If that puts the new
# date into January (the value will be 13), year will also need to
# be incremented (with month being switched to 1).
#
# Once all of that has been figured out, a simple replace will do
# the trick.
day = value.day - destination_days
month += 1
if month > 12:
month = 1
year += 1
value = value.replace(year=year, month=month, day=day)
return value
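# --- Illustrative sketch (not part of the original module): the month
# --- arithmetic above intentionally rolls overflowing days into the next
# --- month instead of raising, e.g. Jan 31 plus one month lands in March.
def _demo_add_time_month_overflow():
    start = datetime.date(2011, 1, 31)
    shifted = _add_time(start, months=1)   # Feb 31 doesn't exist...
    return shifted                         # ...so this is March 3 (Feb 2011 has 28 days)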
def _is_date_type(value):
    # Acceptable types must be or extend:
# datetime.date
# datetime.time
return isinstance(value, (datetime.date, datetime.time))
def all_timezones():
"""Get a list of all time zones.
This is a wrapper for ``pytz.all_timezones``.
:returns: list -- all time zones.
.. versionadded:: 0.1.0
"""
return pytz.all_timezones
def all_timezones_set():
"""Get a set of all time zones.
This is a wrapper for ``pytz.all_timezones_set``.
:returns: set -- all time zones.
.. versionadded:: 0.1.0
"""
return pytz.all_timezones_set
def common_timezones():
"""Get a list of common time zones.
This is a wrapper for ``pytz.common_timezones``.
:returns: list -- common time zones.
.. versionadded:: 0.1.0
"""
return pytz.common_timezones
def common_timezones_set():
"""Get a set of common time zones.
This is a wrapper for ``pytz.common_timezones_set``.
:returns: set -- common time zones.
.. versionadded:: 0.1.0
"""
return pytz.common_timezones_set
def ever():
"""Get a random datetime.
Instead of using ``datetime.MINYEAR`` and ``datetime.MAXYEAR`` as the
bounds, the current year +/- 100 is used. The thought behind this is that
years that are too extreme will not be as useful.
:returns: datetime.datetime -- a random datetime.
.. versionadded:: 0.3.0
"""
# Get the year bounds
min_year = max(datetime.MINYEAR, today().year - 100)
max_year = min(datetime.MAXYEAR, today().year + 100)
# Get the random values
year = random.randint(min_year, max_year)
month = random.randint(1, 12)
day = random.randint(1, calendar.mdays[month])
hour = random.randint(0, 23)
minute = random.randint(0, 59)
second = random.randint(0, 59)
    microsecond = random.randint(0, 999999)  # datetime rejects microsecond == 1000000
return datetime.datetime(year=year, month=month, day=day, hour=hour,
minute=minute, second=second,
microsecond=microsecond)
def format(value, format_string):
"""Get a formatted version of a datetime.
This is a wrapper for ``strftime()``. The full list of directives that can
be used can be found at
http://docs.python.org/library/datetime.html#strftime-strptime-behavior.
Predefined formats are exposed through ``when.formats``:
.. data:: when.formats.DATE
Date in locale-based format.
.. data:: when.formats.DATETIME
Date and time in locale-based format.
.. data:: when.formats.TIME
Time in locale-based format.
.. data:: when.formats.TIME_AMPM
12-hour time in locale-based format.
:param value: A datetime object.
:type value: datetime.datetime, datetime.date, datetime.time.
    :param format_string: A string specifying the formatting directives to
                          use.
:type format_string: str.
:returns: str -- the formatted datetime.
:raises: AssertionError
.. versionadded:: 0.3.0
"""
assert _is_date_type(value)
# Check to see if `format_string` is a value from the `formats` class. If
# it is, obtain the real value from `locale.nl_langinfo()`.
if format_string in formats:
format_string = locale.nl_langinfo(getattr(locale, format_string))
return value.strftime(format_string)
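# --- Illustrative sketch (not part of the original module): format() accepts
# --- raw strftime directives, or one of the ``when.formats`` constants,
# --- which are resolved through the current locale.
def _demo_format():
    d = datetime.datetime(2012, 2, 29, 14, 30)
    return format(d, '%Y-%m-%d %H:%M')   # '2012-02-29 14:30'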
def future(years=0, months=0, weeks=0, days=0,
hours=0, minutes=0, seconds=0, milliseconds=0, microseconds=0,
utc=False):
"""Get a datetime in the future.
    ``future()`` accepts all of the parameters of ``datetime.timedelta``,
plus includes the parameters ``years`` and ``months``. ``years`` and
``months`` will add their respective units of time to the datetime.
By default ``future()`` will return the datetime in the system's local
time. If the ``utc`` parameter is set to ``True`` or ``set_utc()`` has been
called, the datetime will be based on UTC instead.
:param years: The number of years to add.
:type years: int.
:param months: The number of months to add.
:type months: int.
:param weeks: The number of weeks to add.
:type weeks: int.
:param days: The number of days to add.
:type days: int.
:param hours: The number of hours to add.
:type hours: int.
:param minutes: The number of minutes to add.
:type minutes: int.
:param seconds: The number of seconds to add.
:type seconds: int.
:param milliseconds: The number of milliseconds to add.
:type milliseconds: int.
:param microseconds: The number of microseconds to add.
:type microseconds: int.
:param utc: Whether or not to use UTC instead of local time.
:type utc: bool.
:returns: datetime.datetime -- the calculated datetime.
.. versionadded:: 0.1.0
"""
return _add_time(now(utc), years=years, months=months, weeks=weeks,
days=days, hours=hours, minutes=minutes, seconds=seconds,
milliseconds=milliseconds, microseconds=microseconds)
def how_many_leap_days(from_date, to_date):
"""Get the number of leap days between two dates
:param from_date: A datetime object. If only a year is specified, will use
January 1.
:type from_date: datetime.datetime, datetime.date
    :param to_date: A datetime object. If only a year is specified, will use
January 1.
:type to_date: datetime.datetime, datetime.date
:returns: int -- the number of leap days.
.. versionadded:: 0.3.0
"""
if isinstance(from_date, int):
from_date = datetime.date(from_date, 1, 1)
if isinstance(to_date, int):
to_date = datetime.date(to_date, 1, 1)
assert _is_date_type(from_date) and \
not isinstance(from_date, datetime.time)
assert _is_date_type(to_date) and not isinstance(to_date, datetime.time)
# Both `from_date` and `to_date` need to be of the same type. Since both
# `datetime.date` and `datetime.datetime` will pass the above assertions,
# cast any `datetime.datetime` values to `datetime.date`.
if isinstance(from_date, datetime.datetime):
from_date = from_date.date()
if isinstance(to_date, datetime.datetime):
to_date = to_date.date()
assert from_date <= to_date
number_of_leaps = calendar.leapdays(from_date.year, to_date.year)
# `calendar.leapdays()` calculates the number of leap days by using
# January 1 for the specified years. If `from_date` occurs after
# February 28 in a leap year, remove one leap day from the total. If
# `to_date` occurs after February 28 in a leap year, add one leap day to
# the total.
if calendar.isleap(from_date.year):
month, day = from_date.month, from_date.day
if month > 2 or (month == 2 and day > 28):
number_of_leaps -= 1
if calendar.isleap(to_date.year):
month, day = to_date.month, to_date.day
if month > 2 or (month == 2 and day > 28):
number_of_leaps += 1
return number_of_leaps
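# --- Illustrative sketch (not part of the original module): plain years may
# --- be passed instead of dates, and dates after Feb 28 in a leap year shift
# --- the count as described in the comments above.
def _demo_leap_days():
    assert how_many_leap_days(2012, 2014) == 1   # just Feb 29, 2012
    assert how_many_leap_days(datetime.date(2012, 3, 1), 2014) == 0
    return how_many_leap_days(2000, 2100)        # 25 leap days in that range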
def is_5_oclock():
# Congratulations, you've found an easter egg!
#
# Returns a `datetime.timedelta` object representing how much time is
# remaining until 5 o'clock. If the current time is between 5pm and
# midnight, a negative value will be returned. Keep in mind, a `timedelta`
# is considered negative when the `days` attribute is negative; the values
# for `seconds` and `microseconds` will always be positive.
#
# All values will be `0` at 5 o'clock.
# Because this method deals with local time, the force UTC flag will need
# to be turned off and back on if it has been set.
force = _FORCE_UTC
if force:
unset_utc()
# A `try` is used here to ensure that the UTC flag will be restored
# even if an exception is raised when calling `now()`. This should never
# be the case, but better safe than sorry.
try:
the_datetime = now()
finally:
if force:
set_utc()
five = datetime.time(17)
return datetime.datetime.combine(the_datetime.date(), five) - the_datetime
def is_timezone_aware(value):
"""Check if a datetime is time zone aware.
`is_timezone_aware()` is the inverse of `is_timezone_naive()`.
:param value: A valid datetime object.
:type value: datetime.datetime, datetime.time
:returns: bool -- if the object is time zone aware.
.. versionadded:: 0.3.0
"""
assert hasattr(value, 'tzinfo')
return value.tzinfo is not None and \
value.tzinfo.utcoffset(value) is not None
def is_timezone_naive(value):
"""Check if a datetime is time zone naive.
`is_timezone_naive()` is the inverse of `is_timezone_aware()`.
:param value: A valid datetime object.
:type value: datetime.datetime, datetime.time
:returns: bool -- if the object is time zone naive.
.. versionadded:: 0.3.0
"""
assert hasattr(value, 'tzinfo')
return value.tzinfo is None or value.tzinfo.utcoffset(value) is None
def now(utc=False):
"""Get a datetime representing the current date and time.
By default ``now()`` will return the datetime in the system's local time.
If the ``utc`` parameter is set to ``True`` or ``set_utc()`` has been
called, the datetime will be based on UTC instead.
:param utc: Whether or not to use UTC instead of local time.
:type utc: bool.
:returns: datetime.datetime -- the current datetime.
.. versionadded:: 0.1.0
"""
if _FORCE_UTC or utc:
return datetime.datetime.utcnow()
else:
return datetime.datetime.now()
def past(years=0, months=0, weeks=0, days=0,
hours=0, minutes=0, seconds=0, milliseconds=0, microseconds=0,
utc=False):
"""Get a datetime in the past.
    ``past()`` accepts all of the parameters of ``datetime.timedelta``,
plus includes the parameters ``years`` and ``months``. ``years`` and
``months`` will add their respective units of time to the datetime.
By default ``past()`` will return the datetime in the system's local time.
If the ``utc`` parameter is set to ``True`` or ``set_utc()`` has been
called, the datetime will be based on UTC instead.
:param years: The number of years to subtract.
:type years: int.
:param months: The number of months to subtract.
:type months: int.
:param weeks: The number of weeks to subtract.
:type weeks: int.
:param days: The number of days to subtract.
:type days: int.
:param hours: The number of hours to subtract.
:type hours: int.
:param minutes: The number of minutes to subtract.
:type minutes: int.
:param seconds: The number of seconds to subtract.
:type seconds: int.
:param milliseconds: The number of milliseconds to subtract.
:type milliseconds: int.
:param microseconds: The number of microseconds to subtract.
:type microseconds: int.
:param utc: Whether or not to use UTC instead of local time.
:type utc: bool.
:returns: datetime.datetime -- the calculated datetime.
.. versionadded:: 0.1.0
"""
    return _add_time(now(utc), years=-years, months=-months, weeks=-weeks,
                     days=-days, hours=-hours, minutes=-minutes,
                     seconds=-seconds, milliseconds=-milliseconds,
                     microseconds=-microseconds)
def set_utc():
"""Set all datetimes to UTC.
The ``utc`` parameter of other methods will be ignored, with the global
setting taking precedence.
This can be reset by calling ``unset_utc()``.
.. versionadded:: 0.1.0
"""
global _FORCE_UTC # Causes pylint W0603
_FORCE_UTC = True
def shift(value, from_tz=None, to_tz=None, utc=False):
"""Convert a datetime from one time zone to another.
``value`` will be converted from its time zone (when it is time zone aware)
or the time zone specified by ``from_tz`` (when it is time zone naive) to
the time zone specified by ``to_tz``. These values can either be strings
containing the name of the time zone (see ``pytz.all_timezones`` for a list
of all supported values) or a ``datetime.tzinfo`` object.
If no value is provided for either ``from_tz`` (when ``value`` is time zone
naive) or ``to_tz``, the current system time zone will be used. If the
``utc`` parameter is set to ``True`` or ``set_utc()`` has been called,
however, UTC will be used instead.
:param value: A datetime object.
:type value: datetime.datetime, datetime.time.
:param from_tz: The time zone to shift from.
:type from_tz: datetime.tzinfo, str.
:param to_tz: The time zone to shift to.
:type to_tz: datetime.tzinfo, str.
:param utc: Whether or not to use UTC instead of local time.
:type utc: bool.
:returns: datetime.datetime -- the calculated datetime.
:raises: AssertionError
.. versionchanged:: 0.3.0
Added AssertionError for invalid values of ``value``
"""
assert hasattr(value, 'tzinfo')
# Check for a from timezone
# If the datetime is time zone aware, its time zone should be used. If it's
# naive, from_tz must be supplied.
if is_timezone_aware(value):
from_tz = value.tzinfo
else:
if not from_tz:
if _FORCE_UTC or utc:
from_tz = pytz.UTC
else:
from_tz = timezone_object() # Use the system's time zone
else:
if not isinstance(from_tz, datetime.tzinfo):
# This will raise pytz.UnknownTimeZoneError
from_tz = pytz.timezone(from_tz)
# Check for a to timezone
if not to_tz:
if _FORCE_UTC or utc:
to_tz = pytz.UTC
else:
to_tz = timezone_object() # Use the system's time zone
else:
if not isinstance(to_tz, datetime.tzinfo):
# This will raise pytz.UnknownTimeZoneError
to_tz = pytz.timezone(to_tz)
if from_tz == to_tz:
return value
# If the datetime is time zone naive, pytz provides a convenient way to
# covert it to time zone aware. Using replace() directly on the datetime
# results in losing an hour when converting ahead.
if is_timezone_naive(value):
value = from_tz.localize(value)
return value.astimezone(to_tz).replace(tzinfo=None)
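# --- Illustrative sketch (not part of the original module): shifting a naive
# --- datetime between two named zones; the result is returned naive as well.
def _demo_shift():
    d = datetime.datetime(2012, 6, 1, 12, 0)                   # naive local value
    return shift(d, from_tz='America/New_York', to_tz='UTC')   # 2012-06-01 16:00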
def timezone():
"""Get the name of the current system time zone.
:returns: str -- the name of the system time zone.
.. versionadded:: 0.1.0
"""
def _inner():
""" check for the time zone:
1. as an environment setting (most likely not)
2. in /etc/timezone (hopefully)
3. in /etc/localtime (last chance)
"""
tz = _timezone_from_env() # 1
if tz is not None:
return tz
tz = _timezone_from_etc_timezone() # 2
if tz is not None:
return tz
tz = _timezone_from_etc_localtime() # 3
if tz is not None:
return tz
return '{0}'.format(_inner())
def _timezone_from_env():
""" get the system time zone from os.environ """
if 'TZ' in os.environ:
try:
return pytz.timezone(os.environ['TZ'])
except pytz.UnknownTimeZoneError:
pass
return None
def _timezone_from_etc_localtime():
""" get the system time zone from /etc/loclatime """
matches = []
if os.path.exists('/etc/localtime'):
localtime = pytz.tzfile.build_tzinfo('/etc/localtime',
file('/etc/localtime'))
for tzname in pytz.all_timezones:
tz = pytz.timezone(tzname)
if dir(tz) != dir(localtime):
continue
for attr in dir(tz):
if callable(getattr(tz, attr)) or attr.startswith('__'):
continue
if attr == 'zone' or attr == '_tzinfos':
continue
if getattr(tz, attr) != getattr(localtime, attr):
break
else:
matches.append(tzname)
if matches:
return pytz.timezone(matches[0])
else:
# Causes pylint W0212
pytz._tzinfo_cache['/etc/localtime'] = localtime
return localtime
def _timezone_from_etc_timezone():
""" get the system time zone from /etc/timezone """
if os.path.exists('/etc/timezone'):
tz = file('/etc/timezone').read().strip()
try:
return pytz.timezone(tz)
except pytz.UnknownTimeZoneError:
pass
return None
def timezone_object(tz_name=None):
"""Get the current system time zone.
:param tz_name: The name of the time zone.
:type tz_name: str.
:returns: datetime.tzinfo -- the time zone, defaults to system time zone.
.. versionadded:: 0.1.0
"""
return pytz.timezone(tz_name if tz_name else timezone())
def today():
"""Get a date representing the current date.
:returns: datetime.date -- the current date.
.. versionadded:: 0.1.0
"""
return datetime.date.today()
def tomorrow():
"""Get a date representing tomorrow's date.
:returns: datetime.date -- the current date plus one day.
.. versionadded:: 0.1.0
"""
return datetime.date.today() + datetime.timedelta(days=1)
def unset_utc():
"""Set all datetimes to system time.
The ``utc`` parameter of other methods will be used.
This can be changed by calling ``set_utc()``.
.. versionadded:: 0.1.0
"""
global _FORCE_UTC # Causes pylint W0603
_FORCE_UTC = False
def yesterday():
"""Get a date representing yesterday's date.
:returns: datetime.date -- the current date minus one day.
.. versionadded:: 0.1.0
"""
return datetime.date.today() - datetime.timedelta(days=1)
| MVReddy/WhenPy | when.py | Python | bsd-3-clause | 22,733 | 0.000044 |
#!/usr/bin/env python2
# coding=utf-8
#
##############################################################################
### NZBGET POST-PROCESSING SCRIPT ###
# Post-Process to Mylar.
#
# This script sends the download to your automated media management servers.
#
# NOTE: This script requires Python to be installed on your system.
##############################################################################
#
### OPTIONS
## General
# Auto Update nzbToMedia (0, 1).
#
# Set to 1 if you want nzbToMedia to automatically check for and update to the latest version
#auto_update=0
# Safe Mode protection of DestDir (0, 1).
#
# Enable/Disable a safety check to ensure we don't process all downloads in the default_downloadDirectory by mistake.
#safe_mode=1
## Mylar
# Mylar script category.
#
# category that gets called for post-processing with Mylar.
#myCategory=comics
# Mylar host.
#
# The ipaddress for your Mylar server. e.g For the Same system use localhost or 127.0.0.1
#myhost=localhost
# Mylar port.
#myport=8090
# Mylar username.
#myusername=
# Mylar password.
#mypassword=
# Mylar uses ssl (0, 1).
#
# Set to 1 if using ssl, else set to 0.
#myssl=0
# Mylar web_root
#
# set this if using a reverse proxy.
#myweb_root=
# Mylar wait_for
#
# Set the number of minutes to wait after calling the force process, to check the issue has changed status.
#mywait_for=1
# Mylar watch directory.
#
# set this to where your Mylar completed downloads are.
#mywatch_dir=
# Mylar and NZBGet are a different system (0, 1).
#
# Enable to replace local path with the path as per the mountPoints below.
#myremote_path=0
## Posix
# Niceness for external tasks Extractor and Transcoder.
#
# Set the Niceness value for the nice command. These range from -20 (most favorable to the process) to 19 (least favorable to the process).
#niceness=10
# ionice scheduling class (0, 1, 2, 3).
#
# Set the ionice scheduling class. 0 for none, 1 for real time, 2 for best-effort, 3 for idle.
#ionice_class=2
# ionice scheduling class data.
#
# Set the ionice scheduling class data. This defines the class data, if the class accepts an argument. For real time and best-effort, 0-7 is valid data.
#ionice_classdata=4
## Network
# Network Mount Points (Needed for remote path above)
#
# Enter Mount points as LocalPath,RemotePath and separate each pair with '|'
# e.g. mountPoints=/volume1/Public/,E:\|/volume2/share/,\\NAS\
#mountPoints=
## WakeOnLan
# use WOL (0, 1).
#
# set to 1 to send WOL broadcast to the mac and test the server (e.g. xbmc) on the host and port specified.
#wolwake=0
# WOL MAC
#
# enter the mac address of the system to be woken.
#wolmac=00:01:2e:2D:64:e1
# Set the Host and Port of a server to verify system has woken.
#wolhost=192.168.1.37
#wolport=80
### NZBGET POST-PROCESSING SCRIPT ###
##############################################################################
import sys
import nzbToMedia
section = "Mylar"
result = nzbToMedia.main(sys.argv, section)
sys.exit(result)
| bbsan2k/nzbToMedia | nzbToMylar.py | Python | gpl-3.0 | 3,087 | 0.011986 |
from __future__ import absolute_import, print_function, division
import unittest
from pony.orm.tests.testutils import raises_exception
from pony.orm import *
db = Database('sqlite', ':memory:')
class AbstractUser(db.Entity):
username = PrimaryKey(unicode)
class User(AbstractUser):
diagrams = Set('Diagram')
email = Optional(unicode)
class SubUser1(User):
attr1 = Optional(unicode)
class SubUser2(User):
attr2 = Optional(unicode)
class Organization(AbstractUser):
address = Optional(unicode)
class SubOrg1(Organization):
attr3 = Optional(unicode)
class SubOrg2(Organization):
attr4 = Optional(unicode)
class Diagram(db.Entity):
name = Required(unicode)
owner = Required(User)
db.generate_mapping(create_tables=True)
with db_session:
u1 = User(username='user1')
u2 = SubUser1(username='subuser1', attr1='some attr')
u3 = SubUser2(username='subuser2', attr2='some attr')
o1 = Organization(username='org1')
o2 = SubOrg1(username='suborg1', attr3='some attr')
o3 = SubOrg2(username='suborg2', attr4='some attr')
au = AbstractUser(username='abstractUser')
Diagram(name='diagram1', owner=u1)
Diagram(name='diagram2', owner=u2)
Diagram(name='diagram3', owner=u3)
def is_seed(entity, pk):
    # an object is a "seed" when its primary key is known to the session
    # cache but its attributes have not yet been loaded from the database
cache = entity._database_._get_cache()
return pk in [ obj._pk_ for obj in cache.seeds[entity._pk_attrs_] ]
class TestFindInCache(unittest.TestCase):
def setUp(self):
rollback()
db_session.__enter__()
def tearDown(self):
rollback()
db_session.__exit__()
def test1(self):
u = User.get(username='org1')
org = Organization.get(username='org1')
u1 = User.get(username='org1')
self.assertEqual(u, None)
self.assertEqual(org, Organization['org1'])
self.assertEqual(u1, None)
def test_user_1(self):
Diagram.get(lambda d: d.name == 'diagram1')
last_sql = db.last_sql
self.assertTrue(is_seed(User, 'user1'))
u = AbstractUser['user1']
self.assertNotEqual(last_sql, db.last_sql)
self.assertEqual(u.__class__, User)
def test_user_2(self):
Diagram.get(lambda d: d.name == 'diagram1')
last_sql = db.last_sql
self.assertTrue(is_seed(User, 'user1'))
u = User['user1']
self.assertNotEqual(last_sql, db.last_sql)
self.assertEqual(u.__class__, User)
@raises_exception(ObjectNotFound)
def test_user_3(self):
Diagram.get(lambda d: d.name == 'diagram1')
last_sql = db.last_sql
self.assertTrue(is_seed(User, 'user1'))
try:
SubUser1['user1']
finally:
self.assertNotEqual(last_sql, db.last_sql)
@raises_exception(ObjectNotFound)
def test_user_4(self):
Diagram.get(lambda d: d.name == 'diagram1')
last_sql = db.last_sql
self.assertTrue(is_seed(User, 'user1'))
try:
Organization['user1']
finally:
self.assertEqual(last_sql, db.last_sql)
@raises_exception(ObjectNotFound)
def test_user_5(self):
Diagram.get(lambda d: d.name == 'diagram1')
last_sql = db.last_sql
self.assertTrue(is_seed(User, 'user1'))
try:
SubOrg1['user1']
finally:
self.assertEqual(last_sql, db.last_sql)
def test_subuser_1(self):
Diagram.get(lambda d: d.name == 'diagram2')
last_sql = db.last_sql
self.assertTrue(is_seed(User, 'subuser1'))
u = AbstractUser['subuser1']
self.assertNotEqual(last_sql, db.last_sql)
self.assertEqual(u.__class__, SubUser1)
def test_subuser_2(self):
Diagram.get(lambda d: d.name == 'diagram2')
last_sql = db.last_sql
self.assertTrue(is_seed(User, 'subuser1'))
u = User['subuser1']
self.assertNotEqual(last_sql, db.last_sql)
self.assertEqual(u.__class__, SubUser1)
def test_subuser_3(self):
Diagram.get(lambda d: d.name == 'diagram2')
last_sql = db.last_sql
self.assertTrue(is_seed(User, 'subuser1'))
u = SubUser1['subuser1']
self.assertNotEqual(last_sql, db.last_sql)
self.assertEqual(u.__class__, SubUser1)
@raises_exception(ObjectNotFound)
def test_subuser_4(self):
Diagram.get(lambda d: d.name == 'diagram2')
last_sql = db.last_sql
self.assertTrue(is_seed(User, 'subuser1'))
try:
Organization['subuser1']
finally:
self.assertEqual(last_sql, db.last_sql)
@raises_exception(ObjectNotFound)
def test_subuser_5(self):
Diagram.get(lambda d: d.name == 'diagram2')
last_sql = db.last_sql
self.assertTrue(is_seed(User, 'subuser1'))
try:
SubUser2['subuser1']
finally:
self.assertNotEqual(last_sql, db.last_sql)
@raises_exception(ObjectNotFound)
def test_subuser_6(self):
Diagram.get(lambda d: d.name == 'diagram2')
last_sql = db.last_sql
self.assertTrue(is_seed(User, 'subuser1'))
try:
SubOrg2['subuser1']
finally:
self.assertEqual(last_sql, db.last_sql)
def test_user_6(self):
u1 = SubUser1['subuser1']
last_sql = db.last_sql
u2 = SubUser1['subuser1']
self.assertEqual(last_sql, db.last_sql)
self.assertEqual(u1, u2)
def test_user_7(self):
u1 = SubUser1['subuser1']
u1.delete()
last_sql = db.last_sql
u2 = SubUser1.get(username='subuser1')
self.assertEqual(last_sql, db.last_sql)
self.assertEqual(u2, None)
def test_user_8(self):
u1 = SubUser1['subuser1']
last_sql = db.last_sql
u2 = SubUser1.get(username='subuser1', attr1='wrong val')
self.assertEqual(last_sql, db.last_sql)
self.assertEqual(u2, None)
if __name__ == '__main__':
    unittest.main()
| compiteing/flask-ponypermission | venv/lib/python2.7/site-packages/pony/orm/tests/test_core_find_in_cache.py | Python | mit | 6,105 | 0.00475 |
from datetime import datetime
from google.appengine.ext import ndb
from models import Deletion
from utils import updates
def get_key(slug):
return ndb.Key("Deletion", slug)
def delete_entity(key):
slug = key.string_id()
kind = key.kind()
key.delete()
deletion_key = get_key(slug)
deletion = Deletion(key=deletion_key)
deletion.time_added = datetime.utcnow()
deletion.deletion_key = key
deletion.kind = kind
deletion.put()
updates.set_last_delete_time(deletion.time_added)
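# --- Illustrative sketch (not part of the original module): recording a
# --- tombstone when removing an entity, so sync clients can detect deletes.
# --- ``Page`` is a hypothetical kind; this needs an App Engine datastore context.
def _demo_tombstone(slug):
    page_key = ndb.Key("Page", slug)    # key of the entity about to be removed
    delete_entity(page_key)             # deletes it and stores a Deletion row
    return get_key(slug).get()          # the Deletion tombstone for this slug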
| rockwotj/shiloh-ranch | backend/utils/deletions.py | Python | mit | 543 | 0.001842 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Cloud'
db.create_table(u'cloudslave_cloud', (
('name', self.gf('django.db.models.fields.CharField')(max_length=200, primary_key=True)),
('endpoint', self.gf('django.db.models.fields.URLField')(max_length=200)),
('user_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('tenant_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('password', self.gf('django.db.models.fields.CharField')(max_length=200)),
('region', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
('flavor_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('image_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('floating_ip_mode', self.gf('django.db.models.fields.SmallIntegerField')(default=0)),
))
db.send_create_signal(u'cloudslave', ['Cloud'])
# Adding model 'KeyPair'
db.create_table(u'cloudslave_keypair', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('cloud', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['cloudslave.Cloud'])),
('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('private_key', self.gf('django.db.models.fields.TextField')()),
('public_key', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal(u'cloudslave', ['KeyPair'])
# Adding unique constraint on 'KeyPair', fields ['cloud', 'name']
db.create_unique(u'cloudslave_keypair', ['cloud_id', 'name'])
# Adding model 'Reservation'
db.create_table(u'cloudslave_reservation', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('cloud', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['cloudslave.Cloud'])),
('number_of_slaves', self.gf('django.db.models.fields.IntegerField')()),
('state', self.gf('django.db.models.fields.SmallIntegerField')(default=0)),
('timeout', self.gf('django.db.models.fields.DateTimeField')()),
))
db.send_create_signal(u'cloudslave', ['Reservation'])
# Adding model 'Slave'
db.create_table(u'cloudslave_slave', (
('name', self.gf('django.db.models.fields.CharField')(max_length=200, primary_key=True)),
('reservation', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['cloudslave.Reservation'])),
('cloud_node_id', self.gf('django.db.models.fields.CharField')(max_length=200)),
('state', self.gf('django.db.models.fields.CharField')(max_length=15, null=True, blank=True)),
))
db.send_create_signal(u'cloudslave', ['Slave'])
def backwards(self, orm):
# Removing unique constraint on 'KeyPair', fields ['cloud', 'name']
db.delete_unique(u'cloudslave_keypair', ['cloud_id', 'name'])
# Deleting model 'Cloud'
db.delete_table(u'cloudslave_cloud')
# Deleting model 'KeyPair'
db.delete_table(u'cloudslave_keypair')
# Deleting model 'Reservation'
db.delete_table(u'cloudslave_reservation')
# Deleting model 'Slave'
db.delete_table(u'cloudslave_slave')
models = {
u'cloudslave.cloud': {
'Meta': {'object_name': 'Cloud'},
'endpoint': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'flavor_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'floating_ip_mode': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'image_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'tenant_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user_name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'cloudslave.keypair': {
'Meta': {'unique_together': "(('cloud', 'name'),)", 'object_name': 'KeyPair'},
'cloud': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cloudslave.Cloud']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'private_key': ('django.db.models.fields.TextField', [], {}),
'public_key': ('django.db.models.fields.TextField', [], {})
},
u'cloudslave.reservation': {
'Meta': {'object_name': 'Reservation'},
'cloud': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cloudslave.Cloud']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number_of_slaves': ('django.db.models.fields.IntegerField', [], {}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'timeout': ('django.db.models.fields.DateTimeField', [], {})
},
u'cloudslave.slave': {
'Meta': {'object_name': 'Slave'},
'cloud_node_id': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'primary_key': 'True'}),
'reservation': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cloudslave.Reservation']"}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'})
}
}
    complete_apps = ['cloudslave']
| sorenh/python-django-cloudslave | cloudslave/migrations/0001_initial.py | Python | apache-2.0 | 6,224 | 0.007069 |
#!/usr/bin/python2.7
# -*- coding: utf-8 -*-
from __future__ import print_function
import wx
import threading
import lcm
import random
import Forseti
import configurator
BLUE = (24, 25, 141)
GOLD = (241, 169, 50)
class TeamPanel(wx.Panel):
def __init__(self, remote, letter, number, name, colour, *args, **kwargs):
super(TeamPanel, self).__init__(*args, **kwargs)
self.remote = remote
self.InitUI(letter, number, name, colour)
def InitUI(self, letter, number, name, colour=None):
if colour is not None:
self.SetBackgroundColour(colour)
dc = wx.ScreenDC()
self.num_ctrl = wx.TextCtrl(self, size=(dc.GetCharWidth() * 2, dc.GetCharHeight()))
self.num_ctrl.AppendText(str(number))
self.get_button = wx.Button(self, label='Get', size=(dc.GetCharWidth() * 2, dc.GetCharHeight()))
self.get_button.Bind(wx.EVT_BUTTON, self.do_get_name)
self.name_ctrl = wx.TextCtrl(self, size=(dc.GetCharWidth() * 16,
dc.GetCharHeight()))
self.name_ctrl.AppendText(name)
name_num_box = wx.BoxSizer(wx.HORIZONTAL)
        name_num_box.Add(wx.StaticText(self, label=letter,
            size=(int(dc.GetCharWidth() * 0.6), dc.GetCharHeight())))  # wx sizes must be ints
name_num_box.Add(self.num_ctrl)
name_num_box.Add(self.get_button)
name_num_box.Add(self.name_ctrl)
#button_box = wx.BoxSizer(wx.HORIZONTAL)
#button_box.Add(wx.Button(self, label='Reset'))
#button_box.Add(wx.Button(self, label='Configure'))
#button_box.Add(wx.Button(self, label='Disable'))
self.vbox = wx.BoxSizer(wx.VERTICAL)
self.vbox.Add(name_num_box, flag=wx.CENTER)
#vbox.Add(button_box, flag=wx.CENTER)
self.SetSizer(self.vbox)
self.Show(True)
def do_get_name(self, event):
self.name = configurator.get_team_name(self.number)
@property
def name(self):
return self.name_ctrl.GetValue()
@name.setter
def name(self, val):
self.name_ctrl.SetValue(val)
@property
def number(self):
try:
return int(self.num_ctrl.GetValue())
except ValueError:
return 0
@number.setter
def number(self, val):
self.num_ctrl.SetValue(str(val))
class MatchControl(wx.Panel):
def __init__(self, remote, *args, **kwargs):
super(MatchControl, self).__init__(*args, **kwargs)
self.remote = remote
self.InitUI()
def InitUI(self):
vbox = wx.BoxSizer(wx.VERTICAL)
dc = wx.ScreenDC()
match_number = wx.BoxSizer(wx.HORIZONTAL)
match_number.Add(wx.StaticText(self, label='Match #'.format(1)))
self.match_num_ctrl = wx.TextCtrl(self, size=(dc.GetCharWidth() * 2,
dc.GetCharHeight()))
match_number.Add(self.match_num_ctrl)
vbox.Add(match_number, flag=wx.CENTER)
teamSizer = wx.GridSizer(3, 2)
self.team_panels = [
TeamPanel(self.remote, 'A', 0, 'Unknown Team', BLUE, self),
TeamPanel(self.remote, 'C', 0, 'Unknown Team', GOLD, self),
TeamPanel(self.remote, 'B', 0, 'Unknown Team', BLUE, self),
TeamPanel(self.remote, 'D', 0, 'Unknown Team', GOLD, self),
]
teamSizer.AddMany(
[wx.StaticText(self, label='Blue Team'),
wx.StaticText(self, label='Gold Team')] +
[(panel, 0) for panel in self.team_panels])
vbox.Add(teamSizer, flag=wx.CENTER)
buttons = wx.BoxSizer(wx.HORIZONTAL)
self.init_button = wx.Button(self, label='Init')
self.init_button.Bind(wx.EVT_BUTTON, self.do_init)
self.go_button = wx.Button(self, label='GO!')
self.go_button.Bind(wx.EVT_BUTTON, self.do_go)
self.pause_button = wx.Button(self, label='Pause')
self.pause_button.Bind(wx.EVT_BUTTON, self.do_pause)
#self.save_button = wx.Button(self, label='Save')
#self.save_button.Bind(wx.EVT_BUTTON, self.do_save)
self.time_text = wx.StaticText(self, label='0:00')
self.stage_text = wx.StaticText(self, label='Unknown')
self.remote.time_text = self.time_text
#buttons.Add(self.save_button, flag=wx.LEFT)
buttons.Add(self.init_button)
buttons.Add(self.go_button)
buttons.Add(self.pause_button)
buttons.Add(self.time_text)
buttons.Add(self.stage_text)
vbox.Add(buttons, flag=wx.CENTER)
self.SetSizer(vbox)
self.Show(True)
def do_go(self, e):
self.remote.do_go()
def do_pause(self, e):
self.remote.do_pause()
def do_save(self, e):
self.remote.do_save(self.get_match())
def do_init(self, e):
self.remote.do_init(self.get_match())
def _set_match_panel(self, match, team_idx, panel_idx):
match.team_numbers[team_idx] = self.team_panels[panel_idx].number
match.team_names[team_idx] = self.team_panels[panel_idx].name
def _set_panel_match(self, match, team_idx, panel_idx):
self.team_panels[panel_idx].number = match.team_numbers[team_idx]
self.team_panels[panel_idx].name = match.team_names[team_idx]
def get_match(self):
match = Forseti.Match()
self._set_match_panel(match, 0, 0)
self._set_match_panel(match, 1, 2)
self._set_match_panel(match, 2, 1)
self._set_match_panel(match, 3, 3)
try:
match.match_number = int(self.match_num_ctrl.GetValue())
except ValueError:
match.match_number = random.getrandbits(31)
return match
def set_match(self, match):
self._set_panel_match(match, 0, 0)
self._set_panel_match(match, 1, 2)
self._set_panel_match(match, 2, 1)
self._set_panel_match(match, 3, 3)
self.match_num_ctrl.SetValue(str(match.match_number))
def set_time(self, match):
self.time_text.SetLabel(format_time(match.game_time_so_far))
self.stage_text.SetLabel(match.stage_name)
class ScheduleControl(wx.Panel):
def __init__(self, remote, match_control, *args, **kwargs):
self.remote = remote
super(ScheduleControl, self).__init__(*args, **kwargs)
self.InitUI()
self.remote.match_list_box = self.match_list
self.match_control = match_control
def InitUI(self):
self.match_list = wx.ListBox(self)
self.match_list.Bind(wx.EVT_LISTBOX, self.choose_match)
hbox = wx.BoxSizer(wx.HORIZONTAL)
self.load_button = wx.Button(self, label='Load All')
self.load_button.Bind(wx.EVT_BUTTON, self.do_load)
hbox.Add(self.load_button)
self.clear_first = wx.CheckBox(self, label='Clear first')
self.clear_first.SetValue(True)
hbox.Add(self.clear_first)
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.Add(self.match_list, 1, wx.EXPAND)
vbox.Add(hbox)
self.SetSizer(vbox)
self.Show(True)
def do_load(self, e):
self.remote.do_load(self.clear_first.GetValue())
def choose_match(self, event):
self.match_control.set_match(event.GetClientData())
class MainWindow(wx.Frame):
def __init__(self, remote, *args, **kwargs):
super(MainWindow, self).__init__(*args, **kwargs)
self.remote = remote
self.InitUI()
def InitUI(self):
menubar = wx.MenuBar()
fileMenu = wx.Menu()
fitem = fileMenu.Append(wx.ID_EXIT, 'Quit', 'Quit application')
menubar.Append(fileMenu, '&File')
self.SetMenuBar(menubar)
match_control = MatchControl(self.remote, self)
schedule_control = ScheduleControl(self.remote, match_control, self)
self.remote.match_control = match_control
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.Add(match_control, 0, wx.ALIGN_CENTER | wx.ALIGN_TOP, 8)
vbox.Add(schedule_control, 1, wx.EXPAND | wx.ALIGN_CENTER | wx.ALL, 8)
self.Bind(wx.EVT_MENU, self.OnQuit, fitem)
self.SetSize((800, 600))
self.SetSizer(vbox)
self.SetTitle('Forseti Dashboard')
self.Centre()
self.Show(True)
def OnQuit(self, e):
self.Close()
def format_match(match):
print(match.match_number)
print(match.team_names)
print(match.team_numbers)
return '{}: {} ({}) & {} ({}) vs. {} ({}) & {} ({})'.format(
match.match_number,
match.team_names[0], match.team_numbers[0],
match.team_names[1], match.team_numbers[1],
match.team_names[2], match.team_numbers[2],
match.team_names[3], match.team_numbers[3],
)
class Remote(object):
def __init__(self):
self.lc = lcm.LCM('udpm://239.255.76.67:7667?ttl=1')
self.lc.subscribe('Schedule/Schedule', self.handle_schedule)
self.lc.subscribe('Timer/Time', self.handle_time)
self.match_list_box = None
self.match_control = None
self.thread = threading.Thread(target=self._loop)
self.thread.daemon = True
def start(self):
self.thread.start()
def _loop(self):
while True:
try:
self.lc.handle()
except Exception as ex:
print('Got exception while handling lcm message', ex)
def handle_schedule(self, channel, data):
msg = Forseti.Schedule.decode(data)
for i in range(msg.num_matches):
self.match_list_box.Insert(format_match(msg.matches[i]), i,
msg.matches[i])
def handle_time(self, channel, data):
msg = Forseti.Time.decode(data)
#wx.CallAfter(self.time_text.SetLabel, format_time(msg.game_time_so_far))
wx.CallAfter(self.match_control.set_time, msg)
def do_load(self, clear_first):
if clear_first:
self.match_list_box.Clear()
msg = Forseti.ScheduleLoadCommand()
msg.clear_first = clear_first
print('Requesting load')
self.lc.publish('Schedule/Load', msg.encode())
def do_save(self, match):
self.lc.publish('Match/Save', match.encode())
def do_init(self, match):
self.lc.publish('Match/Init', match.encode())
def do_time_ctrl(self, command):
msg = Forseti.TimeControl()
msg.command_name = command
self.lc.publish('Timer/Control', msg.encode())
def do_go(self):
self.do_time_ctrl('start')
def do_pause(self):
self.do_time_ctrl('pause')
def format_time(seconds):
return '{}:{:02}'.format(seconds // 60,
seconds % 60)
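# Illustrative check (not part of the original module): the integer math above
# means format_time(125) returns '2:05' and format_time(59) returns '0:59'.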
def main():
app = wx.App()
remote = Remote()
MainWindow(remote, None)
remote.start()
remote.do_load(False)
app.MainLoop()
if __name__ == '__main__':
main()
| pioneers/forseti | wxdash.py | Python | apache-2.0 | 10,763 | 0.00288 |
#!/bin/python2
"""
#####################################
# iSpike-like joint conversion #
#####################################
"""
from sys import exit
class sensNetIn():
    '''
    This object takes a joint angle (in degrees) as input
    and returns as output a list of (neuron index, input current) pairs,
    encoding the angle as a Gaussian population code over the neuron array.
    '''
def __init__(self,
dof=0, #FIXME: NOT USED
std=0.5,
neuron_number=10,
min_angle=-90,
max_angle=90,
current_factor=1, #FIXME: unused?!
constant_current=0,
peak_current=40):
import scipy.stats
self._max_angle = max_angle
self._min_angle = min_angle
self._constant_current = constant_current
self.dof = dof #Accessed by what?
self.size = neuron_number #Linearized array?
if self.size < 2:
            exit('ERROR: pySpike neuron size is less than 2!')
# Angle covered by each neuron
angle_dist = (max_angle - min_angle) / (self.size - 1)
# Standard deviation expressed in angle
sd_angle = std * angle_dist
# Create normal distribution and calculate current factor
self._normal_distribution = scipy.stats.norm(0, sd_angle)
self._current_factor = peak_current / self._normal_distribution.pdf(0)
# Populate the angles
self._neuron_angles = []
for n in range(self.size):
self._neuron_angles.append(min_angle + n * angle_dist)
self._angle = False
def step(self, input_angle):
'''
Set the value of the current input. Allows getCurrent()
'''
# Check if angle is in range
if input_angle > self._max_angle:
print("ERROR: input angle not in range! (%d is too high)"
% (input_angle))
self._angle = self._max_angle
elif input_angle < self._min_angle:
print("ERROR: input angle not in range! (%d is too low)"
% (input_angle))
self._angle = self._min_angle
else:
self._angle = input_angle
# Set input current to neurons
current_input = []
for i in range(self.size):
current_input.append(
(i
, self._constant_current + self._current_factor
* self._normal_distribution.pdf(
self._neuron_angles[i] - self._angle)
))
return current_input
class sensNetOut():
def __init__(self,
neuron_idx,
min_angle=-90, #The minimum angle to read
max_angle=90, #The maximum angle to read
decay_rate=0.25, #The rate of decay of the angle variables
#FIXME: current_increment UNUSED!?
#Increment of the input current to the neurons by each spike
current_increment=10,
dof=0, #Degree of freedom of joint. FIXME: NOT USED
integration_steps=1 #Step after which integration occurs (1step = 1ms)
):
self.neuron_idx = neuron_idx
neuron_number = len(neuron_idx)
if neuron_number < 2:
exit("FATAL ERROR: pySpike - You need at least 2 output neurons")
# Calculate angle covered by each current variable
angle_dist = (max_angle - min_angle) / (neuron_number - 1)
# Set up current variables
current_variables = [0.0] * neuron_number
# Populate the current variable angles
current_variable_angles = [0.0] * neuron_number
for n in range(neuron_number):
current_variable_angles[n] = min_angle + n * angle_dist
#Set globals
self.current_variables = current_variables
self.current_variable_angles = current_variable_angles
self.decay_rate = decay_rate
self.neuron_number = neuron_number
self.min_angle = min_angle
self.max_angle = max_angle
self.integration_steps = integration_steps - 1 #check at nth, not nth+1
self.missing_steps = integration_steps
self.current_angle = None
def step(self, fired):
#same as iSpike setFiring()
pattern = [1 if n in fired else 0 for n in self.neuron_idx]
self.current_variables =\
[x + y for x, y in zip(pattern, self.current_variables)]
self.missing_steps -= 1
#same as iSpike step()
if not self.missing_steps:
for d in range(0, len(self.current_variables)):
self.current_variables[d] *= self.decay_rate
angle_sum = 0
weighted_sum = 0
for n in range(0, self.neuron_number):
                angle_sum += (self.current_variables[n]
                              * self.current_variable_angles[n])
                weighted_sum += self.current_variables[n]
new_angle = 0
if weighted_sum:
new_angle = angle_sum / weighted_sum
if new_angle > self.max_angle:
print "ERROR: new angle (%d) > maximum" % (new_angle)
new_angle = self.max_angle
elif new_angle < self.min_angle:
print "ERROR: new angle (%d) < minimum" % (new_angle)
new_angle = self.min_angle
self.current_angle = new_angle
self.missing_steps = self.integration_steps
return self.current_angle
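# ---------------------------------------------------------------------------
# Minimal round-trip sketch (illustrative only, not part of the original
# module); assumes scipy is installed and uses arbitrary example numbers.
if __name__ == '__main__':
    encoder = sensNetIn(neuron_number=10)
    print(encoder.step(45))  # angle -> list of (neuron index, current) pairs
    decoder = sensNetOut(neuron_idx=range(10), integration_steps=1)
    # pretend the two neurons whose preferred angles sit nearest +45 deg fired
    print(decoder.step(fired=[6, 7]))  # -> decoded angle estimate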
| nico202/pyNeMo | libs/pySpike.py | Python | gpl-2.0 | 5,430 | 0.005893 |
'''author@esilgard'''
#
# Copyright (c) 2015-2016 Fred Hutchinson Cancer Research Center
#
# Licensed under the Apache License, Version 2.0: http://www.apache.org/licenses/LICENSE-2.0
#
import re, os
import global_strings as gb
PATH = os.path.dirname(os.path.realpath(__file__)) + os.path.sep
class SecondaryField(object):
'''
extract the value of a field which is dependant on another value
'''
__version__ = 'SecondaryField1.0'
def __init__(self):
self.field_name = 'Default'
        self.standardization_dictionary = {}
self.return_d = {}
## variable window sizes based on primary field string matches ##
self.pre_window = 0
self.post_window = 0
self.strings1 = r''
self.strings2 = r''
self.patterns = []
def get_version(self):
''' return algorithm version '''
return self.__version__
def get(self, primary_field_dictionary, text):
''' retrieve evidence of a data element based on the location/value of another element '''
## general sets to track and aggregate overall findings for the text
finding_set = set([])
start_stops_set = set([])
## a dictionary of offsets for each string match in primary field dictionary
primary_offsets = primary_field_dictionary[gb.STARTSTOPS]
## loop through primary field matches
for offsets in primary_offsets:
## loop through secondary patterns
for pattern in self.patterns:
## find first match in each pattern in restricted window around primary value
p = re.match(pattern[0], text[offsets[gb.START]-self.pre_window: \
offsets[gb.STOP]+self.post_window].lower(), re.DOTALL)
if p:
## should normalize more when there are clear standards
if p.group(pattern[1]) in self.standardization_dictionary:
finding_set.add(self.standardization_dictionary[p.group(pattern[1])])
else:
finding_set.add(p.group(pattern[1]))
start_stops_set.add((p.start(pattern[1]) + (offsets[gb.START]-30), \
p.end(pattern[1]) + (offsets[gb.START]-30)))
if finding_set:
## initial confidence is set at the primary field's confidence level
confidence = float(primary_field_dictionary[gb.CONFIDENCE])
## multiple contradictory finds lowers confidence
if len(finding_set) > 1:
confidence = confidence * .75
self.return_d = {gb.NAME: self.field_name, \
gb.KEY: primary_field_dictionary[gb.KEY], \
gb.TABLE: primary_field_dictionary[gb.TABLE], \
gb.VERSION: self.get_version(), \
gb.VALUE: ';'.join(finding_set), \
gb.CONFIDENCE: ('%.2f' % confidence), \
gb.STARTSTOPS: [{gb.START: char[0], gb.STOP: char[1]} \
for char in start_stops_set]}
return self.return_d
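# Illustrative subclass sketch (not part of the original module): concrete
# secondary fields are expected to fill in the window sizes and regex
# patterns; the names and values below are hypothetical. Note that get()
# above computes match offsets against a hard-coded 30-character pre-window.
class HypotheticalGradeField(SecondaryField):
    ''' example subclass showing the expected configuration surface '''
    def __init__(self):
        SecondaryField.__init__(self)
        self.field_name = 'Grade'
        self.standardization_dictionary = {'iii': '3'}
        self.pre_window = 30
        self.post_window = 30
        # each pattern is a (regex string, group name) pair used by get()
        self.patterns = [(r'.*?grade (?P<grade>[0-9iv]+)', 'grade')]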
| LabKey/argos_nlp | fhcrc_pathology/SecondaryField.py | Python | apache-2.0 | 3,282 | 0.012492 |
import bpy
from .utils import MultiCamContext
class MultiCamFadeError(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return repr(self.msg)
class BlendObj(object):
def __init__(self, **kwargs):
self.children = set()
p = self.parent = kwargs.get('parent')
if p is not None:
kwargs.setdefault('context', p.context)
self.context = kwargs.get('context')
self.blend_obj = kwargs.get('blend_obj')
if hasattr(self.__class__, 'fcurve_property'):
self.fcurve_property = self.__class__.fcurve_property
if not hasattr(self, 'fcurve_property'):
self.fcurve_property = kwargs.get('fcurve_property')
@property
def blend_obj(self):
return getattr(self, '_blend_obj', None)
@blend_obj.setter
def blend_obj(self, value):
old = self.blend_obj
if value == old:
return
self._blend_obj = value
self.on_blend_obj_set(value, old)
def on_blend_obj_set(self, new, old):
self._fcurve = None
@property
def context(self):
context = getattr(self, '_context', None)
if context is None:
context = bpy.context
return context
@context.setter
def context(self, value):
old = getattr(self, '_context', None)
if old == value:
return
self._context = value
self.on_context_set(value, old)
def on_context_set(self, new, old):
self._fcurve = None
for obj in self.children:
obj.context = new
@property
def fcurve(self):
fc = getattr(self, '_fcurve', None)
if fc is None:
fc = self._fcurve = self.get_fcurve()
return fc
def get_fcurve(self):
path = self.blend_obj.path_from_id()
action = self.context.scene.animation_data.action
if action is None:
return None
prop = self.fcurve_property
for fc in action.fcurves.values():
if path not in fc.data_path:
continue
if fc.data_path.split('.')[-1] != prop:
continue
return fc
def remove_fcurve(self):
if self.fcurve is None:
return
action = self.context.scene.animation_data.action
action.fcurves.remove(self.fcurve)
self._fcurve = None
def iter_keyframes(self):
for kf in self.fcurve.keyframe_points.values():
yield kf.co
def insert_keyframe(self, frame, value, prop=None, **kwargs):
if prop is None:
prop = self.fcurve_property
if self.fcurve is None:
self.blend_obj.keyframe_insert(prop, frame=frame)
kf = self.get_keyframe(frame)
kf.co[1] = value
else:
kf = self.fcurve.keyframe_points.insert(frame, value)
for key, val in kwargs.items():
setattr(kf, key, val)
return kf
def get_keyframe(self, frame):
for kf in self.fcurve.keyframe_points.values():
if kf.co[0] == frame:
return kf
def add_child(self, cls, **kwargs):
kwargs.setdefault('parent', self)
obj = cls(**kwargs)
self.children.add(obj)
return obj
def del_child(self, obj):
self.children.discard(obj)
class MultiCam(BlendObj):
fcurve_property = 'multicam_source'
def __init__(self, **kwargs):
super(MultiCam, self).__init__(**kwargs)
self.mc_fader = self.add_child(MultiCamFade)
self.cuts = {}
self.strips = {}
def bake_strips(self):
if not len(self.cuts):
self.build_cuts()
self.build_strip_keyframes()
self.blend_obj.mute = True
def build_cuts(self):
for frame, channel in self.iter_keyframes():
self.cuts[frame] = channel
if channel not in self.strips:
self.get_strip_from_channel(channel)
def build_fade(self, fade=None, frame=None):
if fade is None and frame is not None:
fade = self.mc_fader.build_fade(frame)
if fade is None:
return
for channel in range(1, self.blend_obj.channel):
if channel not in self.strips:
self.get_strip_from_channel(channel)
if channel not in self.strips:
continue
self.strips[channel].build_fade(fade)
def build_fades(self):
self.mc_fader.build_fades()
def build_strip_keyframes(self):
for strip in self.strips.values():
strip.build_keyframes()
def get_strip_from_channel(self, channel):
for s in self.context.scene.sequence_editor.sequences:
if s.channel == channel:
source = self.add_child(MulticamSource, blend_obj=s)
self.strips[channel] = source
return source
class MultiCamFade(BlendObj):
def __init__(self, **kwargs):
self.multicam = kwargs.get('parent', kwargs.get('multicam'))
self.fade_props = {}
self.fades = {}
super(MultiCamFade, self).__init__(**kwargs)
if self.blend_obj is None:
self.blend_obj = self.get_fade_prop_group()
def on_blend_obj_set(self, new, old):
for prop in self.fade_props.values():
self.del_child(prop)
self.fade_props.clear()
self.fades.clear()
if new is None:
return
self.get_fade_props()
def get_fade_prop_group(self):
mc_data_path = self.multicam.blend_obj.path_from_id()
return self.context.scene.multicam_fader_properties.get(mc_data_path)
def get_fade_props(self):
action = self.context.scene.animation_data.action
group_name = 'Multicam Fader (%s)' % (self.multicam.blend_obj.name)
group = action.groups.get(group_name)
for fc in group.channels:
key = fc.data_path.split('.')[-1]
fade_prop = self.add_child(MultiCamFadeProp, fcurve_property=key)
self.fade_props[key] = fade_prop
def build_fade(self, frame):
self.build_fades(frame)
return self.fades.get(frame)
def build_fades(self, fade_frame=None):
prop_iters = {}
for key, prop in self.fade_props.items():
prop_iters[key] = prop.iter_keyframes()
def find_next_fade(frame=None):
prop_vals = {'start':{}, 'end':{}}
start_frame = None
try:
for key, prop in prop_iters.items():
frame, value = next(prop)
if start_frame is None:
start_frame = frame
elif frame != start_frame:
raise MultiCamFadeError('keyframes are not aligned: %s' % ({'frame':frame, 'prop_vals':prop_vals}))
prop_vals['start'][key] = value
except StopIteration:
return None, None, None
end_frame = None
for key, prop in prop_iters.items():
frame, value = next(prop)
if end_frame is None:
end_frame = frame
elif frame != end_frame:
raise MultiCamFadeError('keyframes are not aligned: %s' % ({'frame':frame, 'prop_vals':prop_vals}))
prop_vals['end'][key] = value
return start_frame, end_frame, prop_vals
while True:
need_update = False
start_frame, end_frame, prop_vals = find_next_fade()
if start_frame is None:
break
if fade_frame is not None and fade_frame != start_frame:
continue
d = {
'start_frame':start_frame,
'end_frame':end_frame,
'start_source':prop_vals['start']['start_source'],
'next_source':prop_vals['start']['next_source'],
}
if start_frame not in self.fades:
need_update = True
self.fades[start_frame] = d
else:
for key, val in self.fades[start_frame].items():
if d[key] != val:
need_update = True
self.fades[start_frame][key] = d[key]
if need_update:
self.multicam.build_fade(d)
if fade_frame is not None:
break
class MultiCamFadeProp(BlendObj):
def __init__(self, **kwargs):
super(MultiCamFadeProp, self).__init__(**kwargs)
self.blend_obj = self.parent.blend_obj
class MulticamSource(BlendObj):
fcurve_property = 'blend_alpha'
def __init__(self, **kwargs):
super(MulticamSource, self).__init__(**kwargs)
self.multicam = self.parent
self.mc_fader = self.multicam.mc_fader
self._keyframe_data = None
@property
def keyframe_data(self):
d = self._keyframe_data
if d is None:
d = self._keyframe_data = self.build_keyframe_data()
return d
def build_keyframe_data(self):
d = {}
cuts = self.multicam.cuts
channel = self.blend_obj.channel
is_active = False
is_first_keyframe = True
for frame in sorted(cuts.keys()):
cut = cuts[frame]
if cut == channel:
d[frame] = True
is_active = True
elif is_active:
d[frame] = False
is_active = False
elif is_first_keyframe:
d[frame] = False
is_first_keyframe = False
return d
def build_fade(self, fade):
channel = self.blend_obj.channel
start_frame = fade['start_frame']
end_frame = fade['end_frame']
start_ch = fade['start_source']
end_ch = fade['next_source']
if channel < min([start_ch, end_ch]):
## this strip won't be affected
return
if start_ch == channel:
if end_ch < channel:
values = [1., 0.]
else:
values = [1., 1.]
elif end_ch == channel:
if start_ch < channel:
values = [0., 1.]
else:
values = [1., 1.]
elif channel > max([start_ch, end_ch]) or channel < max([start_ch, end_ch]):
values = [0., 0.]
else:
return
self.insert_keyframe(start_frame, values[0], interpolation='BEZIER')
self.insert_keyframe(end_frame, values[1], interpolation='CONSTANT')
self.insert_keyframe(end_frame+1, 1., interpolation='CONSTANT')
def build_fades(self):
for start_frame in sorted(self.mc_fader.fades.keys()):
fade = self.mc_fader.fades[start_frame]
self.build_fade(fade)
def build_keyframes(self):
self.remove_fcurve()
for frame, is_active in self.keyframe_data.items():
if is_active:
value = 1.
else:
value = 0.
self.insert_keyframe(frame, value, interpolation='CONSTANT')
class MultiCamBakeStrips(bpy.types.Operator, MultiCamContext):
'''Bakes the mulicam source into the affected strips using opacity'''
bl_idname = 'sequencer.bake_multicam_strips'
bl_label = 'Bake Multicam Strips'
def execute(self, context):
mc = MultiCam(blend_obj=self.get_strip(context),
context=context)
mc.bake_strips()
return {'FINISHED'}
def register():
bpy.utils.register_class(MultiCamBakeStrips)
def unregister():
bpy.utils.unregister_class(MultiCamBakeStrips)
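# Illustrative usage (not part of the original add-on): once register() has
# run, the operator defined above can be invoked from a Blender console, e.g.
#     bpy.ops.sequencer.bake_multicam_strips()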
| nocarryr/blender-scripts | multicam_tools/multicam.py | Python | gpl-2.0 | 11,734 | 0.006051 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-21 11:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('submission', '0011_auto_20170921_0937'),
('identifiers', '0003_brokendoi_journal'),
]
operations = [
migrations.RemoveField(
model_name='brokendoi',
name='journal',
),
migrations.AddField(
model_name='brokendoi',
name='article',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='submission.Article'),
preserve_default=False,
),
]
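# Note (not part of the generated file): default=1 back-fills the new non-null
# foreign key for existing rows, and preserve_default=False removes that
# default from the model again once the migration has run.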
| BirkbeckCTP/janeway | src/identifiers/migrations/0004_auto_20170921_1113.py | Python | agpl-3.0 | 750 | 0.001333 |
# Copyright (c) 2013 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (C) 2013 Association of Universities for Research in Astronomy
# (AURA)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of AURA and its representatives may not be used to
# endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import email
import email.errors
import imp
import os
import re
import sysconfig
import tempfile
import textwrap
import fixtures
import mock
import pkg_resources
import six
import testtools
from testtools import matchers
import virtualenv
import wheel.install
from pbr import git
from pbr import packaging
from pbr.tests import base
PBR_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
class TestRepo(fixtures.Fixture):
"""A git repo for testing with.
Use of TempHomeDir with this fixture is strongly recommended as due to the
    lack of config --local in older gits, it will write to the user's global
configuration without TempHomeDir.
"""
def __init__(self, basedir):
super(TestRepo, self).__init__()
self._basedir = basedir
def setUp(self):
super(TestRepo, self).setUp()
base._run_cmd(['git', 'init', '.'], self._basedir)
base._config_git()
base._run_cmd(['git', 'add', '.'], self._basedir)
def commit(self, message_content='test commit'):
files = len(os.listdir(self._basedir))
path = self._basedir + '/%d' % files
open(path, 'wt').close()
base._run_cmd(['git', 'add', path], self._basedir)
base._run_cmd(['git', 'commit', '-m', message_content], self._basedir)
def uncommit(self):
base._run_cmd(['git', 'reset', '--hard', 'HEAD^'], self._basedir)
def tag(self, version):
base._run_cmd(
['git', 'tag', '-sm', 'test tag', version], self._basedir)
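# Illustrative fixture usage (not part of the original tests): a repo is
# normally driven through fixtures' useFixture, e.g.
#     repo = self.useFixture(TestRepo(self.package_dir))
#     repo.commit()
#     repo.tag('1.2.3')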
class GPGKeyFixture(fixtures.Fixture):
"""Creates a GPG key for testing.
It's recommended that this be used in concert with a unique home
directory.
"""
def setUp(self):
super(GPGKeyFixture, self).setUp()
tempdir = self.useFixture(fixtures.TempDir())
        gnupg_version_re = re.compile(r'^gpg\s.*\s(\d+)\.(\d+)\.(\d+)')
gnupg_version = base._run_cmd(['gpg', '--version'], tempdir.path)
for line in gnupg_version[0].split('\n'):
gnupg_version = gnupg_version_re.match(line)
if gnupg_version:
gnupg_version = (int(gnupg_version.group(1)),
int(gnupg_version.group(2)),
int(gnupg_version.group(3)))
break
else:
if gnupg_version is None:
gnupg_version = (0, 0, 0)
config_file = tempdir.path + '/key-config'
f = open(config_file, 'wt')
try:
if gnupg_version[0] == 2 and gnupg_version[1] >= 1:
f.write("""
%no-protection
%transient-key
""")
f.write("""
%no-ask-passphrase
Key-Type: RSA
Name-Real: Example Key
Name-Comment: N/A
Name-Email: example@example.com
Expire-Date: 2d
Preferences: (setpref)
%commit
""")
finally:
f.close()
# Note that --quick-random (--debug-quick-random in GnuPG 2.x)
# does not have a corresponding preferences file setting and
# must be passed explicitly on the command line instead
if gnupg_version[0] == 1:
gnupg_random = '--quick-random'
elif gnupg_version[0] >= 2:
gnupg_random = '--debug-quick-random'
else:
gnupg_random = ''
base._run_cmd(
['gpg', '--gen-key', '--batch', gnupg_random, config_file],
tempdir.path)
class Venv(fixtures.Fixture):
"""Create a virtual environment for testing with.
:attr path: The path to the environment root.
:attr python: The path to the python binary in the environment.
"""
def __init__(self, reason, modules=(), pip_cmd=None):
"""Create a Venv fixture.
:param reason: A human readable string to bake into the venv
file path to aid diagnostics in the case of failures.
:param modules: A list of modules to install, defaults to latest
pip, wheel, and the working copy of PBR.
:attr pip_cmd: A list to override the default pip_cmd passed to
python for installing base packages.
"""
self._reason = reason
if modules == ():
pbr = 'file://%s#egg=pbr' % PBR_ROOT
modules = ['pip', 'wheel', pbr]
self.modules = modules
if pip_cmd is None:
self.pip_cmd = ['-m', 'pip', 'install']
else:
self.pip_cmd = pip_cmd
def _setUp(self):
path = self.useFixture(fixtures.TempDir()).path
virtualenv.create_environment(path, clear=True)
python = os.path.join(path, 'bin', 'python')
command = [python] + self.pip_cmd + ['-U']
if self.modules and len(self.modules) > 0:
command.extend(self.modules)
self.useFixture(base.CapturedSubprocess(
'mkvenv-' + self._reason, command))
self.addCleanup(delattr, self, 'path')
self.addCleanup(delattr, self, 'python')
self.path = path
self.python = python
return path, python
class CreatePackages(fixtures.Fixture):
"""Creates packages from dict with defaults
:param package_dirs: A dict of package name to directory strings
{'pkg_a': '/tmp/path/to/tmp/pkg_a', 'pkg_b': '/tmp/path/to/tmp/pkg_b'}
"""
defaults = {
'setup.py': textwrap.dedent(six.u("""\
#!/usr/bin/env python
import setuptools
setuptools.setup(
setup_requires=['pbr'],
pbr=True,
)
""")),
'setup.cfg': textwrap.dedent(six.u("""\
[metadata]
name = {pkg_name}
"""))
}
def __init__(self, packages):
"""Creates packages from dict with defaults
:param packages: a dict where the keys are the package name and a
value that is a second dict that may be empty, containing keys of
filenames and a string value of the contents.
{'package-a': {'requirements.txt': 'string', 'setup.cfg': 'string'}
"""
self.packages = packages
def _writeFile(self, directory, file_name, contents):
path = os.path.abspath(os.path.join(directory, file_name))
path_dir = os.path.dirname(path)
if not os.path.exists(path_dir):
if path_dir.startswith(directory):
os.makedirs(path_dir)
else:
raise ValueError
with open(path, 'wt') as f:
f.write(contents)
def _setUp(self):
tmpdir = self.useFixture(fixtures.TempDir()).path
package_dirs = {}
for pkg_name in self.packages:
pkg_path = os.path.join(tmpdir, pkg_name)
package_dirs[pkg_name] = pkg_path
os.mkdir(pkg_path)
for cf in ['setup.py', 'setup.cfg']:
if cf in self.packages[pkg_name]:
contents = self.packages[pkg_name].pop(cf)
else:
contents = self.defaults[cf].format(pkg_name=pkg_name)
self._writeFile(pkg_path, cf, contents)
for cf in self.packages[pkg_name]:
self._writeFile(pkg_path, cf, self.packages[pkg_name][cf])
self.useFixture(TestRepo(pkg_path)).commit()
self.addCleanup(delattr, self, 'package_dirs')
self.package_dirs = package_dirs
return package_dirs
class TestPackagingInGitRepoWithCommit(base.BaseTestCase):
scenarios = [
('preversioned', dict(preversioned=True)),
('postversioned', dict(preversioned=False)),
]
def setUp(self):
super(TestPackagingInGitRepoWithCommit, self).setUp()
repo = self.useFixture(TestRepo(self.package_dir))
repo.commit()
def test_authors(self):
self.run_setup('sdist', allow_fail=False)
# One commit, something should be in the authors list
with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f:
body = f.read()
self.assertNotEqual(body, '')
def test_changelog(self):
self.run_setup('sdist', allow_fail=False)
with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f:
body = f.read()
# One commit, something should be in the ChangeLog list
self.assertNotEqual(body, '')
def test_manifest_exclude_honoured(self):
self.run_setup('sdist', allow_fail=False)
with open(os.path.join(
self.package_dir,
'pbr_testpackage.egg-info/SOURCES.txt'), 'r') as f:
body = f.read()
self.assertThat(
body, matchers.Not(matchers.Contains('pbr_testpackage/extra.py')))
self.assertThat(body, matchers.Contains('pbr_testpackage/__init__.py'))
def test_install_writes_changelog(self):
stdout, _, _ = self.run_setup(
'install', '--root', self.temp_dir + 'installed',
allow_fail=False)
self.expectThat(stdout, matchers.Contains('Generating ChangeLog'))
class TestPackagingInGitRepoWithoutCommit(base.BaseTestCase):
def setUp(self):
super(TestPackagingInGitRepoWithoutCommit, self).setUp()
self.useFixture(TestRepo(self.package_dir))
self.run_setup('sdist', allow_fail=False)
def test_authors(self):
# No commits, no authors in list
with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f:
body = f.read()
self.assertEqual(body, '\n')
def test_changelog(self):
# No commits, nothing should be in the ChangeLog list
with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f:
body = f.read()
self.assertEqual(body, 'CHANGES\n=======\n\n')
class TestPackagingWheels(base.BaseTestCase):
def setUp(self):
super(TestPackagingWheels, self).setUp()
self.useFixture(TestRepo(self.package_dir))
# Build the wheel
self.run_setup('bdist_wheel', allow_fail=False)
# Slowly construct the path to the generated whl
dist_dir = os.path.join(self.package_dir, 'dist')
relative_wheel_filename = os.listdir(dist_dir)[0]
absolute_wheel_filename = os.path.join(
dist_dir, relative_wheel_filename)
wheel_file = wheel.install.WheelFile(absolute_wheel_filename)
wheel_name = wheel_file.parsed_filename.group('namever')
# Create a directory path to unpack the wheel to
self.extracted_wheel_dir = os.path.join(dist_dir, wheel_name)
# Extract the wheel contents to the directory we just created
wheel_file.zipfile.extractall(self.extracted_wheel_dir)
wheel_file.zipfile.close()
def test_data_directory_has_wsgi_scripts(self):
# Build the path to the scripts directory
scripts_dir = os.path.join(
self.extracted_wheel_dir, 'pbr_testpackage-0.0.data/scripts')
self.assertTrue(os.path.exists(scripts_dir))
scripts = os.listdir(scripts_dir)
self.assertIn('pbr_test_wsgi', scripts)
self.assertIn('pbr_test_wsgi_with_class', scripts)
self.assertNotIn('pbr_test_cmd', scripts)
self.assertNotIn('pbr_test_cmd_with_class', scripts)
def test_generates_c_extensions(self):
built_package_dir = os.path.join(
self.extracted_wheel_dir, 'pbr_testpackage')
static_object_filename = 'testext.so'
soabi = get_soabi()
if soabi:
static_object_filename = 'testext.{0}.so'.format(soabi)
static_object_path = os.path.join(
built_package_dir, static_object_filename)
self.assertTrue(os.path.exists(built_package_dir))
self.assertTrue(os.path.exists(static_object_path))
class TestPackagingHelpers(testtools.TestCase):
def test_generate_script(self):
group = 'console_scripts'
entry_point = pkg_resources.EntryPoint(
name='test-ep',
module_name='pbr.packaging',
attrs=('LocalInstallScripts',))
header = '#!/usr/bin/env fake-header\n'
template = ('%(group)s %(module_name)s %(import_target)s '
'%(invoke_target)s')
generated_script = packaging.generate_script(
group, entry_point, header, template)
expected_script = (
'#!/usr/bin/env fake-header\nconsole_scripts pbr.packaging '
'LocalInstallScripts LocalInstallScripts'
)
self.assertEqual(expected_script, generated_script)
def test_generate_script_validates_expectations(self):
group = 'console_scripts'
entry_point = pkg_resources.EntryPoint(
name='test-ep',
module_name='pbr.packaging')
header = '#!/usr/bin/env fake-header\n'
template = ('%(group)s %(module_name)s %(import_target)s '
'%(invoke_target)s')
self.assertRaises(
ValueError, packaging.generate_script, group, entry_point, header,
template)
entry_point = pkg_resources.EntryPoint(
name='test-ep',
module_name='pbr.packaging',
attrs=('attr1', 'attr2', 'attr3'))
self.assertRaises(
ValueError, packaging.generate_script, group, entry_point, header,
template)
class TestPackagingInPlainDirectory(base.BaseTestCase):
def setUp(self):
super(TestPackagingInPlainDirectory, self).setUp()
def test_authors(self):
self.run_setup('sdist', allow_fail=False)
# Not a git repo, no AUTHORS file created
filename = os.path.join(self.package_dir, 'AUTHORS')
self.assertFalse(os.path.exists(filename))
def test_changelog(self):
self.run_setup('sdist', allow_fail=False)
# Not a git repo, no ChangeLog created
filename = os.path.join(self.package_dir, 'ChangeLog')
self.assertFalse(os.path.exists(filename))
def test_install_no_ChangeLog(self):
stdout, _, _ = self.run_setup(
'install', '--root', self.temp_dir + 'installed',
allow_fail=False)
self.expectThat(
stdout, matchers.Not(matchers.Contains('Generating ChangeLog')))
class TestPresenceOfGit(base.BaseTestCase):
def testGitIsInstalled(self):
with mock.patch.object(git,
'_run_shell_command') as _command:
_command.return_value = 'git version 1.8.4.1'
self.assertEqual(True, git._git_is_installed())
def testGitIsNotInstalled(self):
with mock.patch.object(git,
'_run_shell_command') as _command:
_command.side_effect = OSError
self.assertEqual(False, git._git_is_installed())
class TestNestedRequirements(base.BaseTestCase):
def test_nested_requirement(self):
tempdir = tempfile.mkdtemp()
requirements = os.path.join(tempdir, 'requirements.txt')
nested = os.path.join(tempdir, 'nested.txt')
with open(requirements, 'w') as f:
f.write('-r ' + nested)
with open(nested, 'w') as f:
f.write('pbr')
result = packaging.parse_requirements([requirements])
self.assertEqual(result, ['pbr'])
class TestVersions(base.BaseTestCase):
scenarios = [
('preversioned', dict(preversioned=True)),
('postversioned', dict(preversioned=False)),
]
def setUp(self):
super(TestVersions, self).setUp()
self.repo = self.useFixture(TestRepo(self.package_dir))
self.useFixture(GPGKeyFixture())
self.useFixture(base.DiveDir(self.package_dir))
def test_email_parsing_errors_are_handled(self):
mocked_open = mock.mock_open()
with mock.patch('pbr.packaging.open', mocked_open):
with mock.patch('email.message_from_file') as message_from_file:
message_from_file.side_effect = [
email.errors.MessageError('Test'),
{'Name': 'pbr_testpackage'}]
version = packaging._get_version_from_pkg_metadata(
'pbr_testpackage')
self.assertTrue(message_from_file.called)
self.assertIsNone(version)
def test_capitalized_headers(self):
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit('Sem-Ver: api-break')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('2.0.0.dev1'))
def test_capitalized_headers_partial(self):
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit('Sem-ver: api-break')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('2.0.0.dev1'))
def test_tagged_version_has_tag_version(self):
self.repo.commit()
self.repo.tag('1.2.3')
version = packaging._get_version_from_git('1.2.3')
self.assertEqual('1.2.3', version)
def test_non_canonical_tagged_version_bump(self):
self.repo.commit()
self.repo.tag('1.4')
self.repo.commit('Sem-Ver: api-break')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('2.0.0.dev1'))
def test_untagged_version_has_dev_version_postversion(self):
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit()
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.2.4.dev1'))
def test_untagged_pre_release_has_pre_dev_version_postversion(self):
self.repo.commit()
self.repo.tag('1.2.3.0a1')
self.repo.commit()
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.2.3.0a2.dev1'))
def test_untagged_version_minor_bump(self):
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit('sem-ver: deprecation')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.3.0.dev1'))
def test_untagged_version_major_bump(self):
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit('sem-ver: api-break')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('2.0.0.dev1'))
def test_untagged_version_has_dev_version_preversion(self):
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit()
version = packaging._get_version_from_git('1.2.5')
self.assertThat(version, matchers.StartsWith('1.2.5.dev1'))
def test_untagged_version_after_pre_has_dev_version_preversion(self):
self.repo.commit()
self.repo.tag('1.2.3.0a1')
self.repo.commit()
version = packaging._get_version_from_git('1.2.5')
self.assertThat(version, matchers.StartsWith('1.2.5.dev1'))
def test_untagged_version_after_rc_has_dev_version_preversion(self):
self.repo.commit()
self.repo.tag('1.2.3.0a1')
self.repo.commit()
version = packaging._get_version_from_git('1.2.3')
self.assertThat(version, matchers.StartsWith('1.2.3.0a2.dev1'))
def test_preversion_too_low_simple(self):
# That is, the target version is either already released or not high
# enough for the semver requirements given api breaks etc.
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit()
# Note that we can't target 1.2.3 anymore - with 1.2.3 released we
# need to be working on 1.2.4.
err = self.assertRaises(
ValueError, packaging._get_version_from_git, '1.2.3')
self.assertThat(err.args[0], matchers.StartsWith('git history'))
def test_preversion_too_low_semver_headers(self):
# That is, the target version is either already released or not high
# enough for the semver requirements given api breaks etc.
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit('sem-ver: feature')
# Note that we can't target 1.2.4, the feature header means we need
# to be working on 1.3.0 or above.
err = self.assertRaises(
ValueError, packaging._get_version_from_git, '1.2.4')
self.assertThat(err.args[0], matchers.StartsWith('git history'))
def test_get_kwargs_corner_cases(self):
# No tags:
git_dir = self.repo._basedir + '/.git'
get_kwargs = lambda tag: packaging._get_increment_kwargs(git_dir, tag)
def _check_combinations(tag):
self.repo.commit()
self.assertEqual(dict(), get_kwargs(tag))
self.repo.commit('sem-ver: bugfix')
self.assertEqual(dict(), get_kwargs(tag))
self.repo.commit('sem-ver: feature')
self.assertEqual(dict(minor=True), get_kwargs(tag))
self.repo.uncommit()
self.repo.commit('sem-ver: deprecation')
self.assertEqual(dict(minor=True), get_kwargs(tag))
self.repo.uncommit()
self.repo.commit('sem-ver: api-break')
self.assertEqual(dict(major=True), get_kwargs(tag))
self.repo.commit('sem-ver: deprecation')
self.assertEqual(dict(major=True, minor=True), get_kwargs(tag))
_check_combinations('')
self.repo.tag('1.2.3')
_check_combinations('1.2.3')
def test_invalid_tag_ignored(self):
# Fix for bug 1356784 - we treated any tag as a version, not just those
# that are valid versions.
self.repo.commit()
self.repo.tag('1')
self.repo.commit()
# when the tree is tagged and its wrong:
self.repo.tag('badver')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.0.1.dev1'))
# When the tree isn't tagged, we also fall through.
self.repo.commit()
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.0.1.dev2'))
# We don't fall through x.y versions
self.repo.commit()
self.repo.tag('1.2')
self.repo.commit()
self.repo.tag('badver2')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.2.1.dev1'))
# Or x.y.z versions
self.repo.commit()
self.repo.tag('1.2.3')
self.repo.commit()
self.repo.tag('badver3')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.2.4.dev1'))
# Or alpha/beta/pre versions
self.repo.commit()
self.repo.tag('1.2.4.0a1')
self.repo.commit()
self.repo.tag('badver4')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('1.2.4.0a2.dev1'))
# Non-release related tags are ignored.
self.repo.commit()
self.repo.tag('2')
self.repo.commit()
self.repo.tag('non-release-tag/2014.12.16-1')
version = packaging._get_version_from_git()
self.assertThat(version, matchers.StartsWith('2.0.1.dev1'))
def test_valid_tag_honoured(self):
# Fix for bug 1370608 - we converted any target into a 'dev version'
# even if there was a distance of 0 - indicating that we were on the
# tag itself.
self.repo.commit()
self.repo.tag('1.3.0.0a1')
version = packaging._get_version_from_git()
self.assertEqual('1.3.0.0a1', version)
def test_skip_write_git_changelog(self):
# Fix for bug 1467440
self.repo.commit()
self.repo.tag('1.2.3')
os.environ['SKIP_WRITE_GIT_CHANGELOG'] = '1'
version = packaging._get_version_from_git('1.2.3')
self.assertEqual('1.2.3', version)
def tearDown(self):
super(TestVersions, self).tearDown()
os.environ.pop('SKIP_WRITE_GIT_CHANGELOG', None)
class TestRequirementParsing(base.BaseTestCase):
def test_requirement_parsing(self):
pkgs = {
'test_reqparse':
{
'requirements.txt': textwrap.dedent("""\
bar
quux<1.0; python_version=='2.6'
requests-aws>=0.1.4 # BSD License (3 clause)
Routes>=1.12.3,!=2.0,!=2.1;python_version=='2.7'
requests-kerberos>=0.6;python_version=='2.7' # MIT
"""),
'setup.cfg': textwrap.dedent("""\
[metadata]
name = test_reqparse
[extras]
test =
foo
baz>3.2 :python_version=='2.7' # MIT
bar>3.3 :python_version=='2.7' # MIT # Apache
""")},
}
pkg_dirs = self.useFixture(CreatePackages(pkgs)).package_dirs
pkg_dir = pkg_dirs['test_reqparse']
# pkg_resources.split_sections uses None as the title of an
# anonymous section instead of the empty string. Weird.
expected_requirements = {
None: ['bar', 'requests-aws>=0.1.4'],
":(python_version=='2.6')": ['quux<1.0'],
":(python_version=='2.7')": ['Routes>=1.12.3,!=2.0,!=2.1',
'requests-kerberos>=0.6'],
'test': ['foo'],
"test:(python_version=='2.7')": ['baz>3.2', 'bar>3.3']
}
venv = self.useFixture(Venv('reqParse'))
bin_python = venv.python
# Two things are tested by this
        # 1) pbr properly parses markers from requirements.txt and setup.cfg
# 2) bdist_wheel causes pbr to not evaluate markers
self._run_cmd(bin_python, ('setup.py', 'bdist_wheel'),
allow_fail=False, cwd=pkg_dir)
egg_info = os.path.join(pkg_dir, 'test_reqparse.egg-info')
requires_txt = os.path.join(egg_info, 'requires.txt')
with open(requires_txt, 'rt') as requires:
generated_requirements = dict(
pkg_resources.split_sections(requires))
self.assertEqual(expected_requirements, generated_requirements)
def get_soabi():
try:
return sysconfig.get_config_var('SOABI')
except IOError:
pass
if 'pypy' in sysconfig.get_scheme_names():
# NOTE(sigmavirus24): PyPy only added support for the SOABI config var
# to sysconfig in 2015. That was well after 2.2.1 was published in the
# Ubuntu 14.04 archive.
for suffix, _, _ in imp.get_suffixes():
if suffix.startswith('.pypy') and suffix.endswith('.so'):
return suffix.split('.')[1]
return None
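# For reference (an assumption, not part of the original module): on CPython
# the SOABI config var looks like 'cpython-35m-x86_64-linux-gnu', yielding
# extension filenames such as 'testext.cpython-35m-x86_64-linux-gnu.so'.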
| klmitch/pbr | pbr/tests/test_packaging.py | Python | apache-2.0 | 28,790 | 0.000278 |
from __future__ import absolute_import
import logging
import requests
from six.moves.urllib.parse import quote
from sentry import options
logger = logging.getLogger(__name__)
def sms_available():
return bool(options.get('sms.twilio-account'))
def send_sms(body, to, from_=None):
account = options.get('sms.twilio-account')
if not account:
raise RuntimeError('SMS backend is not configured.')
if account[:2] != 'AC':
account = 'AC' + account
url = 'https://api.twilio.com/2010-04-01/Accounts/%s/Messages.json' % \
quote(account)
rv = requests.post(url, auth=(account,
options.get('sms.twilio-token')), data={
'To': to,
'From': options.get('sms.twilio-number'),
'Body': body,
})
if not rv.ok:
        logger.error('Failed to send text message to %s: (%s) %s', to,
                     rv.status_code, rv.content)
return False
return True
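# Illustrative usage sketch (not part of the original module); assumes the
# three `sms.twilio-*` options are set in the option store:
#     if sms_available():
#         send_sms('Sentry auth code: 123456', to='+15555550100')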
| JamesMura/sentry | src/sentry/utils/sms.py | Python | bsd-3-clause | 976 | 0.001025 |
# Foremast - Pipeline Tooling
#
# Copyright 2018 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .api_gateway_event import *
| gogoair/foremast | src/foremast/awslambda/api_gateway_event/__init__.py | Python | apache-2.0 | 661 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-08-07 13:08
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('main', '0009_remove_item_last_modified'),
]
operations = [
migrations.AddField(
model_name='item',
name='last_modified',
field=models.DateTimeField(auto_now=True, default=datetime.datetime(2016, 8, 7, 13, 8, 5, 518538, tzinfo=utc)),
preserve_default=False,
),
migrations.AddField(
model_name='item',
name='status',
field=models.BooleanField(default=True),
),
]
| detialiaj/pro | SYS/main/migrations/0010_auto_20160807_1508.py | Python | mit | 771 | 0.001297 |
"""Test the Dyson air quality component."""
import json
from unittest import mock
import asynctest
from libpurecool.dyson_pure_cool import DysonPureCool
from libpurecool.dyson_pure_state_v2 import DysonEnvironmentalSensorV2State
import homeassistant.components.dyson.air_quality as dyson
from homeassistant.components import dyson as dyson_parent
from homeassistant.components.air_quality import DOMAIN as AIQ_DOMAIN, \
ATTR_PM_2_5, ATTR_PM_10, ATTR_NO2
from homeassistant.helpers import discovery
from homeassistant.setup import async_setup_component
def _get_dyson_purecool_device():
"""Return a valid device as provided by the Dyson web services."""
device = mock.Mock(spec=DysonPureCool)
device.serial = 'XX-XXXXX-XX'
device.name = 'Living room'
device.connect = mock.Mock(return_value=True)
device.auto_connect = mock.Mock(return_value=True)
device.environmental_state.particulate_matter_25 = '0014'
device.environmental_state.particulate_matter_10 = '0025'
device.environmental_state.nitrogen_dioxide = '0042'
device.environmental_state.volatile_organic_compounds = '0035'
return device
def _get_config():
"""Return a config dictionary."""
return {dyson_parent.DOMAIN: {
dyson_parent.CONF_USERNAME: 'email',
dyson_parent.CONF_PASSWORD: 'password',
dyson_parent.CONF_LANGUAGE: 'GB',
dyson_parent.CONF_DEVICES: [
{
'device_id': 'XX-XXXXX-XX',
'device_ip': '192.168.0.1'
}
]
}}
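# The dict above is what async_setup_component receives; an illustrative
# configuration.yaml equivalent (an assumption, for orientation only) would be:
#     dyson:
#       username: email
#       password: password
#       language: GB
#       devices:
#         - device_id: XX-XXXXX-XX
#           device_ip: 192.168.0.1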
@asynctest.patch('libpurecool.dyson.DysonAccount.login', return_value=True)
@asynctest.patch('libpurecool.dyson.DysonAccount.devices',
return_value=[_get_dyson_purecool_device()])
async def test_purecool_aiq_attributes(devices, login, hass):
"""Test state attributes."""
await async_setup_component(hass, dyson_parent.DOMAIN, _get_config())
await hass.async_block_till_done()
fan_state = hass.states.get("air_quality.living_room")
attributes = fan_state.attributes
assert fan_state.state == '14'
assert attributes[ATTR_PM_2_5] == 14
assert attributes[ATTR_PM_10] == 25
assert attributes[ATTR_NO2] == 42
assert attributes[dyson.ATTR_VOC] == 35
@asynctest.patch('libpurecool.dyson.DysonAccount.login', return_value=True)
@asynctest.patch('libpurecool.dyson.DysonAccount.devices',
return_value=[_get_dyson_purecool_device()])
async def test_purecool_aiq_update_state(devices, login, hass):
"""Test state update."""
device = devices.return_value[0]
await async_setup_component(hass, dyson_parent.DOMAIN, _get_config())
await hass.async_block_till_done()
event = {
"msg": "ENVIRONMENTAL-CURRENT-SENSOR-DATA",
"time": "2019-03-29T10:00:01.000Z",
"data": {
"pm10": "0080",
"p10r": "0151",
"hact": "0040",
"va10": "0055",
"p25r": "0161",
"noxl": "0069",
"pm25": "0035",
"sltm": "OFF",
"tact": "2960"
}
}
device.environmental_state = \
DysonEnvironmentalSensorV2State(json.dumps(event))
for call in device.add_message_listener.call_args_list:
callback = call[0][0]
if type(callback.__self__) == dyson.DysonAirSensor:
callback(device.environmental_state)
await hass.async_block_till_done()
fan_state = hass.states.get("air_quality.living_room")
attributes = fan_state.attributes
assert fan_state.state == '35'
assert attributes[ATTR_PM_2_5] == 35
assert attributes[ATTR_PM_10] == 80
assert attributes[ATTR_NO2] == 69
assert attributes[dyson.ATTR_VOC] == 55
@asynctest.patch('libpurecool.dyson.DysonAccount.login', return_value=True)
@asynctest.patch('libpurecool.dyson.DysonAccount.devices',
return_value=[_get_dyson_purecool_device()])
async def test_purecool_component_setup_only_once(devices, login, hass):
"""Test if entities are created only once."""
config = _get_config()
await async_setup_component(hass, dyson_parent.DOMAIN, config)
await hass.async_block_till_done()
discovery.load_platform(hass, AIQ_DOMAIN,
dyson_parent.DOMAIN, {}, config)
await hass.async_block_till_done()
assert len(hass.data[dyson.DYSON_AIQ_DEVICES]) == 1
@asynctest.patch('libpurecool.dyson.DysonAccount.login', return_value=True)
@asynctest.patch('libpurecool.dyson.DysonAccount.devices',
return_value=[_get_dyson_purecool_device()])
async def test_purecool_aiq_without_discovery(devices, login, hass):
"""Test if component correctly returns if discovery not set."""
await async_setup_component(hass, dyson_parent.DOMAIN, _get_config())
await hass.async_block_till_done()
add_entities_mock = mock.MagicMock()
dyson.setup_platform(hass, None, add_entities_mock, None)
assert add_entities_mock.call_count == 0
@asynctest.patch('libpurecool.dyson.DysonAccount.login', return_value=True)
@asynctest.patch('libpurecool.dyson.DysonAccount.devices',
return_value=[_get_dyson_purecool_device()])
async def test_purecool_aiq_empty_environment_state(devices, login, hass):
"""Test device with empty environmental state."""
await async_setup_component(hass, dyson_parent.DOMAIN, _get_config())
await hass.async_block_till_done()
device = hass.data[dyson.DYSON_AIQ_DEVICES][0]
device._device.environmental_state = None
assert device.state is None
assert device.particulate_matter_2_5 is None
assert device.particulate_matter_10 is None
assert device.nitrogen_dioxide is None
assert device.volatile_organic_compounds is None
| DavidLP/home-assistant | tests/components/dyson/test_air_quality.py | Python | apache-2.0 | 5,733 | 0 |
"""
Copyright 2017-present Airbnb, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import argparse
import os
import jmespath
from mock import patch, MagicMock
from streamalert.alert_processor import main as alert_processor
from streamalert.alert_processor.helpers import compose_alert
from streamalert.alert_processor.outputs.output_base import OutputDispatcher
from streamalert.classifier import classifier
from streamalert.rules_engine import rules_engine
from streamalert.shared import rule
from streamalert.shared.config import ConfigError
from streamalert.shared.logger import get_logger
from streamalert.shared.stats import RuleStatisticTracker
from streamalert_cli.helpers import check_credentials
from streamalert_cli.test.format import format_green, format_red, format_underline, format_yellow
from streamalert_cli.test.mocks import LookupTableMocks, ThreatIntelMocks
from streamalert_cli.test.event_file import TestEventFile
from streamalert_cli.utils import (
CLICommand,
DirectoryType,
generate_subparser,
UniqueSortedFileListAction,
UniqueSortedListAction,
)
LOGGER = get_logger(__name__)
class TestCommand(CLICommand):
description = 'Perform various integration/functional tests'
@classmethod
def setup_subparser(cls, subparser):
"""Add the test subparser: manage.py test"""
test_subparsers = subparser.add_subparsers(dest='test subcommand', required=True)
cls._setup_test_classifier_subparser(test_subparsers)
cls._setup_test_rules_subparser(test_subparsers)
cls._setup_test_live_subparser(test_subparsers)
@classmethod
def _setup_test_classifier_subparser(cls, subparsers):
"""Add the test validation subparser: manage.py test classifier [options]"""
test_validate_parser = generate_subparser(
subparsers,
'classifier',
description='Validate defined log schemas using integration test files',
subcommand=True
)
cls._add_default_test_args(test_validate_parser)
@classmethod
def _setup_test_rules_subparser(cls, subparsers):
"""Add the test rules subparser: manage.py test rules [options]"""
test_rules_parser = generate_subparser(
subparsers,
'rules',
description='Test rules using integration test files',
subcommand=True
)
# Flag to run additional stats during testing
test_rules_parser.add_argument(
'-s',
'--stats',
action='store_true',
            help='Enable outputting of statistical information on rules that run'
)
        # Validate the provided repetition value
        def _validate_repetition(val):
            """Make sure the input is between 1 and 1000"""
            err = ('Invalid repetition value [{}]. Must be an integer between 1 '
                   'and 1000').format(val)
            try:
                count = int(val)
            except (TypeError, ValueError):
                raise test_rules_parser.error(err)
            if not 1 <= count <= 1000:
                raise test_rules_parser.error(err)
            return count
# flag to run these tests a given number of times
test_rules_parser.add_argument(
'-n',
'--repeat',
default=1,
            type=_validate_repetition,
            help='Number of times to repeat the tests, as a form of performance testing'
)
cls._add_default_test_args(test_rules_parser)
@classmethod
def _setup_test_live_subparser(cls, subparsers):
"""Add the test live subparser: manage.py test live [options]"""
test_live_parser = generate_subparser(
subparsers,
'live',
description=(
'Run end-to-end tests that will attempt to send alerts to each rule\'s outputs'
),
subcommand=True
)
cls._add_default_test_args(test_live_parser)
@staticmethod
def _add_default_test_args(test_parser):
"""Add the default arguments to the test parsers"""
test_filter_group = test_parser.add_mutually_exclusive_group(required=False)
# add the optional ability to test specific files
test_filter_group.add_argument(
'-f',
'--test-files',
dest='files',
nargs='+',
help='Full path to one or more file(s) to test, separated by spaces',
action=UniqueSortedFileListAction,
type=argparse.FileType('r'),
default=[]
)
# add the optional ability to test specific rules
test_filter_group.add_argument(
'-r',
'--test-rules',
dest='rules',
nargs='+',
            help='One or more rules to test, separated by spaces',
action=UniqueSortedListAction,
default=[]
)
# add the ability to specify rule directories to test
test_parser.add_argument(
'-d',
'--rules-dir',
            help='Path to one or more directories containing rules, separated by spaces',
nargs='+',
action=UniqueSortedListAction,
type=DirectoryType(),
default=['rules']
)
# Add the optional ability to log verbosely or use quite logging for tests
verbose_group = test_parser.add_mutually_exclusive_group(required=False)
verbose_group.add_argument(
'-v',
'--verbose',
action='store_true',
help='Output additional information during testing'
)
verbose_group.add_argument(
'-q',
'--quiet',
action='store_true',
help='Suppress output for passing tests, only logging if there is a failure'
)
@classmethod
def handler(cls, options, config):
"""Handler for starting the test framework
Args:
options (argparse.Namespace): Parsed arguments
config (CLIConfig): Loaded StreamAlert config
Returns:
bool: False if errors occurred, True otherwise
"""
result = True
opts = vars(options)
repeat = opts.get('repeat', 1)
for i in range(repeat):
if repeat != 1:
print('\nRepetition #', i+1)
            result = TestRunner(options, config).run() and result
if opts.get('stats'):
print(RuleStatisticTracker.statistics_info())
return result
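# Illustrative CLI usage (not part of the original module); the flags mirror
# the parsers defined in TestCommand above:
#     python manage.py test rules --test-rules my_rule --stats --repeat 5
#     python manage.py test classifier --test-files rules/example/test.json -v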
class TestRunner:
"""TestRunner to handle running various tests"""
class Types:
"""Simple types enum for test types"""
CLASSIFY = 'classifier'
RULES = 'rules'
LIVE = 'live'
def __init__(self, options, config):
self._config = config
self._options = options
self._type = options.subcommand
self._files_filter = options.files
self._rules = options.rules
self._rules_dirs = options.rules_dir
self._rules_engine = self._setup_rules_engine(options.rules_dir)
self._verbose = options.verbose
self._quiet = options.quiet
self._s3_mocker = patch('streamalert.classifier.payload.s3.boto3.resource').start()
self._tested_rules = set()
self._passed = 0
self._failed = 0
prefix = self._config['global']['account']['prefix']
env = {
'STREAMALERT_PREFIX': prefix,
'AWS_ACCOUNT_ID': self._config['global']['account']['aws_account_id'],
'ALERTS_TABLE': '{}_streamalert_alerts'.format(prefix),
}
if 'stats' in options and options.stats:
env['STREAMALERT_TRACK_RULE_STATS'] = '1'
patch.dict(os.environ, env).start()
@staticmethod
def _run_classification(record):
"""Create a fresh classifier and classify the record, returning the result"""
with patch.object(classifier, 'SQSClient'), patch.object(classifier, 'FirehoseClient'):
_classifier = classifier.Classifier()
return _classifier.run(records=[record])
@staticmethod
@patch.object(rules_engine, 'AlertForwarder')
@patch('rules.helpers.base.random_bool', return_value=True)
@patch.object(rules_engine.RulesEngine, '_load_rule_table', return_value=None)
def _setup_rules_engine(dirs, *_):
"""Create a fresh rules engine and process the record, returning the result"""
return rules_engine.RulesEngine(*dirs)
def _run_rules_engine(self, record):
"""Create a fresh rules engine and process the record, returning the result"""
with patch.object(rules_engine.ThreatIntel, '_query') as ti_mock:
ti_mock.side_effect = ThreatIntelMocks.get_mock_values
# pylint: disable=protected-access
self._rules_engine._lookup_tables._tables.clear()
for table in LookupTableMocks.get_mock_values():
self._rules_engine._lookup_tables._tables[table.table_name] = table
return self._rules_engine.run(records=record)
@staticmethod
def _run_alerting(record):
"""Create a fresh alerts processor and send the alert(s), returning the result"""
with patch.object(alert_processor, 'AlertTable'):
alert_proc = alert_processor.AlertProcessor()
return alert_proc.run(event=record.dynamo_record())
def _check_prereqs(self):
if self._type == self.Types.LIVE:
return check_credentials()
return True
def _finalize(self):
summary = [
format_underline('\nSummary:\n'),
'Total Tests: {}'.format(self._passed + self._failed),
format_green('Pass: {}'.format(self._passed)) if self._passed else 'Pass: 0',
format_red('Fail: {}\n'.format(self._failed)) if self._failed else 'Fail: 0\n',
]
print('\n'.join(summary))
# If rule are being tested and no filtering is being performed, log any untested rules
if self._testing_rules and not self._is_filtered:
all_rules = set(rule.Rule.rule_names()) - rule.Rule.disabled_rules()
untested_rules = sorted(all_rules.difference(self._tested_rules))
if not untested_rules:
return
print(format_yellow('No test events configured for the following rules:'))
for rule_name in untested_rules:
print(format_yellow(rule_name))
@property
def _is_filtered(self):
return bool(self._files_filter or self._rules)
@property
def _testing_rules(self):
return self._type in {self.Types.RULES, self.Types.LIVE}
def _process_directory(self, directory):
"""Process rules and test files in the the rule directory"""
print('\nRunning tests for files found in: {}'.format(directory))
for root, event_files in self._get_test_files(directory):
for event_file in event_files:
full_path = os.path.join(root, event_file)
if self._files_filter and full_path not in self._files_filter:
continue
self._process_test_file(full_path)
def _process_test_file(self, test_file_path):
"""Process an individual test file"""
# Iterate over the individual test events in the file
event_file = TestEventFile(test_file_path)
for event in event_file.process_file(self._config, self._verbose, self._testing_rules):
# Each test event should be tied to a cluster, via the configured data_sources
# Reset the CLUSTER env var for each test, since it could differ between each event
# This env var is used from within the classifier to load the proper cluster config
if 'CLUSTER' in os.environ:
del os.environ['CLUSTER']
for cluster_name, cluster_value in self._config['clusters'].items():
if event.service not in cluster_value['data_sources']:
LOGGER.debug(
'Cluster "%s" does not have service "%s" configured as a data source',
cluster_name,
event.service
)
continue
sources = set(cluster_value['data_sources'][event.service])
if event.source not in sources:
LOGGER.debug(
'Cluster "%s" does not have the source "%s" configured as a data source '
'for service "%s"',
cluster_name,
event.source,
event.service
)
continue
# If we got here, then this cluster is actually configured for this data source
os.environ['CLUSTER'] = cluster_name
break
# A misconfigured test event and/or cluster config can cause this to be unset
if 'CLUSTER' not in os.environ:
error = (
'Test event\'s "service" ({}) and "source" ({}) are not defined within '
'the "data_sources" of any configured clusters: {}:{}'
).format(event.service, event.source, event_file.path, event.index)
raise ConfigError(error)
classifier_result = self._run_classification(event.record)
event.set_classified_result(classifier_result)
if not event:
continue
# Ensure this event actually contains any specific rules, if filtering is being used
if not event.check_for_rules(self._rules):
continue
if event.classify_only:
continue # Do not run rules on events that are only for validation
self._tested_rules.update(event.expected_rules)
if self._type in {self.Types.RULES, self.Types.LIVE}:
event.alerts = self._run_rules_engine(event.classified_log.sqs_messages)
if event.publisher_tests:
runner = PublisherTestRunner()
runner.run_publisher_tests(event)
if self._type == self.Types.LIVE:
for alert in event.alerts:
alert_result = self._run_alerting(alert)
event.add_live_test_result(alert.rule_name, alert_result)
self._passed += event_file.passed
self._failed += event_file.failed
            # It is possible for a test event to have no results but still
            # contain errors, so only print when there is something to show.
            # Quiet mode is overridden if not all of the events passed.
if event_file.error or not (self._quiet and event_file.all_passed):
if event_file.should_print:
print(event_file)
def run(self):
"""Run the tests"""
if not self._check_prereqs():
            return False
for directory in self._rules_dirs:
# The CLI checks if these directories exist, so no need to check here
self._process_directory(directory)
self._finalize()
return self._failed == 0
@staticmethod
def _get_test_files(directory):
"""Helper to get rule test files
Args:
directory (str): Path to directory containing test files
Yields:
str: Path to test event file
"""
for root, _, test_event_files in os.walk(directory):
# Simple filter to remove any non-json files first
files = [
file for file in sorted(test_event_files)
if os.path.splitext(file)[1] == '.json'
]
if not files:
continue
yield root, files
class PublisherTestRunner:
PUBLISHER_CONDITIONALS = {
'is': {
'comparator': lambda subject, predicate: subject == predicate,
'clause': 'should have been',
},
'in': {
            'comparator': lambda s, p: s in p,  # works for both lists and strings
'clause': 'should have been one of'
},
'contains': {
'comparator': lambda s, p: p in s,
'clause': 'should have contained'
}
}
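    # A publisher test is either a triple or a dict; illustrative examples
    # (hypothetical JMESPath expressions and values):
    #   ["Records[0].rule_name", "is", "my_rule"]
    #   {"jmespath_expression": "Records[0].title", "condition": "contains",
    #    "value": "Alert"}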
def run_publisher_tests(self, event):
"""
Runs all publishers and compares their results to the suite of configured publisher tests.
Args:
event (TestEvent): The integration test
"""
for alert in event.alerts:
publication_results = self._run_publishers(alert)
publisher_test_results = []
for output, individual_tests in event.publisher_tests.items():
for publisher_test in individual_tests:
if isinstance(publisher_test, list):
if len(publisher_test) != 3:
publisher_test_results.append({
'success': False,
'error': (
                                    'Invalid publisher test specified: {}\n'
'Publisher test must be a triple with elements: '
'(jsonpath, condition, condition_value)'
).format(publisher_test),
'output_descriptor': output,
})
continue
jsonpath, condition, condition_value = publisher_test
elif isinstance(publisher_test, dict):
valid_test_syntax = (
'jmespath_expression' in publisher_test and
'condition' in publisher_test and
'value' in publisher_test
)
if not valid_test_syntax:
publisher_test_results.append({
'success': False,
'error': (
                                    'Invalid publisher test specified: {}\n'
'Publisher test must be a dict with keys: '
'(jmespath_expression, condition, value)'
).format(publisher_test),
'output_descriptor': output,
})
continue
jsonpath = publisher_test['jmespath_expression']
condition = publisher_test['condition']
condition_value = publisher_test['value']
else:
publisher_test_results.append({
'success': False,
'error': (
                                'Invalid publisher test specified: {}\n'
                                'Publisher test must be list or dict'
                            ).format(publisher_test),
'output_descriptor': output,
})
continue
if output not in publication_results:
publisher_test_results.append({
'success': False,
'error': (
'No such output {} was configured for this alert'
).format(output),
'output_descriptor': output,
})
continue
publication = publication_results[output]['publication']
subject_value = jmespath.search(jsonpath, publication)
conditional = self.PUBLISHER_CONDITIONALS.get(condition, None)
if not conditional:
publisher_test_results.append({
'success': False,
'error': (
'Invalid condition specified: {}\n'
'Valid conditions are: {}'
).format(condition, list(self.PUBLISHER_CONDITIONALS.keys())),
'output_descriptor': output,
})
continue
res = conditional['comparator'](subject_value, condition_value)
publisher_test_results.append({
'success': res,
'failure': None if res else (
'Item at path "{}" {} "{}",\nActual value: "{}"'.format(
jsonpath,
conditional['clause'],
condition_value,
subject_value
)
),
'output_descriptor': output
})
event.set_publication_results(publisher_test_results)
@staticmethod
def _run_publishers(alert):
"""Runs publishers for all currently configured outputs on the given alert
Args:
alert (Alert): The alert
Returns:
dict: A dict keyed by output:descriptor strings, mapped to nested dicts.
                  The nested dicts have 2 keys:
- publication (dict): The dict publication
- success (bool): True if the publishing finished, False if it errored.
"""
configured_outputs = alert.outputs
results = {}
for configured_output in configured_outputs:
[output_name, descriptor] = configured_output.split(':')
try:
output = MagicMock(spec=OutputDispatcher, __service__=output_name)
results[configured_output] = {
'publication': compose_alert(alert, output, descriptor),
'success': True,
}
except (RuntimeError, TypeError, NameError) as err:
results[configured_output] = {
'success': False,
'error': err,
}
return results
| airbnb/streamalert | streamalert_cli/test/handler.py | Python | apache-2.0 | 22,898 | 0.002533 |
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.config import ConfigValidationError
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from txircd.utils import durationToSeconds, ipAddressToShow, ircLower, now
from zope.interface import implementer
from datetime import datetime, timedelta
from typing import Any, Callable, Dict, List, Optional, Tuple
irc.RPL_WHOWASIP = "379"
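# 379 is not defined by twisted.words.protocols.irc, so it is registered here
# as the numeric used below to report a WHOWAS entry's real host and IP.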
@implementer(IPlugin, IModuleData, ICommand)
class WhowasCommand(ModuleData, Command):
name = "WhowasCommand"
core = True
def actions(self) -> List[Tuple[str, int, Callable]]:
return [ ("quit", 10, self.addUserToWhowas),
("remotequit", 10, self.addUserToWhowas),
("localquit", 10, self.addUserToWhowas) ]
def userCommands(self) -> List[Tuple[str, int, Command]]:
return [ ("WHOWAS", 1, self) ]
def load(self) -> None:
if "whowas" not in self.ircd.storage:
self.ircd.storage["whowas"] = {}
def verifyConfig(self, config: Dict[str, Any]) -> None:
if "whowas_duration" in config and not isinstance(config["whowas_duration"], str) and not isinstance(config["whowas_duration"], int):
raise ConfigValidationError("whowas_duration", "value must be an integer or a duration string")
if "whowas_max_entries" in config and (not isinstance(config["whowas_max_entries"], int) or config["whowas_max_entries"] < 0):
raise ConfigValidationError("whowas_max_entries", "invalid number")
def removeOldEntries(self, whowasEntries: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
expireDuration = durationToSeconds(self.ircd.config.get("whowas_duration", "1d"))
maxCount = self.ircd.config.get("whowas_max_entries", 10)
while whowasEntries and len(whowasEntries) > maxCount:
whowasEntries.pop(0)
expireDifference = timedelta(seconds=expireDuration)
expireTime = now() - expireDifference
while whowasEntries and whowasEntries[0]["when"] < expireTime:
whowasEntries.pop(0)
return whowasEntries
def addUserToWhowas(self, user: "IRCUser", reason: str, fromServer: "IRCServer" = None) -> None:
if not user.isRegistered():
# user never registered a nick, so no whowas entry to add
return
lowerNick = ircLower(user.nick)
allWhowas = self.ircd.storage["whowas"]
if lowerNick in allWhowas:
whowasEntries = allWhowas[lowerNick]
else:
whowasEntries = []
serverName = self.ircd.name
if user.uuid[:3] != self.ircd.serverID:
serverName = self.ircd.servers[user.uuid[:3]].name
whowasEntries.append({
"nick": user.nick,
"ident": user.ident,
"host": user.host(),
"realhost": user.realHost,
"ip": ipAddressToShow(user.ip),
"gecos": user.gecos,
"server": serverName,
"when": now()
})
whowasEntries = self.removeOldEntries(whowasEntries)
if whowasEntries:
allWhowas[lowerNick] = whowasEntries
elif lowerNick in allWhowas:
del allWhowas[lowerNick]
def parseParams(self, user: "IRCUser", params: List[str], prefix: str, tags: Dict[str, Optional[str]]) -> Optional[Dict[Any, Any]]:
if not params:
user.sendSingleError("WhowasCmd", irc.ERR_NEEDMOREPARAMS, "WHOWAS", "Not enough parameters")
return None
lowerParam = ircLower(params[0])
if lowerParam not in self.ircd.storage["whowas"]:
user.sendSingleError("WhowasNick", irc.ERR_WASNOSUCHNICK, params[0], "There was no such nickname")
return None
return {
"nick": lowerParam,
"param": params[0]
}
def execute(self, user: "IRCUser", data: Dict[Any, Any]) -> bool:
nick = data["nick"]
allWhowas = self.ircd.storage["whowas"]
whowasEntries = allWhowas[nick]
whowasEntries = self.removeOldEntries(whowasEntries)
if not whowasEntries:
del allWhowas[nick]
self.ircd.storage["whowas"] = allWhowas
user.sendMessage(irc.ERR_WASNOSUCHNICK, data["param"], "There was no such nickname")
return True
allWhowas[nick] = whowasEntries # Save back to the list excluding the removed entries
self.ircd.storage["whowas"] = allWhowas
for entry in whowasEntries:
entryNick = entry["nick"]
user.sendMessage(irc.RPL_WHOWASUSER, entryNick, entry["ident"], entry["host"], "*", entry["gecos"])
if self.ircd.runActionUntilValue("userhasoperpermission", user, "whowas-host", users=[user]):
user.sendMessage(irc.RPL_WHOWASIP, entryNick, "was connecting from {}@{} {}".format(entry["ident"], entry["realhost"], entry["ip"]))
user.sendMessage(irc.RPL_WHOISSERVER, entryNick, entry["server"], str(entry["when"]))
user.sendMessage(irc.RPL_ENDOFWHOWAS, nick, "End of WHOWAS")
return True
whowasCmd = WhowasCommand() | Heufneutje/txircd | txircd/modules/rfc/cmd_whowas.py | Python | bsd-3-clause | 4,564 | 0.027607 |
##########################################################################
#
# Copyright (c) 2012, John Haddon. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
__import__( "GafferScene" )
# GafferArnold makes use of OSL closure plugs, this ensures that the bindings
# are always loaded for these, even if people only import GafferArnold
__import__( "GafferOSL" )
try :
# Make sure we import _GafferArnold _without_ RTLD_GLOBAL. This prevents
# clashes between the LLVM symbols in libai.so and the Mesa OpenGL driver.
# Ideally we wouldn't use RTLD_GLOBAL anywhere - see
# https://github.com/ImageEngine/cortex/pull/810.
import sys
import ctypes
originalDLOpenFlags = sys.getdlopenflags()
sys.setdlopenflags( originalDLOpenFlags & ~ctypes.RTLD_GLOBAL )
from ._GafferArnold import *
finally :
sys.setdlopenflags( originalDLOpenFlags )
del sys, ctypes, originalDLOpenFlags
from .ArnoldShaderBall import ArnoldShaderBall
from .ArnoldTextureBake import ArnoldTextureBake
__import__( "IECore" ).loadConfig( "GAFFER_STARTUP_PATHS", subdirectory = "GafferArnold" )
| hradec/gaffer | python/GafferArnold/__init__.py | Python | bsd-3-clause | 2,708 | 0.011078 |
import re
from nxtools import *
from .common import *
def shorten(instr, nlen):
line = instr.split("\n")[0]
if len(line) < 100:
return line
return line[:nlen] + "..."
def filter_match(f, r):
"""OR"""
if type(f) in [list, tuple]:
res = False
for fl in f:
if re.match(fl, r):
return True
return False
else:
return re.match(f, r)
def tree_indent(data):
has_children = False
for i, row in enumerate(data):
value = row["value"]
depth = len(value.split("."))
parentindex = None
for j in range(i - 1, -1, -1):
if value.startswith(data[j]["value"] + "."):
parentindex = j
data[j]["has_children"] = True
break
if parentindex is None:
data[i]["indent"] = 0
continue
has_children = True
data[i]["indent"] = data[parentindex]["indent"] + 1
for i, row in enumerate(data):
role = row.get("role", "option")
if role in ["label", "hidden"]:
continue
elif has_children and row.get("has_children"):
data[i]["role"] = "header"
else:
data[i]["role"] = "option"
#
# CS Caching
#
class CachedObject(type):
_cache = None
@classmethod
def clear_cache(cls):
cls._cache = None
def __call__(cls, *args):
if not cls._cache:
cls._cache = {}
key = tuple(args)
if key not in cls._cache:
cls._cache[key] = super().__call__(*args)
return cls._cache[key]
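# Usage sketch (hypothetical): declaring `class Settings(metaclass=CachedObject)`
# makes `Settings("db")` return the same cached instance on repeated calls
# with identical positional arguments.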
# Moved to metadata, but this stub needs to live here so older firefly
# doesn't break.
def clear_cs_cache():
from . import metadata
metadata.clear_cs_cache()
| immstudios/nebula-core | nebulacore/meta_utils.py | Python | gpl-3.0 | 1,789 | 0.002236 |
import matplotlib.pyplot as plt
import numpy as np
import os
from inference import error_fn, infer_interactions, choose_J_from_general_form, solve_true_covariance_from_true_J
from pitchfork_langevin import jacobian_pitchfork, gen_multitraj, steadystate_pitchfork
from settings import DEFAULT_PARAMS, FOLDER_OUTPUT, TAU
from statistical_formulae import collect_multitraj_info, build_diffusion_from_langevin, build_covariance_at_step
from visualize_matrix import plot_matrix
"""
Assess error in JC + (JC)^T + D = 0 as num_traj varies, since C computed from num_traj
"""
# TODO plot heatmaps fn for each step in get_errors_from_one_traj
def get_errors_for_replicates(num_traj=500, num_steps=500, replicates=10, params=DEFAULT_PARAMS, noise=1.0):
true_errors = np.zeros(replicates)
infer_errors = np.zeros(replicates)
# get true J
fp_mid = steadystate_pitchfork(params)[:, 0]
J_true = jacobian_pitchfork(params, fp_mid, print_eig=False)
for k in xrange(replicates):
trials_states, _ = gen_multitraj(num_traj, init_cond=fp_mid, num_steps=num_steps, params=params, noise=noise)
D, C_est, J_infer = collect_multitraj_info(trials_states, params, noise, alpha=0.01, tol=1e-6)
true_errors[k] = error_fn(C_est, D, J_true)
infer_errors[k] = error_fn(C_est, D, J_infer)
return true_errors, infer_errors
def get_errors_from_one_traj(covperiod=5, num_traj=500, num_steps=5000, params=DEFAULT_PARAMS, noise=0.1, infer=True, alpha=0.01):
# get points to measure at
num_pts = int(num_steps/covperiod)
covsteps = [a*covperiod for a in xrange(num_pts)]
plotperiod = covperiod * 100
# prep error vectors
true_errors = np.zeros(num_pts)
infer_errors = None
J_infer_errors = None
if infer:
infer_errors = np.zeros(num_pts)
J_infer_errors = np.zeros(num_pts)
J_U0choice_errors = np.zeros(num_pts)
cov_lyap_errors = np.zeros(num_pts)
# get true J and D
fp_mid = steadystate_pitchfork(params)[:, 0]
J_true = jacobian_pitchfork(params, fp_mid, print_eig=False)
D = build_diffusion_from_langevin(params, noise)
C_lyap = solve_true_covariance_from_true_J(J_true, D)
print 'norm of C_lyap', np.linalg.norm(C_lyap)
plot_matrix(C_lyap, method='C_lyap', title_mod='static', plotdir=FOLDER_OUTPUT)
# compute long traj
multitraj, _ = gen_multitraj(num_traj, init_cond=fp_mid, num_steps=num_steps, params=params, noise=noise)
# get error for all covsteps
for idx, step in enumerate(covsteps):
C_est = build_covariance_at_step(multitraj, params, covstep=step)
J_U0choice = choose_J_from_general_form(C_est, D, scale=0.0)
true_errors[idx] = error_fn(C_est, D, J_true)
J_U0choice_errors[idx] = np.linalg.norm(J_true - J_U0choice)
        print step, plotperiod, step % plotperiod
if step % plotperiod == 0:
plot_matrix(C_est, method='C_data', title_mod='step%d' % step, plotdir=FOLDER_OUTPUT)
if infer:
print "inferring..."
J_infer = infer_interactions(C_est, D, alpha=alpha, tol=1e-6)
print "done"
infer_errors[idx] = error_fn(C_est, D, J_infer)
J_infer_errors[idx] = np.linalg.norm(J_true - J_infer)
cov_lyap_errors[idx] = np.linalg.norm(C_lyap - C_est)
print idx, step, np.linalg.norm(C_est), cov_lyap_errors[idx]
return covsteps, true_errors, infer_errors, J_infer_errors, J_U0choice_errors, cov_lyap_errors
if __name__ == '__main__':
# run settings
many_reps_endpt = False
one_rep_long = True
if many_reps_endpt:
reps = 10
mod = 'num_steps'
assert mod in ['num_traj', 'num_steps']
num_traj_set = [int(a) for a in np.linspace(10, 600, 6)]
num_steps_set = [int(a) for a in np.linspace(10, 2000, 5)]
param_vary_set = {'num_traj': num_traj_set, 'num_steps': num_steps_set}[mod]
true_errors_mid = np.zeros(len(param_vary_set))
true_errors_sd = np.zeros(len(param_vary_set))
infer_errors_mid = np.zeros(len(param_vary_set))
infer_errors_sd = np.zeros(len(param_vary_set))
# compute errors and do inference
for i, elem in enumerate(param_vary_set):
print "point %d (%s %d)" % (i, mod, elem)
if mod == 'num_traj':
true_errors, infer_errors = get_errors_for_replicates(num_traj=elem, replicates=reps, noise=0.1)
else:
true_errors, infer_errors = get_errors_for_replicates(num_steps=elem, replicates=reps, noise=0.1)
true_errors_mid[i] = np.mean(true_errors)
true_errors_sd[i] = np.std(true_errors)
infer_errors_mid[i] = np.mean(infer_errors)
infer_errors_sd[i] = np.std(infer_errors)
# plot
plt.errorbar(param_vary_set, true_errors_mid, yerr=true_errors_sd, label='true J errors', fmt='o')
plt.errorbar(param_vary_set, infer_errors_mid, yerr=infer_errors_sd, label='infer J errors', fmt='o')
plt.title('Reconstruction error (true J vs inferred) for varying %s' % mod)
plt.xlabel('%s' % mod)
plt.ylabel('F-norm of JC + (JC)^T + D')
plt.legend()
plt.show()
# alternate: errors for one long multi-traj at increasing timepoints points
infer = False
if one_rep_long:
alpha = 1e-8
num_steps = 5000
num_traj = 500 #5000
covsteps, true_errors, infer_errors, J_infer_errors, J_U0choice_errors, cov_errors = \
get_errors_from_one_traj(alpha=alpha, num_steps=num_steps, num_traj=num_traj, infer=infer)
# plotting
f = plt.figure(figsize=(16, 8))
plt.plot(covsteps, true_errors, '--k', label='true error')
if infer:
plt.plot(covsteps, infer_errors, '--b', label='inference error')
plt.title('Reconstruction error (true J vs inference alpha=%.1e) for 1 multiraj (num_steps %s, num_traj %d)' % (alpha, num_steps, num_traj))
plt.xlabel('step')
plt.ylabel('F-norm of JC + (JC)^T + D')
plt.legend()
plt.savefig(FOLDER_OUTPUT + os.sep + 'fnorm_reconstruct_flucdiss_a%.1e_traj%d_steps%d_tau%.2f.png' % (alpha, num_traj, num_steps, TAU))
# J error
f2 = plt.figure(figsize=(16, 8))
if infer:
plt.plot(covsteps, J_infer_errors, '--b', label='inference error')
plt.plot(covsteps, J_U0choice_errors, '--r', label='general form + choose U=0 error')
plt.title('Reconstruction error of J (U=0 choice vs inference alpha=%.1e) for 1 multiraj (num_steps %s, num_traj %d)' % (alpha, num_steps, num_traj))
plt.xlabel('step')
plt.ylabel('F-norm of J_true - J_method')
plt.legend()
plt.savefig(FOLDER_OUTPUT + os.sep + 'fnorm_reconstruct_J_a%.1e_traj%d_steps%d_tau%.2f.png' % (alpha, num_traj, num_steps, TAU))
plt.close()
# C_lyap vs C_data error
f3 = plt.figure(figsize=(16, 8))
plt.plot(covsteps, cov_errors, '--b', label='cov error')
plt.title(
'Reconstruction error of C_lyap from asymptotic C_data for 1 multiraj (num_steps %s, num_traj %d)' %
(num_steps, num_traj))
plt.xlabel('step')
plt.ylabel('F-norm of C_lyap - C_data')
plt.legend()
plt.savefig(FOLDER_OUTPUT + os.sep + 'fnorm_reconstruct_C_lyap_traj%d_steps%d_tau%.2f.png' % (num_traj, num_steps, TAU)) | mattsmart/biomodels | transcriptome_clustering/baseline_reconstruction_error.py | Python | mit | 7,422 | 0.004042 |
"""Platform to control a Zehnder ComfoAir Q350/450/600 ventilation unit."""
import logging
from pycomfoconnect import (
SENSOR_BYPASS_STATE,
SENSOR_DAYS_TO_REPLACE_FILTER,
SENSOR_FAN_EXHAUST_DUTY,
SENSOR_FAN_EXHAUST_FLOW,
SENSOR_FAN_EXHAUST_SPEED,
SENSOR_FAN_SUPPLY_DUTY,
SENSOR_FAN_SUPPLY_FLOW,
SENSOR_FAN_SUPPLY_SPEED,
SENSOR_HUMIDITY_EXHAUST,
SENSOR_HUMIDITY_EXTRACT,
SENSOR_HUMIDITY_OUTDOOR,
SENSOR_HUMIDITY_SUPPLY,
SENSOR_POWER_CURRENT,
SENSOR_TEMPERATURE_EXHAUST,
SENSOR_TEMPERATURE_EXTRACT,
SENSOR_TEMPERATURE_OUTDOOR,
SENSOR_TEMPERATURE_SUPPLY,
)
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_DEVICE_CLASS,
CONF_RESOURCES,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_POWER,
DEVICE_CLASS_TEMPERATURE,
PERCENTAGE,
POWER_WATT,
TEMP_CELSIUS,
TIME_DAYS,
TIME_HOURS,
VOLUME_CUBIC_METERS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from . import DOMAIN, SIGNAL_COMFOCONNECT_UPDATE_RECEIVED, ComfoConnectBridge
ATTR_AIR_FLOW_EXHAUST = "air_flow_exhaust"
ATTR_AIR_FLOW_SUPPLY = "air_flow_supply"
ATTR_BYPASS_STATE = "bypass_state"
ATTR_CURRENT_HUMIDITY = "current_humidity"
ATTR_CURRENT_TEMPERATURE = "current_temperature"
ATTR_DAYS_TO_REPLACE_FILTER = "days_to_replace_filter"
ATTR_EXHAUST_FAN_DUTY = "exhaust_fan_duty"
ATTR_EXHAUST_FAN_SPEED = "exhaust_fan_speed"
ATTR_EXHAUST_HUMIDITY = "exhaust_humidity"
ATTR_EXHAUST_TEMPERATURE = "exhaust_temperature"
ATTR_OUTSIDE_HUMIDITY = "outside_humidity"
ATTR_OUTSIDE_TEMPERATURE = "outside_temperature"
ATTR_POWER_CURRENT = "power_usage"
ATTR_SUPPLY_FAN_DUTY = "supply_fan_duty"
ATTR_SUPPLY_FAN_SPEED = "supply_fan_speed"
ATTR_SUPPLY_HUMIDITY = "supply_humidity"
ATTR_SUPPLY_TEMPERATURE = "supply_temperature"
_LOGGER = logging.getLogger(__name__)
ATTR_ICON = "icon"
ATTR_ID = "id"
ATTR_LABEL = "label"
ATTR_MULTIPLIER = "multiplier"
ATTR_UNIT = "unit"
SENSOR_TYPES = {
ATTR_CURRENT_TEMPERATURE: {
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_LABEL: "Inside Temperature",
ATTR_UNIT: TEMP_CELSIUS,
ATTR_ICON: "mdi:thermometer",
ATTR_ID: SENSOR_TEMPERATURE_EXTRACT,
ATTR_MULTIPLIER: 0.1,
},
ATTR_CURRENT_HUMIDITY: {
ATTR_DEVICE_CLASS: DEVICE_CLASS_HUMIDITY,
ATTR_LABEL: "Inside Humidity",
ATTR_UNIT: PERCENTAGE,
ATTR_ICON: "mdi:water-percent",
ATTR_ID: SENSOR_HUMIDITY_EXTRACT,
},
ATTR_OUTSIDE_TEMPERATURE: {
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_LABEL: "Outside Temperature",
ATTR_UNIT: TEMP_CELSIUS,
ATTR_ICON: "mdi:thermometer",
ATTR_ID: SENSOR_TEMPERATURE_OUTDOOR,
ATTR_MULTIPLIER: 0.1,
},
ATTR_OUTSIDE_HUMIDITY: {
ATTR_DEVICE_CLASS: DEVICE_CLASS_HUMIDITY,
ATTR_LABEL: "Outside Humidity",
ATTR_UNIT: PERCENTAGE,
ATTR_ICON: "mdi:water-percent",
ATTR_ID: SENSOR_HUMIDITY_OUTDOOR,
},
ATTR_SUPPLY_TEMPERATURE: {
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_LABEL: "Supply Temperature",
ATTR_UNIT: TEMP_CELSIUS,
ATTR_ICON: "mdi:thermometer",
ATTR_ID: SENSOR_TEMPERATURE_SUPPLY,
ATTR_MULTIPLIER: 0.1,
},
ATTR_SUPPLY_HUMIDITY: {
ATTR_DEVICE_CLASS: DEVICE_CLASS_HUMIDITY,
ATTR_LABEL: "Supply Humidity",
ATTR_UNIT: PERCENTAGE,
ATTR_ICON: "mdi:water-percent",
ATTR_ID: SENSOR_HUMIDITY_SUPPLY,
},
ATTR_SUPPLY_FAN_SPEED: {
ATTR_DEVICE_CLASS: None,
ATTR_LABEL: "Supply Fan Speed",
ATTR_UNIT: "rpm",
ATTR_ICON: "mdi:fan",
ATTR_ID: SENSOR_FAN_SUPPLY_SPEED,
},
ATTR_SUPPLY_FAN_DUTY: {
ATTR_DEVICE_CLASS: None,
ATTR_LABEL: "Supply Fan Duty",
ATTR_UNIT: PERCENTAGE,
ATTR_ICON: "mdi:fan",
ATTR_ID: SENSOR_FAN_SUPPLY_DUTY,
},
ATTR_EXHAUST_FAN_SPEED: {
ATTR_DEVICE_CLASS: None,
ATTR_LABEL: "Exhaust Fan Speed",
ATTR_UNIT: "rpm",
ATTR_ICON: "mdi:fan",
ATTR_ID: SENSOR_FAN_EXHAUST_SPEED,
},
ATTR_EXHAUST_FAN_DUTY: {
ATTR_DEVICE_CLASS: None,
ATTR_LABEL: "Exhaust Fan Duty",
ATTR_UNIT: PERCENTAGE,
ATTR_ICON: "mdi:fan",
ATTR_ID: SENSOR_FAN_EXHAUST_DUTY,
},
ATTR_EXHAUST_TEMPERATURE: {
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_LABEL: "Exhaust Temperature",
ATTR_UNIT: TEMP_CELSIUS,
ATTR_ICON: "mdi:thermometer",
ATTR_ID: SENSOR_TEMPERATURE_EXHAUST,
ATTR_MULTIPLIER: 0.1,
},
ATTR_EXHAUST_HUMIDITY: {
ATTR_DEVICE_CLASS: DEVICE_CLASS_HUMIDITY,
ATTR_LABEL: "Exhaust Humidity",
ATTR_UNIT: PERCENTAGE,
ATTR_ICON: "mdi:water-percent",
ATTR_ID: SENSOR_HUMIDITY_EXHAUST,
},
ATTR_AIR_FLOW_SUPPLY: {
ATTR_DEVICE_CLASS: None,
ATTR_LABEL: "Supply airflow",
ATTR_UNIT: f"{VOLUME_CUBIC_METERS}/{TIME_HOURS}",
ATTR_ICON: "mdi:fan",
ATTR_ID: SENSOR_FAN_SUPPLY_FLOW,
},
ATTR_AIR_FLOW_EXHAUST: {
ATTR_DEVICE_CLASS: None,
ATTR_LABEL: "Exhaust airflow",
ATTR_UNIT: f"{VOLUME_CUBIC_METERS}/{TIME_HOURS}",
ATTR_ICON: "mdi:fan",
ATTR_ID: SENSOR_FAN_EXHAUST_FLOW,
},
ATTR_BYPASS_STATE: {
ATTR_DEVICE_CLASS: None,
ATTR_LABEL: "Bypass State",
ATTR_UNIT: PERCENTAGE,
ATTR_ICON: "mdi:camera-iris",
ATTR_ID: SENSOR_BYPASS_STATE,
},
ATTR_DAYS_TO_REPLACE_FILTER: {
ATTR_DEVICE_CLASS: None,
ATTR_LABEL: "Days to replace filter",
ATTR_UNIT: TIME_DAYS,
ATTR_ICON: "mdi:calendar",
ATTR_ID: SENSOR_DAYS_TO_REPLACE_FILTER,
},
ATTR_POWER_CURRENT: {
ATTR_DEVICE_CLASS: DEVICE_CLASS_POWER,
ATTR_LABEL: "Power usage",
ATTR_UNIT: POWER_WATT,
ATTR_ICON: "mdi:flash",
ATTR_ID: SENSOR_POWER_CURRENT,
},
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_RESOURCES, default=[]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
)
}
)
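# Example configuration.yaml entry (illustrative; any key of SENSOR_TYPES is
# a valid resource):
#
# sensor:
#   - platform: comfoconnect
#     resources:
#       - current_temperature
#       - current_humidity
#       - power_usage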
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the ComfoConnect fan platform."""
ccb = hass.data[DOMAIN]
sensors = []
for resource in config[CONF_RESOURCES]:
sensors.append(
ComfoConnectSensor(
name=f"{ccb.name} {SENSOR_TYPES[resource][ATTR_LABEL]}",
ccb=ccb,
sensor_type=resource,
)
)
add_entities(sensors, True)
class ComfoConnectSensor(Entity):
"""Representation of a ComfoConnect sensor."""
def __init__(self, name, ccb: ComfoConnectBridge, sensor_type) -> None:
"""Initialize the ComfoConnect sensor."""
self._ccb = ccb
self._sensor_type = sensor_type
self._sensor_id = SENSOR_TYPES[self._sensor_type][ATTR_ID]
self._name = name
async def async_added_to_hass(self):
"""Register for sensor updates."""
_LOGGER.debug(
"Registering for sensor %s (%d)", self._sensor_type, self._sensor_id
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_COMFOCONNECT_UPDATE_RECEIVED.format(self._sensor_id),
self._handle_update,
)
)
await self.hass.async_add_executor_job(
self._ccb.comfoconnect.register_sensor, self._sensor_id
)
def _handle_update(self, value):
"""Handle update callbacks."""
_LOGGER.debug(
"Handle update for sensor %s (%d): %s",
self._sensor_type,
self._sensor_id,
value,
)
self._ccb.data[self._sensor_id] = round(
value * SENSOR_TYPES[self._sensor_type].get(ATTR_MULTIPLIER, 1), 2
)
self.schedule_update_ha_state()
@property
def state(self):
"""Return the state of the entity."""
try:
return self._ccb.data[self._sensor_id]
except KeyError:
return None
@property
def should_poll(self) -> bool:
"""Do not poll."""
return False
@property
def unique_id(self):
"""Return a unique_id for this entity."""
return f"{self._ccb.unique_id}-{self._sensor_type}"
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Return the icon to use in the frontend."""
return SENSOR_TYPES[self._sensor_type][ATTR_ICON]
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
return SENSOR_TYPES[self._sensor_type][ATTR_UNIT]
@property
def device_class(self):
"""Return the device_class."""
return SENSOR_TYPES[self._sensor_type][ATTR_DEVICE_CLASS]
| tchellomello/home-assistant | homeassistant/components/comfoconnect/sensor.py | Python | apache-2.0 | 9,175 | 0.000109 |
#Minimal example for running location prediction
from keras.models import load_model
import pickle
from keras.preprocessing.sequence import pad_sequences
import numpy as np
import os
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # see issue #152
os.environ["CUDA_VISIBLE_DEVICES"] = ""
binaryPath= 'data/binaries/' #Place where the serialized training data is
modelPath= 'data/models/' #Place to store the models
#Load Model
textBranch = load_model(modelPath +'/textBranchNorm.h5')
#Load preprocessed data...
file = open(binaryPath +"processors.obj",'rb')
descriptionTokenizer, domainEncoder, tldEncoder, locationTokenizer, sourceEncoder, textTokenizer, nameTokenizer, timeZoneTokenizer, utcEncoder, langEncoder, placeMedian, colnames, classEncoder = pickle.load(file)
#Predict text (e.g., 'Montmartre is truly beautiful')
testTexts=[];
testTexts.append("Montmartre is truly beautiful")
textSequences = textTokenizer.texts_to_sequences(testTexts)
textSequences = np.asarray(textSequences)
textSequences = pad_sequences(textSequences)
predict = textBranch.predict(textSequences)
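# predict has shape (1, num_classes); colnames maps each column index back to
# a location label, so argsort over predict[0] ranks candidate locations.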
#Print the top 5
for index in reversed(predict.argsort()[0][-5:]):
print("%s with score=%.3f" % (colnames[index], float(predict[0][index])) )
| Erechtheus/geolocation | predictText.py | Python | gpl-3.0 | 1,245 | 0.013655 |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import mock
import unittest
from eventlet import Timeout
from swift.common.swob import Request
from swift.proxy import server as proxy_server
from swift.proxy.controllers.base import headers_to_container_info
from test.unit import fake_http_connect, FakeRing, FakeMemcache
from swift.common.storage_policy import StoragePolicy
from swift.common.request_helpers import get_sys_meta_prefix
from test.unit import patch_policies, mocked_http_conn, debug_logger
from test.unit.common.ring.test_ring import TestRingBase
from test.unit.proxy.test_server import node_error_count
@patch_policies([StoragePolicy(0, 'zero', True, object_ring=FakeRing())])
class TestContainerController(TestRingBase):
def setUp(self):
TestRingBase.setUp(self)
self.logger = debug_logger()
self.container_ring = FakeRing(max_more_nodes=9)
self.app = proxy_server.Application(None, FakeMemcache(),
logger=self.logger,
account_ring=FakeRing(),
container_ring=self.container_ring)
self.account_info = {
'status': 200,
'container_count': '10',
'total_object_count': '100',
'bytes': '1000',
'meta': {},
'sysmeta': {},
}
class FakeAccountInfoContainerController(
proxy_server.ContainerController):
def account_info(controller, *args, **kwargs):
patch_path = 'swift.proxy.controllers.base.get_info'
with mock.patch(patch_path) as mock_get_info:
mock_get_info.return_value = dict(self.account_info)
return super(FakeAccountInfoContainerController,
controller).account_info(
*args, **kwargs)
_orig_get_controller = self.app.get_controller
def wrapped_get_controller(*args, **kwargs):
with mock.patch('swift.proxy.server.ContainerController',
new=FakeAccountInfoContainerController):
return _orig_get_controller(*args, **kwargs)
self.app.get_controller = wrapped_get_controller
def test_container_info_in_response_env(self):
controller = proxy_server.ContainerController(self.app, 'a', 'c')
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, body='')):
req = Request.blank('/v1/a/c', {'PATH_INFO': '/v1/a/c'})
resp = controller.HEAD(req)
self.assertEqual(2, resp.status_int // 100)
self.assertTrue("swift.container/a/c" in resp.environ)
self.assertEqual(headers_to_container_info(resp.headers),
resp.environ['swift.container/a/c'])
def test_swift_owner(self):
owner_headers = {
'x-container-read': 'value', 'x-container-write': 'value',
'x-container-sync-key': 'value', 'x-container-sync-to': 'value'}
controller = proxy_server.ContainerController(self.app, 'a', 'c')
req = Request.blank('/v1/a/c')
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, headers=owner_headers)):
resp = controller.HEAD(req)
self.assertEqual(2, resp.status_int // 100)
for key in owner_headers:
self.assertTrue(key not in resp.headers)
req = Request.blank('/v1/a/c', environ={'swift_owner': True})
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, headers=owner_headers)):
resp = controller.HEAD(req)
self.assertEqual(2, resp.status_int // 100)
for key in owner_headers:
self.assertTrue(key in resp.headers)
def _make_callback_func(self, context):
def callback(ipaddr, port, device, partition, method, path,
headers=None, query_string=None, ssl=False):
context['method'] = method
context['path'] = path
context['headers'] = headers or {}
return callback
def test_sys_meta_headers_PUT(self):
# check that headers in sys meta namespace make it through
# the container controller
sys_meta_key = '%stest' % get_sys_meta_prefix('container')
sys_meta_key = sys_meta_key.title()
user_meta_key = 'X-Container-Meta-Test'
controller = proxy_server.ContainerController(self.app, 'a', 'c')
context = {}
callback = self._make_callback_func(context)
hdrs_in = {sys_meta_key: 'foo',
user_meta_key: 'bar',
'x-timestamp': '1.0'}
req = Request.blank('/v1/a/c', headers=hdrs_in)
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, give_connect=callback)):
controller.PUT(req)
self.assertEqual(context['method'], 'PUT')
self.assertTrue(sys_meta_key in context['headers'])
self.assertEqual(context['headers'][sys_meta_key], 'foo')
self.assertTrue(user_meta_key in context['headers'])
self.assertEqual(context['headers'][user_meta_key], 'bar')
self.assertNotEqual(context['headers']['x-timestamp'], '1.0')
def test_sys_meta_headers_POST(self):
# check that headers in sys meta namespace make it through
# the container controller
sys_meta_key = '%stest' % get_sys_meta_prefix('container')
sys_meta_key = sys_meta_key.title()
user_meta_key = 'X-Container-Meta-Test'
controller = proxy_server.ContainerController(self.app, 'a', 'c')
context = {}
callback = self._make_callback_func(context)
hdrs_in = {sys_meta_key: 'foo',
user_meta_key: 'bar',
'x-timestamp': '1.0'}
req = Request.blank('/v1/a/c', headers=hdrs_in)
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, give_connect=callback)):
controller.POST(req)
self.assertEqual(context['method'], 'POST')
self.assertTrue(sys_meta_key in context['headers'])
self.assertEqual(context['headers'][sys_meta_key], 'foo')
self.assertTrue(user_meta_key in context['headers'])
self.assertEqual(context['headers'][user_meta_key], 'bar')
self.assertNotEqual(context['headers']['x-timestamp'], '1.0')
def test_node_errors(self):
self.app.sort_nodes = lambda n: n
for method in ('PUT', 'DELETE', 'POST'):
def test_status_map(statuses, expected):
self.app._error_limiting = {}
req = Request.blank('/v1/a/c', method=method)
with mocked_http_conn(*statuses) as fake_conn:
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, expected)
for req in fake_conn.requests:
self.assertEqual(req['method'], method)
self.assertTrue(req['path'].endswith('/a/c'))
base_status = [201] * 3
# test happy path
test_status_map(list(base_status), 201)
for i in range(3):
self.assertEqual(node_error_count(
self.app, self.container_ring.devs[i]), 0)
# single node errors and test isolation
for i in range(3):
status_list = list(base_status)
status_list[i] = 503
status_list.append(201)
test_status_map(status_list, 201)
for j in range(3):
expected = 1 if j == i else 0
self.assertEqual(node_error_count(
self.app, self.container_ring.devs[j]), expected)
# timeout
test_status_map((201, Timeout(), 201, 201), 201)
self.assertEqual(node_error_count(
self.app, self.container_ring.devs[1]), 1)
# exception
test_status_map((Exception('kaboom!'), 201, 201, 201), 201)
self.assertEqual(node_error_count(
self.app, self.container_ring.devs[0]), 1)
# insufficient storage
test_status_map((201, 201, 507, 201), 201)
self.assertEqual(node_error_count(
self.app, self.container_ring.devs[2]),
self.app.error_suppression_limit + 1)
if __name__ == '__main__':
unittest.main()
| bkolli/swift | test/unit/proxy/controllers/test_container.py | Python | apache-2.0 | 9,332 | 0 |
#! /usr/bin/env python3
def find_locations( sequence_file, pattern ):
"""
Find the most common kmers of a given size in a given text
"""
sequence = parse_sequence_file( sequence_file )
k = len(pattern)
for i in range(len(sequence)-k+1):
if sequence[i:i+k] == pattern:
yield i
def parse_sequence_file( sequence_file ):
seq = ''
with open(sequence_file) as handle:
for line in handle:
seq += line.strip()
return seq
if __name__ == '__main__':
import sys
sequence_file = sys.argv[1]
pattern = sys.argv[2]
starts = list(find_locations(sequence_file, pattern))
print(' '.join([str(s) for s in starts]))
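    # Example: for a sequence file containing "GATATATGCATATACTT" and the
    # pattern "ATAT", this prints "1 3 9" (0-based start positions).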
| bnbowman/BifoAlgo | src/Chapter1/Sec3_PatternMatching.py | Python | gpl-2.0 | 627 | 0.041467 |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from optparse import OptionParser
import unittest
import six
from telemetry.page import shared_page_state
from contrib.cluster_telemetry import rasterize_and_record_micro_ct
from contrib.cluster_telemetry import repaint
from contrib.cluster_telemetry import skpicture_printer
class MockErrorParser(object):
def __init__(self):
self.err_msg = None
def error(self, err_msg):
self.err_msg = err_msg
class CTBenchmarks(unittest.TestCase):
def setUp(self):
self.ct_benchmarks = [
rasterize_and_record_micro_ct.RasterizeAndRecordMicroCT(),
repaint.RepaintCT(),
skpicture_printer.SkpicturePrinterCT(),
]
self.shared_page_state_class = shared_page_state.SharedMobilePageState
self.archive_data_file = '/b/test'
self.urls_list = 'http://test1.com,http://test2.com,http://test3.net'
self.mock_parser = MockErrorParser()
def testCTBenchmarks(self):
for benchmark in self.ct_benchmarks:
parser = OptionParser()
parser.user_agent = 'mobile'
parser.archive_data_file = self.archive_data_file
parser.urls_list = self.urls_list
benchmark.AddBenchmarkCommandLineArgs(parser)
benchmark.ProcessCommandLineArgs(None, parser)
ct_page_set = benchmark.CreateStorySet(parser)
self.assertEquals(
len(self.urls_list.split(',')), len(ct_page_set.stories))
self.assertEquals(
self.archive_data_file, ct_page_set.archive_data_file)
for i in range(len(self.urls_list.split(','))):
url = self.urls_list.split(',')[i]
story = ct_page_set.stories[i]
self.assertEquals(url, story.url)
self.assertEquals(
self.shared_page_state_class, story.shared_state_class)
self.assertEquals(self.archive_data_file, story.archive_data_file)
def testCTBenchmarks_wrongAgent(self):
for benchmark in self.ct_benchmarks:
parser = OptionParser()
parser.user_agent = 'mobileeeeee'
parser.archive_data_file = self.archive_data_file
parser.urls_list = self.urls_list
benchmark.AddBenchmarkCommandLineArgs(parser)
benchmark.ProcessCommandLineArgs(None, parser)
try:
benchmark.CreateStorySet(parser)
self.fail('Expected ValueError')
except ValueError as e:
self.assertEquals('user_agent mobileeeeee is unrecognized', str(e))
def testCTBenchmarks_missingDataFile(self):
for benchmark in self.ct_benchmarks:
parser = OptionParser()
parser.user_agent = 'mobile'
parser.urls_list = self.urls_list
parser.use_live_sites = False
benchmark.AddBenchmarkCommandLineArgs(parser)
# Should fail due to missing archive_data_file.
try:
benchmark.ProcessCommandLineArgs(None, parser)
self.fail('Expected AttributeError')
except AttributeError as e:
if six.PY2:
expected_error = (
"OptionParser instance has no attribute 'archive_data_file'")
actual_error = e.message
else:
expected_error = (
"'OptionParser' object has no attribute 'archive_data_file'")
actual_error = str(e)
self.assertEquals(actual_error, expected_error)
# Now add an empty archive_data_file.
parser.archive_data_file = ''
benchmark.ProcessCommandLineArgs(self.mock_parser, parser)
self.assertEquals(
'Please specify --archive-data-file.', self.mock_parser.err_msg)
def testCTBenchmarks_missingDataFileUseLiveSites(self):
for benchmark in self.ct_benchmarks:
parser = OptionParser()
parser.user_agent = 'mobile'
parser.urls_list = self.urls_list
parser.use_live_sites = True
parser.archive_data_file = None
benchmark.AddBenchmarkCommandLineArgs(parser)
# Should pass.
benchmark.ProcessCommandLineArgs(self.mock_parser, parser)
self.assertIsNone(self.mock_parser.err_msg)
def testCTBenchmarks_missingUrlsList(self):
for benchmark in self.ct_benchmarks:
parser = OptionParser()
parser.user_agent = 'mobile'
parser.archive_data_file = self.archive_data_file
benchmark.AddBenchmarkCommandLineArgs(parser)
# Should fail due to missing urls_list.
try:
benchmark.ProcessCommandLineArgs(None, parser)
self.fail('Expected AttributeError')
except AttributeError as e:
if six.PY2:
self.assertEquals(
"OptionParser instance has no attribute 'urls_list'", str(e))
else:
self.assertEquals(
"'OptionParser' object has no attribute 'urls_list'", str(e))
# Now add an empty urls_list.
parser.urls_list = ''
benchmark.ProcessCommandLineArgs(self.mock_parser, parser)
self.assertEquals('Please specify --urls-list.', self.mock_parser.err_msg)
| scheib/chromium | tools/perf/contrib/cluster_telemetry/ct_benchmarks_unittest.py | Python | bsd-3-clause | 4,992 | 0.013021 |
EGA2RGB = [
(0x00, 0x00, 0x00),
(0x00, 0x00, 0xAA),
(0x00, 0xAA, 0x00),
(0x00, 0xAA, 0xAA),
(0xAA, 0x00, 0x00),
(0xAA, 0x00, 0xAA),
(0xAA, 0x55, 0x00),
(0xAA, 0xAA, 0xAA),
(0x55, 0x55, 0x55),
(0x55, 0x55, 0xFF),
(0x55, 0xFF, 0x55),
(0x55, 0xFF, 0xFF),
(0xFF, 0x55, 0x55),
(0xFF, 0x55, 0xFF),
(0xFF, 0xFF, 0x55),
(0xFF, 0xFF, 0xFF),
]
def load_shapes():
shapes = []
    # Read as binary; bytearray indexing yields ints on both Python 2 and 3.
    data = bytearray(open("ULT/SHAPES.EGA", "rb").read())
for i in range(256):
shape = []
for j in range(16):
for k in range(8):
                d = data[k + 8 * j + 128 * i]
a, b = divmod(d, 16)
shape.append(EGA2RGB[a])
shape.append(EGA2RGB[b])
shapes.append(shape)
return shapes
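# SHAPES.EGA packs 256 tiles of 16x16 pixels: each tile is 128 bytes, and
# every byte holds two 4-bit EGA palette indices (high nibble first) that
# load_shapes expands to (R, G, B) tuples via EGA2RGB.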
| jtauber/ultima4 | shapes.py | Python | mit | 800 | 0 |
# Jacqueline Kory Westlund
# May 2016
#
# The MIT License (MIT)
#
# Copyright (c) 2016 Personal Robots Group
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys # For getting generic exception info
import datetime # For getting time deltas for timeouts
import time # For sleep
import json # For packing ros message properties
import random # For picking robot responses and shuffling answer options
import logging # Log messages
import Queue # for queuing messages for the main game loop
from SS_Errors import NoStoryFound # Custom exception when no stories found
from ss_script_parser import ss_script_parser # Parses scripts
from ss_personalization_manager import ss_personalization_manager
from ss_ros import ss_ros # Our ROS connection
class ss_script_handler():
""" Social stories script handler parses and deals with script lines. Uses
the script parser to get the next line in a script. We keep loading script
lines and parsing script lines separate on the offchance that we might want
to replace how scripts are stored and accessed (e.g., in a database versus
in text files).
"""
# Constants for script playback:
# Time to pause after showing answer feedback and playing robot
# feedback speech before moving on to the next question.
ANSWER_FEEDBACK_PAUSE_TIME = 2
# Time to wait for robot to finish speaking or acting before
# moving on to the next script line (in seconds).
WAIT_TIME = 30
def __init__(self, ros_node, session, participant, script_path,
story_script_path, session_script_path, database, queue,
percent_correct_to_level):
""" Save references to ROS connection and logger, get scripts and
set up to read script lines
"""
# Set up logger.
self._logger = logging.getLogger(__name__)
self._logger.info("Setting up script handler...")
# Save reference to our ros node so we can publish messages.
self._ros_node = ros_node
# Save script paths so we can load scripts later.
self._script_path = script_path
if (story_script_path is None):
self._story_script_path = ""
else:
self._story_script_path = story_script_path
if (session_script_path is None):
self._session_script_path = ""
else:
self._session_script_path = session_script_path
# We get a reference to the main game node's queue so we can
# give it messages.
self._game_node_queue = queue
# Set up personalization manager so we can get personalized
# stories for this participant.
self._personalization_man = ss_personalization_manager(session,
participant, database, percent_correct_to_level)
# Set up script parser.
self._script_parser = ss_script_parser()
# These are other script parsers we may use later.
self._story_parser = None
self._repeat_parser = None
# If we have a repeating script, we will need to save its filename so
# we can re-load it when we repeat it.
self._repeating_script_name = ""
# Get session script from script parser and give to the script
# parser. Story scripts we will get later from the
# personalization manager.
try:
self._script_parser.load_script(self._script_path
+ self._session_script_path
+ self._script_parser.get_session_script(session))
except IOError:
self._logger.exception("Script parser could not open session "
+ "script!")
# Pass exception up so whoever wanted a script handler knows
# they didn't get a script.
raise
# Initialize flags and counters:
# Set up counter for how many stories have been told this session.
self._stories_told = 0
# When we start, we are not currently telling a story or
# repeating a script, or at the end of the game.
self._doing_story = False
self._repeating = False
self._end_game = False
# When we start, we are not asking a question, and so there is no
# current question type or number.
self._current_question_type = ""
self._current_question_num = 0
# For counting repetitions of a repeating script.
self._repetitions = 0
# The script will tell us the max number of repetitions.
self._max_repetitions = 1
# The script will tell us the max number of stories.
self._max_stories = 1
# The maximum number of incorrect user responses before the
# game moves on (can also be set in the script).
self._max_incorrect_responses = 2
# Set the maximum game time, in minutes. This can also be set
# in the game script.
self._max_game_time = datetime.timedelta(minutes=10)
# Sometimes we may need to know what the last user response we
# waited for was, and how long we waited.
self._last_response_to_get = None
self._last_response_timeout = None
# Save start time so we can check whether we've run out of time.
self._start_time = datetime.datetime.now()
# Initialize total time paused.
self._total_time_paused = datetime.timedelta(seconds=0)
# Initialize pause start time in case someone calls the resume
# game timer function before the pause game function.
self._pause_start_time = None
def iterate_once(self):
""" Play the next commands from the script """
try:
# We check whether we've reached the game time limit when
# we load new stories or when we are about to start a
# repeating script over again.
# Get next line from story script.
if self._doing_story and self._story_parser is not None:
self._logger.debug("Getting next line from story script.")
line = self._story_parser.next_line()
# If not in a story, get next line from repeating script.
elif self._repeating and self._repeat_parser is not None:
self._logger.debug("Getting next line from repeating script.")
line = self._repeat_parser.next_line()
# If not repeating, get next line from main session script.
else:
self._logger.debug("Getting next line from main session script.")
line = self._script_parser.next_line()
# We didn't read a line!
# If we get a stop iteration exception, we're at the end of the
# file and will stop iterating over lines.
except StopIteration as e:
# If we were doing a story, now we're done, go back to
# the previous script.
if self._doing_story:
self._logger.info("Finished story " + str(self._stories_told + 1)
+ " of " + str(self._max_stories) + "!")
self._doing_story = False
self._stories_told += 1
# If we were repeating a script, increment counter.
elif self._repeating:
self._repetitions += 1
self._logger.info("Finished repetition " + str(self._repetitions)
+ " of " + str(self._max_repetitions) + "!")
# If we've done enough repetitions, or if we've run out
# of game time, go back to the main session script (set
# the repeating flag to false).
if (self._repetitions >= self._max_repetitions) \
or self._end_game \
or ((datetime.datetime.now() - self._start_time) \
- self._total_time_paused >= self._max_game_time):
self._logger.info("Done repeating!")
self._repeating = False
# Otherwise, we need to repeat again. Reload the repeating
# script.
else:
# Create a script parser for the filename provided,
# assume it is in the session_scripts directory.
self._repeat_parser = ss_script_parser()
try:
self._repeat_parser.load_script(self._script_path
+ self._session_script_path
+ self._repeating_script_name)
except IOError:
self._logger.exception("Script parser could not open "
+ "session script to repeat! Skipping REPEAT line.")
                        self._repeating = False
return
# Otherwise we're at the end of the main script.
else:
self._logger.info("No more script lines to get!")
# Pass on the stop iteration exception, with additional
# information about the player's performance during the
# game, formatted as a json object.
                emotion, tom, order = \
                    self._personalization_man.get_performance_this_session()
performance = {}
if emotion is not None:
performance["child-emotion-question-accuracy"] = \
emotion
if tom is not None:
performance["child-tom-question-accuracy"] = \
tom
if order is not None:
performance["child-order-question-accuracy"] = \
order
e.performance = json.dumps(performance)
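                # 'performance' serializes to JSON such as (illustrative
                # values): {"child-emotion-question-accuracy": 0.8,
                # "child-tom-question-accuracy": 0.7,
                # "child-order-question-accuracy": 0.9}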
raise
except ValueError:
# We may get this exception if we try to get the next line
# but the script file is closed. If that happens, something
# probably went wrong with ending playback of a story script
# or a repeating script. End repeating and end the current
# story so we go back to the main session script.
if self._doing_story:
self._doing_story = False
if self._repeating:
self._repeating = False
# Oh no got some unexpected error! Raise it again so we can
# figure out what happened and deal with it during debugging.
except Exception as e:
self._logger.exception("Unexpected exception! Error: %s", e)
raise
# We got a line: parse it!
else:
# Make sure we got a line before we try parsing it. We
# might not get a line if the file has closed or if
# next_line has some other problem.
if not line:
self._logger.warning("[iterate_once] Tried to get next line, "
+ "but got None!")
return
# Got a line - print for debugging.
self._logger.debug("LINE: " + repr(line))
# Parse line!
# Split on tabs.
elements = line.rstrip().split('\t')
self._logger.debug("... " + str(len(elements)) + " elements: \n... "
+ str(elements))
if len(elements) < 1:
self._logger.info("Line had no elements! Going to next line...")
return
# Do different stuff depending on what the first element is.
#########################################################
# Some STORY lines have only one part to the command.
elif len(elements) == 1:
# For STORY lines, play back the next story for this
# participant.
if "STORY" in elements[0]:
self._logger.debug("STORY")
# If line indicates we need to start a story, do so.
self._doing_story = True
# Create a script parser for the filename provided,
# assuming it is in the story scripts directory.
self._story_parser = ss_script_parser()
try:
self._story_parser.load_script(self._script_path
+ self._story_script_path
+ self._personalization_man.get_next_story_script())
except IOError:
self._logger.exception("Script parser could not open "
+ "story script! Skipping STORY line.")
self._doing_story = False
except AttributeError:
self._logger.exception("Script parser could not open "
+ "story script because no script was loaded! "
+ "Skipping STORY line.")
self._doing_story = False
except NoStoryFound:
self._logger.exception("Script parser could not get \
the next story script because no script was \
found by the personalization manager! \
Skipping STORY line.")
self._doing_story = False
# Line has 2+ elements, so check the other commands.
#########################################################
# For STORY SETUP lines, pick the next story to play so
# we can load its graphics and play back the story.
elif "STORY" in elements[0] and "SETUP" in elements[1]:
self._logger.debug("STORY SETUP")
# Pick the next story to play.
self._personalization_man.pick_next_story()
#########################################################
# For ROBOT lines, send command to the robot.
elif "ROBOT" in elements[0]:
self._logger.debug("ROBOT")
# Play a randomly selected story intro from the list.
if "STORY_INTRO" in elements[1]:
self._ros_node.send_robot_command("DO",
response="ROBOT_NOT_SPEAKING",
timeout=datetime.timedelta(seconds=int(
self.WAIT_TIME)),
properties=self._story_intros[
random.randint(0,len(self._story_intros)-1)])
# Play a randomly selected story closing from the list.
elif "STORY_CLOSING" in elements[1]:
self._ros_node.send_robot_command("DO",
response="ROBOT_NOT_SPEAKING",
timeout=datetime.timedelta(seconds=int(
self.WAIT_TIME)),
properties=self._story_closings[
random.randint(0,len(self._story_closings)-1)])
# Send a command to the robot, with properties.
elif len(elements) > 2:
self._ros_node.send_robot_command(elements[1],
response="ROBOT_NOT_SPEAKING",
timeout=datetime.timedelta(seconds=int(
self.WAIT_TIME)),
properties=elements[2])
# Send a command to the robot, without properties.
else:
self._ros_node.send_robot_command(elements[1], "")
#########################################################
# For OPAL lines, send command to Opal game
elif "OPAL" in elements[0]:
self._logger.debug("OPAL")
if "LOAD_ALL" in elements[1] and len(elements) >= 3:
# Load all objects listed in file -- the file is
# assumed to have properties for one object on each
# line.
to_load = self._read_list_from_file(
self._script_path + self._session_script_path +
elements[2])
for obj in to_load:
self._ros_node.send_opal_command("LOAD_OBJECT", obj)
# Get the next story and load graphics into game.
elif "LOAD_STORY" in elements[1]:
self._load_next_story()
# Load answers for game.
elif "LOAD_ANSWERS" in elements[1] and len(elements) >= 3:
self._load_answers(elements[2])
# Send an opal command, with properties.
elif len(elements) > 2:
self._ros_node.send_opal_command(elements[1], elements[2])
# Send an opal command, without properties.
else:
self._ros_node.send_opal_command(elements[1])
#########################################################
# For PAUSE lines, sleep for the specified number of
# seconds before continuing script playback.
elif "PAUSE" in elements[0] and len(elements) >= 2:
self._logger.debug("PAUSE")
try:
time.sleep(int(elements[1]))
except ValueError:
self._logger.exception("Not pausing! PAUSE command was "
+ "given an invalid argument (should be an int)!")
#########################################################
# For ADD lines, get a list of robot commands that can be
# used in response to particular triggers from the specified
# file and save them for later use -- all ADD lines should
# have 3 elements.
elif "ADD" in elements[0] and len(elements) >= 3:
self._logger.debug("ADD")
# Read list of responses from the specified file into the
# appropriate variable.
try:
if "INCORRECT_RESPONSES" in elements[1]:
self._incorrect_responses = self._read_list_from_file(
self._script_path + self._session_script_path +
elements[2])
self._logger.debug("... Got "
+ str(len(self._incorrect_responses)))
if "CORRECT_RESPONSES" in elements[1]:
self._correct_responses = self._read_list_from_file(
self._script_path + self._session_script_path +
elements[2])
self._logger.debug("... Got "
+ str(len(self._correct_responses)))
elif "START_RESPONSES" in elements[1]:
self._start_responses = self._read_list_from_file(
self._script_path + self._session_script_path +
elements[2])
self._logger.debug("... Got "
+ str(len(self._start_responses)))
elif "NO_RESPONSES" in elements[1]:
self._no_responses = self._read_list_from_file(
self._script_path + self._session_script_path +
elements[2])
self._logger.debug("... Got "
+ str(len(self._no_responses)))
elif "ANSWER_FEEDBACK" in elements[1]:
self._answer_feedback = self._read_list_from_file(
self._script_path + self._session_script_path +
elements[2])
self._logger.debug("... Got "
+ str(len(self._answer_feedback)))
elif "STORY_INTROS" in elements[1]:
self._story_intros = self._read_list_from_file(
self._script_path + self._session_script_path +
elements[2])
self._logger.debug("... Got "
+ str(len(self._story_intros)))
elif "STORY_CLOSINGS" in elements[1]:
self._story_closings = self._read_list_from_file(
self._script_path + self._session_script_path +
elements[2])
self._logger.debug("... Got "
+ str(len(self._story_closings)))
elif "TIMEOUT_CLOSINGS" in elements[1]:
self._timeout_closings = self._read_list_from_file(
self._script_path + self._session_script_path +
elements[2])
self._logger.debug("Got "
+ str(len(self._timeout_closings)))
elif "MAX_STORIES_REACHED" in elements[1]:
self._max_stories_reached = self._read_list_from_file(
self._script_path + self._session_script_path +
elements[2])
self._logger.debug("... Got "
+ str(len(self._max_stories_reached)))
except IOError:
self._logger.exception("Failed to add responses!")
else:
self._logger.info("Added " + elements[1])
#########################################################
# For SET lines, set the specified constant.
elif "SET" in elements[0] and len(elements) >= 3:
self._logger.debug("SET")
if "MAX_INCORRECT_RESPONSES" in elements[1]:
self._max_incorrect_responses = int(elements[2])
self._logger.info("Set MAX_INCORRECT_RESPONSES to " +
elements[2])
elif "MAX_GAME_TIME" in elements[1]:
self._max_game_time = datetime.timedelta(minutes=
int(elements[2]))
self._logger.info("Set MAX_GAME_TIME to " + elements[2])
elif "MAX_STORIES" in elements[1]:
self._max_stories = int(elements[2])
self._logger.info("Set MAX_STORIES to " + elements[2])
#########################################################
# For WAIT lines, wait for the specified user response,
# or for a timeout.
elif "WAIT" in elements[0] and len(elements) >= 3:
self._logger.debug("WAIT")
self.wait_for_response(elements[1], int(elements[2]))
#########################################################
# For QUESTION lines, save the question type and question number
# for later use.
elif "QUESTION" in elements[0] and len(elements) >= 3:
self._current_question_type = elements[1]
self._current_question_num = int(elements[2])
self._logger.info("Current question: type " + elements[1]
+ ", num " + elements[2])
#########################################################
# For REPEAT lines, repeat lines in the specified script
# file the specified number of times.
elif "REPEAT" in elements[0] and len(elements) >= 3:
self._logger.debug("REPEAT")
self._repeating = True
self._repetitions = 0
# Create a script parser for the filename provided,
# assume it is in the session_scripts directory.
self._repeat_parser = ss_script_parser()
self._repeating_script_name = elements[2]
try:
self._repeat_parser.load_script(self._script_path
+ self._session_script_path
+ elements[2])
except IOError:
self._logger.exception("Script parser could not open "
+ "session script to repeat! Skipping REPEAT line.")
self._repeating = False
return
# Figure out how many times we should repeat the script.
if "MAX_STORIES" in elements[1]:
try:
self._max_repetitions = self._max_stories
except AttributeError:
self._logger.exception("Tried to set MAX_REPETITIONS to"
+ " MAX_STORIES, but MAX_STORIES has not been "
+ "set . Setting to 1 repetition instead.")
self._max_repetitions = 1
else:
self._max_repetitions = int(elements[1])
self._logger.debug("Going to repeat " + elements[2] + " " +
str(self._max_repetitions) + " time(s).")
def _read_list_from_file(self, filename):
""" Read a list of robot responses from a file, return a list
of the lines from the file
"""
# Open script for reading.
        try:
            # Use a with block so the file handle is closed even if
            # reading fails partway through.
            with open(filename, "r") as fh:
                return fh.readlines()
        except IOError:
self._logger.exception("Cannot open file: " + filename)
# Pass exception up so anyone trying to add a response list
# from a script knows it didn't work.
raise
def wait_for_response(self, response_to_get, timeout):
""" Wait for a user response or wait until the specified time
has elapsed. If the response is incorrect, allow multiple
attempts up to the maximum number of incorrect responses.
"""
for i in range(0, self._max_incorrect_responses):
self._logger.info("Waiting for user response...")
# Save the response we were trying to get in case we need
# to try again.
self._last_response_to_get = response_to_get
self._last_response_timeout = timeout
# Wait for the specified type of response, or until the
# specified time has elapsed.
response, answer = self._ros_node.wait_for_response(response_to_get,
datetime.timedelta(seconds=int(timeout)))
# After waiting for a response, need to play back an
# appropriate robot response.
# If we didn't receive a response, then it was probably
# because we didn't send a valid response to wait for.
# This is different from a TIMEOUT since we didn't time
# out -- we just didn't get a response of any kind.
if not response:
self._logger.info("Done waiting -- did not get valid response!")
return False
# If we received no user response before timing out, send a
# TIMEOUT message and pause the game.
elif "TIMEOUT" in response:
# Announce we timed out.
self._ros_node.send_game_state("TIMEOUT")
# Pause game and wait to be told whether we should try
# waiting again for a response or whether we should
# skip it and move on. Queue up the pause command so the
# main game loop can take action.
self._game_node_queue.put("PAUSE")
# Announce the game is pausing.
self._ros_node.send_game_state("PAUSE")
# Indicate that we did not get a response.
# We don't break and let the user try again because the
# external game monitor deals with TIMEOUT events, and
# will tell us whether to try waiting again or to just
# skip waiting for this response.
return False
# If response was INCORRECT, randomly select a robot
# response to an incorrect user action.
elif "INCORRECT" in response:
# Record incorrect response in the db.
self._personalization_man.record_user_response(
self._current_question_num, self._current_question_type,
answer)
try:
self._ros_node.send_robot_command("DO",
response="ROBOT_NOT_SPEAKING",
timeout=datetime.timedelta(seconds=int(
self.WAIT_TIME)),
properties=self._incorrect_responses[random.randint(0,
len(self._incorrect_responses)-1)])
except AttributeError:
self._logger.exception("Could not play an incorrect "
+ "response. Maybe none were loaded?")
# Don't break so we allow the user a chance to respond
# again.
# If response was NO, randomly select a robot response to
# the user selecting no.
elif "NO" in response:
try:
self._ros_node.send_robot_command("DO",
response="ROBOT_NOT_SPEAKING",
timeout=datetime.timedelta(seconds=int(
self.WAIT_TIME)),
properties=self._no_responses[random.randint(0,
len(self._no_responses)-1)])
except AttributeError:
self._logger.exception("Could not play a response to "
+ "user's NO. Maybe none were loaded?")
# Don't break so we allow the user a chance to respond
# again.
# If response was CORRECT, randomly select a robot response
# to a correct user action, highlight the correct answer,
# and break out of response loop.
elif "CORRECT" in response:
# Record correct response in the db.
self._personalization_man.record_user_response(
self._current_question_num, self._current_question_type,
answer)
try:
self._ros_node.send_robot_command("DO",
response="ROBOT_NOT_SPEAKING",
timeout=datetime.timedelta(seconds=int(
self.WAIT_TIME)),
properties=self._correct_responses[random.randint(0,
len(self._correct_responses)-1)])
self._ros_node.send_opal_command("SHOW_CORRECT")
self._ros_node.send_robot_command("DO",
response="ROBOT_NOT_SPEAKING",
timeout=datetime.timedelta(seconds=int(
self.WAIT_TIME)),
properties=self._answer_feedback[random.randint(0,
len(self._answer_feedback)-1)])
# Pause after speaking before hiding correct again
time.sleep(self.ANSWER_FEEDBACK_PAUSE_TIME)
self._ros_node.send_opal_command("HIDE_CORRECT")
except AttributeError:
self._logger.exception("Could not play a correct "
+ "response or could not play robot's answer"
+ " feedback. Maybe none were loaded?")
# Break from the for loop so we don't give the user
# a chance to respond again.
break
# If response was START, randomly select a robot response to
# the user selecting START, and break out of response loop.
elif "START" in response:
try:
self._ros_node.send_robot_command("DO",
response="ROBOT_NOT_SPEAKING",
timeout=datetime.timedelta(seconds=int(
self.WAIT_TIME)),
properties=self._start_responses[random.randint(0,
len(self._start_responses)-1)])
except AttributeError:
self._logger.exception("Could not play response to"
+ "user's START. Maybe none were loaded?")
# Break from the for loop so we don't give the user
# a chance to respond again.
break
# We exhausted our allowed number of user responses, so have
# the robot do something instead of waiting more.
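        # (This else pairs with the for loop above: it runs only when the
        # loop completes without hitting a break.)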
else:
# If user was never correct, play robot's correct answer
# feedback and show which answer was correct in the game.
if "CORRECT" in response_to_get:
try:
self._ros_node.send_opal_command("SHOW_CORRECT")
self._ros_node.send_robot_command("DO",
response="ROBOT_NOT_SPEAKING",
timeout=datetime.timedelta(seconds=int(
self.WAIT_TIME)),
properties=self._answer_feedback[random.randint(0,
len(self._answer_feedback)-1)])
# Pause after speaking before hiding correct again.
time.sleep(self.ANSWER_FEEDBACK_PAUSE_TIME)
self._ros_node.send_opal_command("HIDE_CORRECT")
except AttributeError:
self._logger.exception("Could not play robot's answer"
+ " feedback! Maybe none were loaded?")
# If user never selects START (which is used to ask the user
# if they are ready to play), stop all stories and repeating
# scripts, continue with main script so we go to the end.
elif "START" in response_to_get:
self._repeating = False
                self._doing_story = False
# We got a user response and responded to it!
return True
def skip_wait_for_response(self):
""" Skip waiting for a response; treat the skipped response as
a NO or INCORRECT response.
"""
# If the response to wait for was CORRECT or INCORRECT,
# randomly select a robot response to an incorrect user
# action.
if "CORRECT" in self._last_response_to_get:
try:
self._ros_node.send_robot_command("DO",
response="ROBOT_NOT_SPEAKING",
timeout=datetime.timedelta(seconds=int(self.WAIT_TIME)),
properties=self._incorrect_responses[random.randint(0, \
len(self._incorrect_responses)-1)])
except AttributeError:
self._logger.exception("Could not play an incorrect "
+ "response. Maybe none were loaded?")
# If response to wait for was YES or NO, randomly select a
# robot response for a NO user action.
elif "NO" in self._last_response_to_get:
try:
self._ros_node.send_robot_command("DO",
response="ROBOT_NOT_SPEAKING",
timeout=datetime.timedelta(seconds=int(self.WAIT_TIME)),
properties=self._no_responses[random.randint(0,
len(self._no_responses)-1)])
except AttributeError:
self._logger.exception("Could not play a response to "
+ "user's NO. Maybe none were loaded?")
def set_end_game(self):
""" End the game gracefully -- stop any stories or repeating
scripts, go back to main session script and finish.
"""
# For now, we just need to set a flag indicating we should end
# the game. When we check whether we should load another story
# or repeat a repeating script, this flag will be used to skip
# back to the main session script, to the end of the game.
self._end_game = True
def set_start_level(self, level):
""" When the game starts, a level to start at can be provided.
Pass this to the personalization manager to deal with, since it
deals with picking the levels of stories to play.
"""
self._personalization_man.set_start_level(level)
def pause_game_timer(self):
""" Track how much time we spend paused so when we check
whether we have reached the max game time, we don't include
time spent paused.
"""
self._pause_start_time = datetime.datetime.now()
def resume_game_timer(self):
""" Add how much time we spent paused to our total time spent
paused.
"""
# Since this function could theoretically be called before we
# get a call to pause_game_timer, we have to check that there
# is a pause start time, and then later, reset it so we can't
# add the same pause length multiple times to our total pause
# time.
if self._pause_start_time is not None:
self._total_time_paused += datetime.datetime.now() \
- self._pause_start_time
# Reset pause start time.
self._pause_start_time = None
def wait_for_last_response_again(self):
""" Wait for the same response that we just waited for again,
with the same parameters for the response and the timeout.
"""
return self.wait_for_response(self._last_response_to_get,
self._last_response_timeout)
def _load_answers(self, answer_list):
""" Load the answer graphics for this story """
# We are given a list of words that indicate what the answer
# options are. By convention, the first word is probably the
# correct answer; the others are incorrect answers. However,
# we won't set this now because this convention may not hold.
# We expect the SET_CORRECT OpalCommand to be used to set
# which answers are correct or incorrect.
# split the list of answers on commas.
answers = answer_list.strip().split(',')
# Shuffle answers to display them in a random order.
random.shuffle(answers)
# Load in the graphic for each answer.
for answer in answers:
toload = {}
# Remove whitespace from name before using it.
toload["name"] = answer.strip()
toload["tag"] = "PlayObject"
toload["slot"] = answers.index(answer) + 1
toload["draggable"] = False
toload["isAnswerSlot"] = True
self._ros_node.send_opal_command("LOAD_OBJECT", json.dumps(toload))
def _load_next_story(self):
""" Get the next story, set up the game scene with scene and
answer slots, and load scene graphics.
"""
# If we've told the max number of stories, or if we've reached
# the max game time, don't load another story even though we
# were told to load one -- instead, play error message from
# robot saying we have to be done now.
if self._stories_told >= self._max_stories \
or ((datetime.datetime.now() - self._start_time) \
- self._total_time_paused >= self._max_game_time) or self._end_game:
self._logger.info("We were told to load another story, but we've "
+ "already played the maximum number of stories or we ran"
" out of time! Skipping and ending now.")
self._doing_story = False
try:
self._ros_node.send_robot_command("DO",
response="ROBOT_NOT_SPEAKING",
timeout=datetime.timedelta(seconds=int(self.WAIT_TIME)),
properties=self._max_stories_reached
                    [random.randint(0, len(self._max_stories_reached)-1)])
except AttributeError:
self._logger.exception("Could not play a max stories reached "
+ "response. Maybe none were loaded?")
# We were either told to play another story because a
# repeating script loads a story and the max number of
# repetitions is greater than the max number of stories,
# so more stories were requested than can be played, or
# because we ran out of time and were supposed to play more
# stories than we have time for. Either way, stop the
# repeating script if there is one.
self._repeating = False
return
# Get the details for the next story.
try:
scenes, in_order, num_answers = \
self._personalization_man.get_next_story_details()
except NoStoryFound:
# If no story was found, we can't load the story!
self._logger.exception("Cannot load story - no story to load was" +
" found!")
self._doing_story = False
return
# Set up the story scene in the game.
setup = {}
setup["numScenes"] = len(scenes)
setup["scenesInOrder"] = in_order
setup["numAnswers"] = num_answers
self._ros_node.send_opal_command("SETUP_STORY_SCENE", json.dumps(setup))
# Load the scene graphics.
for scene in scenes:
toload = {}
toload["name"] = "scenes/" + scene
toload["tag"] = "PlayObject"
toload["slot"] = scenes.index(scene) + 1
if not in_order:
toload["correctSlot"] = scenes.index(scene) + 1
toload["draggable"] = False if in_order else True
toload["isAnswerSlot"] = False
self._ros_node.send_opal_command("LOAD_OBJECT", json.dumps(toload))
# Tell the personalization manager that we loaded the story so
# it can keep track of which stories have been played.
self._personalization_man.record_story_loaded()
| personal-robots/sar_social_stories | src/ss_script_handler.py | Python | mit | 43,375 | 0.00302 |
#!/usr/bin/env python3
# Copyright (c) 2018-2019 TurboCoin
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests some generic aspects of the RPC interface."""
from test_framework.authproxy import JSONRPCException
from test_framework.test_framework import TurbocoinTestFramework
from test_framework.util import assert_equal, assert_greater_than_or_equal
def expect_http_status(expected_http_status, expected_rpc_code,
fcn, *args):
try:
fcn(*args)
raise AssertionError("Expected RPC error %d, got none" % expected_rpc_code)
except JSONRPCException as exc:
assert_equal(exc.error["code"], expected_rpc_code)
assert_equal(exc.http_status, expected_http_status)
class RPCInterfaceTest(TurbocoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def test_getrpcinfo(self):
self.log.info("Testing getrpcinfo...")
info = self.nodes[0].getrpcinfo()
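        # The only active command should be this getrpcinfo call itself.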
assert_equal(len(info['active_commands']), 1)
command = info['active_commands'][0]
assert_equal(command['method'], 'getrpcinfo')
assert_greater_than_or_equal(command['duration'], 0)
def test_batch_request(self):
self.log.info("Testing basic JSON-RPC batch request...")
results = self.nodes[0].batch([
# A basic request that will work fine.
{"method": "getblockcount", "id": 1},
# Request that will fail. The whole batch request should still
# work fine.
{"method": "invalidmethod", "id": 2},
# Another call that should succeed.
{"method": "getbestblockhash", "id": 3},
])
result_by_id = {}
for res in results:
result_by_id[res["id"]] = res
assert_equal(result_by_id[1]['error'], None)
assert_equal(result_by_id[1]['result'], 0)
assert_equal(result_by_id[2]['error']['code'], -32601)
assert_equal(result_by_id[2]['result'], None)
assert_equal(result_by_id[3]['error'], None)
assert result_by_id[3]['result'] is not None
def test_http_status_codes(self):
self.log.info("Testing HTTP status codes for JSON-RPC requests...")
expect_http_status(404, -32601, self.nodes[0].invalidmethod)
expect_http_status(500, -8, self.nodes[0].getblockhash, 42)
def run_test(self):
self.test_getrpcinfo()
self.test_batch_request()
self.test_http_status_codes()
if __name__ == '__main__':
RPCInterfaceTest().main()
| Phonemetra/TurboCoin | test/functional/interface_rpc.py | Python | mit | 2,668 | 0.001124 |
# Authors:
# Jason Gerard DeRose <jderose@redhat.com>
#
# Copyright (C) 2008 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Test the `ipalib.frontend` module.
"""
# FIXME: Pylint errors
# pylint: disable=no-member
from ipatests.util import raises, read_only
from ipatests.util import ClassChecker, create_test_api
from ipatests.util import assert_equal
from ipalib.constants import TYPE_ERROR
from ipalib.base import NameSpace
from ipalib import frontend, backend, plugable, errors, parameters, config
from ipalib import output, messages
from ipalib.parameters import Str
from ipapython.version import API_VERSION
def test_RULE_FLAG():
assert frontend.RULE_FLAG == 'validation_rule'
def test_rule():
"""
Test the `ipalib.frontend.rule` function.
"""
flag = frontend.RULE_FLAG
rule = frontend.rule
def my_func():
pass
assert not hasattr(my_func, flag)
rule(my_func)
assert getattr(my_func, flag) is True
@rule
def my_func2():
pass
assert getattr(my_func2, flag) is True
def test_is_rule():
"""
Test the `ipalib.frontend.is_rule` function.
"""
is_rule = frontend.is_rule
flag = frontend.RULE_FLAG
class no_call(object):
def __init__(self, value):
if value is not None:
assert value in (True, False)
setattr(self, flag, value)
class call(no_call):
def __call__(self):
pass
assert is_rule(call(True))
assert not is_rule(no_call(True))
assert not is_rule(call(False))
assert not is_rule(call(None))
class test_HasParam(ClassChecker):
"""
Test the `ipalib.frontend.Command` class.
"""
_cls = frontend.HasParam
def test_get_param_iterable(self):
"""
Test the `ipalib.frontend.HasParam._get_param_iterable` method.
"""
api = 'the api instance'
class WithTuple(self.cls):
takes_stuff = ('one', 'two')
o = WithTuple(api)
assert o._get_param_iterable('stuff') is WithTuple.takes_stuff
junk = ('three', 'four')
class WithCallable(self.cls):
def takes_stuff(self):
return junk
o = WithCallable(api)
assert o._get_param_iterable('stuff') is junk
class WithParam(self.cls):
takes_stuff = parameters.Str('five')
o = WithParam(api)
assert o._get_param_iterable('stuff') == (WithParam.takes_stuff,)
class WithStr(self.cls):
takes_stuff = 'six'
o = WithStr(api)
assert o._get_param_iterable('stuff') == ('six',)
class Wrong(self.cls):
takes_stuff = ['seven', 'eight']
o = Wrong(api)
e = raises(TypeError, o._get_param_iterable, 'stuff')
assert str(e) == '%s.%s must be a tuple, callable, or spec; got %r' % (
'Wrong', 'takes_stuff', Wrong.takes_stuff
)
def test_filter_param_by_context(self):
"""
Test the `ipalib.frontend.HasParam._filter_param_by_context` method.
"""
api = 'the api instance'
class Example(self.cls):
def get_stuff(self):
return (
'one', # Make sure create_param() is called for each spec
'two',
parameters.Str('three', include='cli'),
parameters.Str('four', exclude='server'),
parameters.Str('five', exclude=['whatever', 'cli']),
)
o = Example(api)
# Test when env is None:
params = list(o._filter_param_by_context('stuff'))
assert list(p.name for p in params) == [
'one', 'two', 'three', 'four', 'five'
]
for p in params:
assert type(p) is parameters.Str
# Test when env.context == 'cli':
cli = config.Env(context='cli')
assert cli.context == 'cli'
params = list(o._filter_param_by_context('stuff', cli))
assert list(p.name for p in params) == ['one', 'two', 'three', 'four']
for p in params:
assert type(p) is parameters.Str
# Test when env.context == 'server'
server = config.Env(context='server')
assert server.context == 'server'
params = list(o._filter_param_by_context('stuff', server))
assert list(p.name for p in params) == ['one', 'two', 'five']
for p in params:
assert type(p) is parameters.Str
# Test with no get_stuff:
class Missing(self.cls):
pass
o = Missing(api)
gen = o._filter_param_by_context('stuff')
e = raises(NotImplementedError, list, gen)
assert str(e) == 'Missing.get_stuff()'
# Test when get_stuff is not callable:
class NotCallable(self.cls):
get_stuff = ('one', 'two')
o = NotCallable(api)
gen = o._filter_param_by_context('stuff')
e = raises(TypeError, list, gen)
assert str(e) == '%s.%s must be a callable; got %r' % (
'NotCallable', 'get_stuff', NotCallable.get_stuff
)
class test_Command(ClassChecker):
"""
Test the `ipalib.frontend.Command` class.
"""
_cls = frontend.Command
def get_subcls(self):
"""
Return a standard subclass of `ipalib.frontend.Command`.
"""
class Rule(object):
def __init__(self, name):
self.name = name
def __call__(self, _, value):
if value != self.name:
return _('must equal %r') % self.name
default_from = parameters.DefaultFrom(
lambda arg: arg,
'default_from'
)
normalizer = lambda value: value.lower()
class example(self.cls):
takes_options = (
parameters.Str('option0', Rule('option0'),
normalizer=normalizer,
default_from=default_from,
),
parameters.Str('option1', Rule('option1'),
normalizer=normalizer,
default_from=default_from,
),
)
return example
def get_instance(self, args=tuple(), options=tuple()):
"""
Helper method used to test args and options.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
class example(self.cls):
takes_args = args
takes_options = options
o = example(api)
o.finalize()
return o
def test_class(self):
"""
Test the `ipalib.frontend.Command` class.
"""
assert self.cls.takes_options == tuple()
assert self.cls.takes_args == tuple()
def test_get_args(self):
"""
Test the `ipalib.frontend.Command.get_args` method.
"""
api = 'the api instance'
assert list(self.cls(api).get_args()) == []
args = ('login', 'stuff')
o = self.get_instance(args=args)
assert tuple(o.get_args()) == args
def test_get_options(self):
"""
Test the `ipalib.frontend.Command.get_options` method.
"""
api = 'the api instance'
options = list(self.cls(api).get_options())
assert len(options) == 1
assert options[0].name == 'version'
options = ('verbose', 'debug')
o = self.get_instance(options=options)
assert len(tuple(o.get_options())) == 3
assert 'verbose' in tuple(o.get_options())
assert 'debug' in tuple(o.get_options())
def test_args(self):
"""
Test the ``ipalib.frontend.Command.args`` instance attribute.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
o = self.cls(api)
o.finalize()
assert type(o.args) is plugable.NameSpace
assert len(o.args) == 0
args = ('destination', 'source?')
ns = self.get_instance(args=args).args
assert type(ns) is plugable.NameSpace
assert len(ns) == len(args)
assert list(ns) == ['destination', 'source']
assert type(ns.destination) is parameters.Str
assert type(ns.source) is parameters.Str
assert ns.destination.required is True
assert ns.destination.multivalue is False
assert ns.source.required is False
assert ns.source.multivalue is False
# Test TypeError:
e = raises(TypeError, self.get_instance, args=(u'whatever',))
assert str(e) == TYPE_ERROR % (
'spec', (str, parameters.Param), u'whatever', unicode)
# Test ValueError, required after optional:
e = raises(ValueError, self.get_instance, args=('arg1?', 'arg2'))
assert str(e) == "arg2: required argument after optional in %s arguments ['arg1?', 'arg2']" % (self.get_instance().name)
# Test ValueError, scalar after multivalue:
e = raises(ValueError, self.get_instance, args=('arg1+', 'arg2'))
assert str(e) == 'arg2: only final argument can be multivalue'
def test_max_args(self):
"""
Test the ``ipalib.frontend.Command.max_args`` instance attribute.
"""
o = self.get_instance()
assert o.max_args == 0
o = self.get_instance(args=('one?',))
assert o.max_args == 1
o = self.get_instance(args=('one', 'two?'))
assert o.max_args == 2
o = self.get_instance(args=('one', 'multi+',))
assert o.max_args is None
o = self.get_instance(args=('one', 'multi*',))
assert o.max_args is None
def test_options(self):
"""
Test the ``ipalib.frontend.Command.options`` instance attribute.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
o = self.cls(api)
o.finalize()
assert type(o.options) is plugable.NameSpace
assert len(o.options) == 1
options = ('target', 'files*')
ns = self.get_instance(options=options).options
assert type(ns) is plugable.NameSpace
assert len(ns) == len(options) + 1
assert list(ns) == ['target', 'files', 'version']
assert type(ns.target) is parameters.Str
assert type(ns.files) is parameters.Str
assert ns.target.required is True
assert ns.target.multivalue is False
assert ns.files.required is False
assert ns.files.multivalue is True
def test_output(self):
"""
Test the ``ipalib.frontend.Command.output`` instance attribute.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
inst = self.cls(api)
inst.finalize()
assert type(inst.output) is plugable.NameSpace
assert list(inst.output) == ['result']
assert type(inst.output.result) is output.Output
def test_iter_output(self):
"""
Test the ``ipalib.frontend.Command._iter_output`` instance attribute.
"""
api = 'the api instance'
class Example(self.cls):
pass
inst = Example(api)
inst.has_output = tuple()
assert list(inst._iter_output()) == []
wrong = ['hello', 'world']
inst.has_output = wrong
e = raises(TypeError, list, inst._iter_output())
assert str(e) == 'Example.has_output: need a %r; got a %r: %r' % (
tuple, list, wrong
)
wrong = ('hello', 17)
inst.has_output = wrong
e = raises(TypeError, list, inst._iter_output())
assert str(e) == 'Example.has_output[1]: need a %r; got a %r: %r' % (
(str, output.Output), int, 17
)
okay = ('foo', output.Output('bar'), 'baz')
inst.has_output = okay
items = list(inst._iter_output())
assert len(items) == 3
assert list(o.name for o in items) == ['foo', 'bar', 'baz']
for o in items:
assert type(o) is output.Output
def test_soft_validate(self):
"""
Test the `ipalib.frontend.Command.soft_validate` method.
"""
class api(object):
env = config.Env(context='cli')
@staticmethod
def is_production_mode():
return False
class user_add(frontend.Command):
takes_args = parameters.Str('uid',
normalizer=lambda value: value.lower(),
default_from=lambda givenname, sn: givenname[0] + sn,
)
takes_options = ('givenname', 'sn')
cmd = user_add(api)
cmd.finalize()
assert list(cmd.params) == ['givenname', 'sn', 'uid', 'version']
ret = cmd.soft_validate({})
assert sorted(ret['values']) == ['version']
assert sorted(ret['errors']) == ['givenname', 'sn', 'uid']
assert cmd.soft_validate(dict(givenname=u'First', sn=u'Last')) == dict(
values=dict(givenname=u'First', sn=u'Last', uid=u'flast',
version=None),
errors=dict(),
)
def test_convert(self):
"""
Test the `ipalib.frontend.Command.convert` method.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
kw = dict(
option0=u'1.5',
option1=u'7',
)
o = self.subcls(api)
o.finalize()
for (key, value) in o.convert(**kw).iteritems():
assert_equal(unicode(kw[key]), value)
def test_normalize(self):
"""
Test the `ipalib.frontend.Command.normalize` method.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
kw = dict(
option0=u'OPTION0',
option1=u'OPTION1',
)
norm = dict((k, v.lower()) for (k, v) in kw.items())
sub = self.subcls(api)
sub.finalize()
assert sub.normalize(**kw) == norm
def test_get_default(self):
"""
Test the `ipalib.frontend.Command.get_default` method.
"""
# FIXME: Add an updated unit tests for get_default()
def test_default_from_chaining(self):
"""
Test chaining of parameters through default_from.
"""
class my_cmd(self.cls):
takes_options = (
Str('option0'),
Str('option1', default_from=lambda option0: option0),
Str('option2', default_from=lambda option1: option1),
)
def run(self, *args, **options):
return dict(result=options)
kw = dict(option0=u'some value')
(api, home) = create_test_api()
api.finalize()
o = my_cmd(api)
o.finalize()
e = o(**kw) # pylint: disable=not-callable
assert type(e) is dict
assert 'result' in e
assert 'option2' in e['result']
assert e['result']['option2'] == u'some value'
def test_validate(self):
"""
Test the `ipalib.frontend.Command.validate` method.
"""
class api(object):
env = config.Env(context='cli')
@staticmethod
def is_production_mode():
return False
sub = self.subcls(api)
sub.finalize()
# Check with valid values
okay = dict(
option0=u'option0',
option1=u'option1',
another_option='some value',
version=API_VERSION,
)
sub.validate(**okay)
# Check with an invalid value
fail = dict(okay)
fail['option0'] = u'whatever'
e = raises(errors.ValidationError, sub.validate, **fail)
assert_equal(e.name, 'option0')
assert_equal(e.value, u'whatever')
assert_equal(e.error, u"must equal 'option0'")
assert e.rule.__class__.__name__ == 'Rule'
assert e.index is None
# Check with a missing required arg
fail = dict(okay)
fail.pop('option1')
e = raises(errors.RequirementError, sub.validate, **fail)
assert e.name == 'option1'
def test_execute(self):
"""
Test the `ipalib.frontend.Command.execute` method.
"""
api = 'the api instance'
o = self.cls(api)
e = raises(NotImplementedError, o.execute)
assert str(e) == 'Command.execute()'
def test_args_options_2_params(self):
"""
Test the `ipalib.frontend.Command.args_options_2_params` method.
"""
# Test that ZeroArgumentError is raised:
o = self.get_instance()
e = raises(errors.ZeroArgumentError, o.args_options_2_params, 1)
assert e.name == 'example'
# Test that MaxArgumentError is raised (count=1)
o = self.get_instance(args=('one?',))
e = raises(errors.MaxArgumentError, o.args_options_2_params, 1, 2)
assert e.name == 'example'
assert e.count == 1
assert str(e) == "command 'example' takes at most 1 argument"
# Test that MaxArgumentError is raised (count=2)
o = self.get_instance(args=('one', 'two?'))
e = raises(errors.MaxArgumentError, o.args_options_2_params, 1, 2, 3)
assert e.name == 'example'
assert e.count == 2
assert str(e) == "command 'example' takes at most 2 arguments"
# Test that OptionError is raised when an extra option is given:
o = self.get_instance()
e = raises(errors.OptionError, o.args_options_2_params, bad_option=True)
assert e.option == 'bad_option'
# Test that OverlapError is raised:
o = self.get_instance(args=('one', 'two'), options=('three', 'four'))
e = raises(errors.OverlapError, o.args_options_2_params,
1, 2, three=3, two=2, four=4, one=1)
assert e.names == ['one', 'two']
# Test the permutations:
o = self.get_instance(args=('one', 'two*'), options=('three', 'four'))
mthd = o.args_options_2_params
assert mthd() == dict()
assert mthd(1) == dict(one=1)
assert mthd(1, 2) == dict(one=1, two=(2,))
assert mthd(1, 21, 22, 23) == dict(one=1, two=(21, 22, 23))
assert mthd(1, (21, 22, 23)) == dict(one=1, two=(21, 22, 23))
assert mthd(three=3, four=4) == dict(three=3, four=4)
assert mthd(three=3, four=4, one=1, two=2) == \
dict(one=1, two=2, three=3, four=4)
assert mthd(1, 21, 22, 23, three=3, four=4) == \
dict(one=1, two=(21, 22, 23), three=3, four=4)
assert mthd(1, (21, 22, 23), three=3, four=4) == \
dict(one=1, two=(21, 22, 23), three=3, four=4)
def test_args_options_2_entry(self):
"""
Test `ipalib.frontend.Command.args_options_2_entry` method.
"""
class my_cmd(self.cls):
takes_args = (
parameters.Str('one', attribute=True),
parameters.Str('two', attribute=False),
)
takes_options = (
parameters.Str('three', attribute=True, multivalue=True),
parameters.Str('four', attribute=True, multivalue=False),
)
def run(self, *args, **kw):
return self.args_options_2_entry(*args, **kw)
args = ('one', 'two')
kw = dict(three=('three1', 'three2'), four='four')
(api, home) = create_test_api()
api.finalize()
o = my_cmd(api)
o.finalize()
e = o.run(*args, **kw)
assert type(e) is dict
assert 'one' in e
assert 'two' not in e
assert 'three' in e
assert 'four' in e
assert e['one'] == 'one'
assert e['three'] == ['three1', 'three2']
assert e['four'] == 'four'
def test_params_2_args_options(self):
"""
Test the `ipalib.frontend.Command.params_2_args_options` method.
"""
o = self.get_instance(args='one', options='two')
assert o.params_2_args_options() == ((None,), {})
assert o.params_2_args_options(one=1) == ((1,), {})
assert o.params_2_args_options(two=2) == ((None,), dict(two=2))
assert o.params_2_args_options(two=2, one=1) == ((1,), dict(two=2))
def test_run(self):
"""
Test the `ipalib.frontend.Command.run` method.
"""
class my_cmd(self.cls):
def execute(self, *args, **kw):
return ('execute', args, kw)
def forward(self, *args, **kw):
return ('forward', args, kw)
args = ('Hello,', 'world,')
kw = dict(how_are='you', on_this='fine day?', version=API_VERSION)
# Test in server context:
(api, home) = create_test_api(in_server=True)
api.finalize()
o = my_cmd(api)
assert o.run.__func__ is self.cls.run.__func__
out = o.run(*args, **kw)
assert ('execute', args, kw) == out
# Test in non-server context
(api, home) = create_test_api(in_server=False)
api.finalize()
o = my_cmd(api)
assert o.run.__func__ is self.cls.run.__func__
assert ('forward', args, kw) == o.run(*args, **kw)
def test_messages(self):
"""
Test correct handling of messages
"""
class TestMessage(messages.PublicMessage):
type = 'info'
format = 'This is a message.'
errno = 1234
class my_cmd(self.cls):
def execute(self, *args, **kw):
result = {'name': 'execute'}
messages.add_message(kw['version'], result, TestMessage())
return result
def forward(self, *args, **kw):
result = {'name': 'forward'}
messages.add_message(kw['version'], result, TestMessage())
return result
args = ('Hello,', 'world,')
kw = dict(how_are='you', on_this='fine day?', version=API_VERSION)
expected = [TestMessage().to_dict()]
# Test in server context:
(api, home) = create_test_api(in_server=True)
api.finalize()
o = my_cmd(api)
assert o.run.__func__ is self.cls.run.__func__
assert {'name': 'execute', 'messages': expected} == o.run(*args, **kw)
# Test in non-server context
(api, home) = create_test_api(in_server=False)
api.finalize()
o = my_cmd(api)
assert o.run.__func__ is self.cls.run.__func__
assert {'name': 'forward', 'messages': expected} == o.run(*args, **kw)
def test_validate_output_basic(self):
"""
Test the `ipalib.frontend.Command.validate_output` method.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
class Example(self.cls):
has_output = ('foo', 'bar', 'baz')
inst = Example(api)
inst.finalize()
# Test with wrong type:
wrong = ('foo', 'bar', 'baz')
e = raises(TypeError, inst.validate_output, wrong)
assert str(e) == '%s.validate_output(): need a %r; got a %r: %r' % (
'Example', dict, tuple, wrong
)
# Test with a missing keys:
wrong = dict(bar='hello')
e = raises(ValueError, inst.validate_output, wrong)
assert str(e) == '%s.validate_output(): missing keys %r in %r' % (
'Example', ['baz', 'foo'], wrong
)
# Test with extra keys:
wrong = dict(foo=1, bar=2, baz=3, fee=4, azz=5)
e = raises(ValueError, inst.validate_output, wrong)
assert str(e) == '%s.validate_output(): unexpected keys %r in %r' % (
'Example', ['azz', 'fee'], wrong
)
# Test with different keys:
wrong = dict(baz=1, xyzzy=2, quux=3)
e = raises(ValueError, inst.validate_output, wrong)
assert str(e) == '%s.validate_output(): missing keys %r in %r' % (
'Example', ['bar', 'foo'], wrong
), str(e)
def test_validate_output_per_type(self):
"""
Test `ipalib.frontend.Command.validate_output` per-type validation.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
class Complex(self.cls):
has_output = (
output.Output('foo', int),
output.Output('bar', list),
)
inst = Complex(api)
inst.finalize()
wrong = dict(foo=17.9, bar=[18])
e = raises(TypeError, inst.validate_output, wrong)
assert str(e) == '%s:\n output[%r]: need %r; got %r: %r' % (
'Complex.validate_output()', 'foo', int, float, 17.9
)
wrong = dict(foo=18, bar=17)
e = raises(TypeError, inst.validate_output, wrong)
assert str(e) == '%s:\n output[%r]: need %r; got %r: %r' % (
'Complex.validate_output()', 'bar', list, int, 17
)
def test_validate_output_nested(self):
"""
Test `ipalib.frontend.Command.validate_output` nested validation.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
class Subclass(output.ListOfEntries):
pass
# Test nested validation:
class nested(self.cls):
has_output = (
output.Output('hello', int),
Subclass('world'),
)
inst = nested(api)
inst.finalize()
okay = dict(foo='bar')
nope = ('aye', 'bee')
wrong = dict(hello=18, world=[okay, nope, okay])
e = raises(TypeError, inst.validate_output, wrong)
assert str(e) == output.emsg % (
'nested', 'Subclass', 'world', 1, dict, tuple, nope
)
wrong = dict(hello=18, world=[okay, okay, okay, okay, nope])
e = raises(TypeError, inst.validate_output, wrong)
assert str(e) == output.emsg % (
'nested', 'Subclass', 'world', 4, dict, tuple, nope
)
def test_get_output_params(self):
"""
Test the `ipalib.frontend.Command.get_output_params` method.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
class example(self.cls):
has_output_params = (
'one',
'two',
'three',
)
takes_args = (
'foo',
)
takes_options = (
Str('bar', flags='no_output'),
'baz',
)
inst = example(api)
inst.finalize()
assert list(inst.get_output_params()) == [
'one', 'two', 'three', inst.params.foo, inst.params.baz
]
assert list(inst.output_params) == ['one', 'two', 'three', 'foo', 'baz']
class test_LocalOrRemote(ClassChecker):
"""
Test the `ipalib.frontend.LocalOrRemote` class.
"""
_cls = frontend.LocalOrRemote
def test_init(self):
"""
Test the `ipalib.frontend.LocalOrRemote.__init__` method.
"""
class api(object):
@staticmethod
def is_production_mode():
return False
o = self.cls(api)
o.finalize()
assert list(o.args) == []
assert list(o.options) == ['server', 'version']
op = o.options.server
assert op.required is False
assert op.default is False
def test_run(self):
"""
Test the `ipalib.frontend.LocalOrRemote.run` method.
"""
class example(self.cls):
takes_args = 'key?'
def forward(self, *args, **options):
return dict(result=('forward', args, options))
def execute(self, *args, **options):
return dict(result=('execute', args, options))
# Test when in_server=False:
(api, home) = create_test_api(in_server=False)
api.add_plugin(example)
api.finalize()
cmd = api.Command.example
assert cmd(version=u'2.47') == dict(
result=('execute', (None,), dict(version=u'2.47', server=False))
)
assert cmd(u'var', version=u'2.47') == dict(
result=('execute', (u'var',), dict(version=u'2.47', server=False))
)
assert cmd(server=True, version=u'2.47') == dict(
result=('forward', (None,), dict(version=u'2.47', server=True))
)
assert cmd(u'var', server=True, version=u'2.47') == dict(
result=('forward', (u'var',), dict(version=u'2.47', server=True))
)
# Test when in_server=True (should always call execute):
(api, home) = create_test_api(in_server=True)
api.add_plugin(example)
api.finalize()
cmd = api.Command.example
assert cmd(version=u'2.47') == dict(
result=('execute', (None,), dict(version=u'2.47', server=False))
)
assert cmd(u'var', version=u'2.47') == dict(
result=('execute', (u'var',), dict(version=u'2.47', server=False))
)
assert cmd(server=True, version=u'2.47') == dict(
result=('execute', (None,), dict(version=u'2.47', server=True))
)
assert cmd(u'var', server=True, version=u'2.47') == dict(
result=('execute', (u'var',), dict(version=u'2.47', server=True))
)
class test_Object(ClassChecker):
"""
Test the `ipalib.frontend.Object` class.
"""
_cls = frontend.Object
def test_class(self):
"""
Test the `ipalib.frontend.Object` class.
"""
assert self.cls.backend is None
assert self.cls.methods is None
assert self.cls.params is None
assert self.cls.params_minus_pk is None
assert self.cls.takes_params == tuple()
def test_init(self):
"""
Test the `ipalib.frontend.Object.__init__` method.
"""
# Setup for test:
class DummyAttribute(object):
def __init__(self, obj_name, attr_name, name=None):
self.obj_name = obj_name
self.attr_name = attr_name
if name is None:
self.name = '%s_%s' % (obj_name, attr_name)
else:
self.name = name
self.param = frontend.create_param(attr_name)
def __clone__(self, attr_name):
return self.__class__(
self.obj_name,
self.attr_name,
getattr(self, attr_name)
)
def get_attributes(cnt, format):
for name in ['other', 'user', 'another']:
for i in xrange(cnt):
yield DummyAttribute(name, format % i)
cnt = 10
methods_format = 'method_%d'
class FakeAPI(object):
Method = plugable.NameSpace(
get_attributes(cnt, methods_format)
)
def __contains__(self, key):
return hasattr(self, key)
def __getitem__(self, key):
return getattr(self, key)
def is_production_mode(self):
return False
api = FakeAPI()
assert len(api.Method) == cnt * 3
class user(self.cls):
pass
# Actually perform test:
o = user(api)
assert read_only(o, 'api') is api
namespace = o.methods
assert isinstance(namespace, plugable.NameSpace)
assert len(namespace) == cnt
f = methods_format
for i in xrange(cnt):
attr_name = f % i
attr = namespace[attr_name]
assert isinstance(attr, DummyAttribute)
assert attr is getattr(namespace, attr_name)
assert attr.obj_name == 'user'
assert attr.attr_name == attr_name
assert attr.name == '%s_%s' % ('user', attr_name)
# Test params instance attribute
o = self.cls(api)
ns = o.params
assert type(ns) is plugable.NameSpace
assert len(ns) == 0
class example(self.cls):
takes_params = ('banana', 'apple')
o = example(api)
ns = o.params
assert type(ns) is plugable.NameSpace
assert len(ns) == 2, repr(ns)
assert list(ns) == ['banana', 'apple']
for p in ns():
assert type(p) is parameters.Str
assert p.required is True
assert p.multivalue is False
def test_primary_key(self):
"""
Test the `ipalib.frontend.Object.primary_key` attribute.
"""
(api, home) = create_test_api()
api.finalize()
# Test with no primary keys:
class example1(self.cls):
takes_params = (
'one',
'two',
)
o = example1(api)
assert o.primary_key is None
# Test with 1 primary key:
class example2(self.cls):
takes_params = (
'one',
'two',
parameters.Str('three', primary_key=True),
'four',
)
o = example2(api)
pk = o.primary_key
assert type(pk) is parameters.Str
assert pk.name == 'three'
assert pk.primary_key is True
assert o.params[2] is o.primary_key
assert isinstance(o.params_minus_pk, plugable.NameSpace)
assert list(o.params_minus_pk) == ['one', 'two', 'four']
# Test with multiple primary_key:
class example3(self.cls):
takes_params = (
parameters.Str('one', primary_key=True),
parameters.Str('two', primary_key=True),
'three',
parameters.Str('four', primary_key=True),
)
o = example3(api)
e = raises(ValueError, o.finalize)
assert str(e) == \
'example3 (Object) has multiple primary keys: one, two, four'
def test_backend(self):
"""
Test the `ipalib.frontend.Object.backend` attribute.
"""
(api, home) = create_test_api()
class ldap(backend.Backend):
whatever = 'It worked!'
api.add_plugin(ldap)
class user(frontend.Object):
backend_name = 'ldap'
api.add_plugin(user)
api.finalize()
b = api.Object.user.backend
assert isinstance(b, ldap)
assert b.whatever == 'It worked!'
def test_get_dn(self):
"""
Test the `ipalib.frontend.Object.get_dn` method.
"""
api = 'the api instance'
o = self.cls(api)
e = raises(NotImplementedError, o.get_dn, 'primary key')
assert str(e) == 'Object.get_dn()'
class user(self.cls):
pass
o = user(api)
e = raises(NotImplementedError, o.get_dn, 'primary key')
assert str(e) == 'user.get_dn()'
def test_params_minus(self):
"""
Test the `ipalib.frontend.Object.params_minus` method.
"""
class example(self.cls):
takes_params = ('one', 'two', 'three', 'four')
(api, home) = create_test_api()
api.finalize()
o = example(api)
p = o.params
assert tuple(o.params_minus()) == tuple(p())
assert tuple(o.params_minus([])) == tuple(p())
assert tuple(o.params_minus('two', 'three')) == (p.one, p.four)
assert tuple(o.params_minus(['two', 'three'])) == (p.one, p.four)
assert tuple(o.params_minus(p.two, p.three)) == (p.one, p.four)
assert tuple(o.params_minus([p.two, p.three])) == (p.one, p.four)
ns = NameSpace([p.two, p.three])
assert tuple(o.params_minus(ns)) == (p.one, p.four)
class test_Attribute(ClassChecker):
"""
Test the `ipalib.frontend.Attribute` class.
"""
_cls = frontend.Attribute
def test_class(self):
"""
Test the `ipalib.frontend.Attribute` class.
"""
assert self.cls.__bases__ == (plugable.Plugin,)
assert type(self.cls.obj) is property
assert type(self.cls.obj_name) is property
assert type(self.cls.attr_name) is property
def test_init(self):
"""
Test the `ipalib.frontend.Attribute.__init__` method.
"""
user_obj = 'The user frontend.Object instance'
class api(object):
Object = dict(user=user_obj)
@staticmethod
def is_production_mode():
return False
class user_add(self.cls):
pass
o = user_add(api)
assert read_only(o, 'api') is api
assert read_only(o, 'obj') is user_obj
assert read_only(o, 'obj_name') == 'user'
assert read_only(o, 'attr_name') == 'add'
class test_Method(ClassChecker):
"""
Test the `ipalib.frontend.Method` class.
"""
_cls = frontend.Method
def get_api(self, args=tuple(), options=tuple()):
"""
Return a finalized `ipalib.plugable.API` instance.
"""
(api, home) = create_test_api()
class user(frontend.Object):
takes_params = (
'givenname',
'sn',
frontend.Param('uid', primary_key=True),
'initials',
)
class user_verb(self.cls):
takes_args = args
takes_options = options
api.add_plugin(user)
api.add_plugin(user_verb)
api.finalize()
return api
def test_class(self):
"""
Test the `ipalib.frontend.Method` class.
"""
assert self.cls.__bases__ == (frontend.Attribute, frontend.Command)
def test_init(self):
"""
Test the `ipalib.frontend.Method.__init__` method.
"""
api = 'the api instance'
class user_add(self.cls):
pass
o = user_add(api)
assert o.name == 'user_add'
assert o.obj_name == 'user'
assert o.attr_name == 'add'
| pspacek/freeipa | ipatests/test_ipalib/test_frontend.py | Python | gpl-3.0 | 39,007 | 0.001077 |
from unittest import TestCase
from scrapy.http import Response, Request
from scrapy.spider import Spider
from scrapy.contrib.spidermiddleware.referer import RefererMiddleware
class TestRefererMiddleware(TestCase):
def setUp(self):
self.spider = Spider('foo')
self.mw = RefererMiddleware()
def test_process_spider_output(self):
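        # The middleware should stamp each request generated from a response
        # with that response's URL as its Referer header.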
res = Response('http://scrapytest.org')
reqs = [Request('http://scrapytest.org/')]
out = list(self.mw.process_spider_output(res, reqs, self.spider))
self.assertEquals(out[0].headers.get('Referer'),
'http://scrapytest.org')
| hy-2013/scrapy | tests/test_spidermiddleware_referer.py | Python | bsd-3-clause | 639 | 0.001565 |
#coding: utf-8
# Improved Euler method for solving an ordinary differential equation
# dy = f*dx.
import numpy as np
def odeiem(f, y0, x): #for: f(x, y)
    '''f is the differential equation, y0 the initial value, and x the
    given sequence of sample points; note that the argument order of
    f(x, y) is x first, then y.'''
y = np.array([y0])
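    # Predictor-corrector scheme (Heun's method): an explicit Euler step
    # predicts the next value, then the corrector averages the slopes at
    # both ends of the interval:
    #   yp      = y_i + h*f(x_i, y_i)
    #   y_(i+1) = y_i + h/2*(f(x_i, y_i) + f(x_(i+1), yp))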
for i in xrange(len(x)-1):
h = x[i+1]-x[i]
yp = y[i,:]+h*f(x[i],y[i])
yc = y[i,:]+h/2*(f(x[i],y[i])+f(x[i+1],yp))
y = np.vstack([y,yc])
return y
def odeiems(f, y0, x): #for: f(x)
    '''f is the differential equation, y0 the initial value, and x the
    given sequence of sample points; note that f(x) takes the single
    argument x.'''
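    # With f independent of y, the corrector reduces to the trapezoidal
    # rule: y_(i+1) = y_i + h/2*(f(x_i) + f(x_(i+1))).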
y=np.array([y0])
for i in xrange(len(x)-1):
h = x[i+1] - x[i]
yc = y[i,:] + h/2 * (f(x[i]) + f(x[i+1]))
y = np.vstack([y,yc])
return y
if __name__=='__main__':
f = lambda x, y: np.array([2*x, x, x**2]) #f(x, y)
g = lambda x : np.array([2*x, x, x**2]) #f(x)
print odeiem(f, [0, 0, 0], [0, 0.2, 0.4])
print odeiems(g, [0, 0, 0], [0, 0.2, 0.4])
| bravesnow/nurbspy | assistlib/ode.py | Python | gpl-2.0 | 937 | 0.029883 |
import unittest
from typing import List
import utils
# O(n) time. O(n) space. DFS, hash set.
class Solution:
def circularArrayLoop(self, nums: List[int]) -> bool:
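        # Walk forward from each start index, tracking the indices seen in
        # the current walk. Visited elements are zeroed so later walks skip
        # them. A walk aborts when the movement direction flips, when it
        # reaches an already-zeroed element, or when the cycle would have
        # length 1 (nxt == curr); revisiting an index from the current walk
        # means a valid loop exists.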
n = len(nums)
for start, move in enumerate(nums):
if move == 0:
continue
forward = move > 0
curr = start
visited = set()
visited.add(curr)
while True:
move = nums[curr]
if move == 0 or (move > 0) != forward:
break
nums[curr] = 0
nxt = (curr + move) % n
if nxt == curr:
break
if nxt in visited:
return True
visited.add(nxt)
curr = nxt
return False
class Test(unittest.TestCase):
def test(self):
cases = utils.load_test_json(__file__).test_cases
for case in cases:
args = str(case.args)
actual = Solution().circularArrayLoop(**case.args.__dict__)
self.assertEqual(case.expected, actual, msg=args)
if __name__ == '__main__':
unittest.main()
| chrisxue815/leetcode_python | problems/test_0457_dfs_hashset.py | Python | unlicense | 1,180 | 0 |
# Copyright (c) 2015, Max Fillinger <max@max-fillinger.net>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
# The epub format specification is available at http://idpf.org/epub/201
'''Contains the EpubBuilder class to build epub2.0.1 files with the getebook
module.'''
import html
import re
import datetime
import getebook
import os.path
import re
import zipfile
__all__ = ['EpubBuilder', 'EpubTOC', 'Author']
def _normalize(name):
'''Transform "Firstname [Middlenames] Lastname" into
"Lastname, Firstname [Middlenames]".'''
split = name.split()
if len(split) == 1:
return name
    return split[-1] + ', ' + ' '.join(split[0:-1])
def _make_starttag(tag, attrs):
'Write a starttag.'
out = '<' + tag
for key in attrs:
out += ' {}="{}"'.format(key, html.escape(attrs[key]))
out += '>'
return out
def _make_xml_elem(tag, text, attr = []):
'Write a flat xml element.'
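    # e.g. _make_xml_elem('dc:title', 'My Book', [('id', 'title')]) yields
    # an indented '<dc:title id="title">My Book</dc:title>' line.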
out = ' <' + tag
for (key, val) in attr:
out += ' {}="{}"'.format(key, val)
if text:
out += '>{}</{}>\n'.format(text, tag)
else:
out += ' />\n'
return out
class EpubTOC(getebook.TOC):
'Table of contents.'
_head = ((
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<ncx xmlns="http://www.daisy.org/z3986/2005/ncx/" version="2005-1" xml:lang="en-US">\n'
' <head>\n'
' <meta name="dtb:uid" content="{}" />\n'
' <meta name="dtb:depth" content="{}" />\n'
' <meta name="dtb:totalPageCount" content="0" />\n'
' <meta name="dtb:maxPageNumber" content="0" />\n'
' </head>\n'
' <docTitle>\n'
' <text>{}</text>\n'
' </docTitle>\n'
))
_doc_author = ((
' <docAuthor>\n'
' <text>{}</text>\n'
' </docAuthor>\n'
))
_navp = ((
'{0}<navPoint id="nav{1}">\n'
'{0} <navLabel>\n'
'{0} <text>{2}</text>\n'
'{0} </navLabel>\n'
'{0} <content src="{3}" />\n'
))
def _navp_xml(self, entry, indent_lvl):
'Write xml for an entry and all its subentries.'
xml = self._navp.format(' '*indent_lvl, str(entry.no), entry.text,
entry.target)
for sub in entry.entries:
xml += self._navp_xml(sub, indent_lvl+1)
xml += ' '*indent_lvl + '</navPoint>\n'
return xml
def write_xml(self, uid, title, authors):
'Write the xml code for the table of contents.'
xml = self._head.format(uid, self.max_depth, title)
for aut in authors:
xml += self._doc_author.format(aut)
xml += ' <navMap>\n'
for entry in self.entries:
xml += self._navp_xml(entry, 2)
xml += ' </navMap>\n</ncx>'
return xml
class _Fileinfo:
'Information about a component file of an epub.'
def __init__(self, name, in_spine = True, guide_title = None,
guide_type = None):
'''Initialize the object. If the file does not belong in the
reading order, in_spine should be set to False. If it should
appear in the guide, set guide_title and guide_type.'''
self.name = name
        (self.ident, ext) = os.path.splitext(name)
self.in_spine = in_spine
self.guide_title = guide_title
self.guide_type = guide_type
# Infer media-type from file extension
ext = ext.lower()
if ext in ('.htm', '.html', '.xhtml'):
self.media_type = 'application/xhtml+xml'
elif ext in ('.png', '.gif', '.jpeg'):
            self.media_type = 'image/' + ext[1:]
elif ext == '.jpg':
self.media_type = 'image/jpeg'
elif ext == '.css':
self.media_type = 'text/css'
elif ext == '.ncx':
self.media_type = 'application/x-dtbncx+xml'
else:
raise ValueError('Can\'t infer media-type from extension: %s' % ext)
def manifest_entry(self):
'Write the XML element for the manifest.'
return _make_xml_elem('item', '',
[
('href', self.name),
('id', self.ident),
('media-type', self.media_type)
])
def spine_entry(self):
'''Write the XML element for the spine.
(Empty string if in_spine is False.)'''
if self.in_spine:
return _make_xml_elem('itemref', '', [('idref', self.ident)])
else:
return ''
def guide_entry(self):
'''Write the XML element for the guide.
(Empty string if no guide title and type are given.)'''
if self.guide_title and self.guide_type:
return _make_xml_elem('reference', '',
[
('title', self.guide_title),
('type', self.guide_type),
('href', self.name)
])
else:
return ''
class _EpubMeta:
'Metadata entry for an epub file.'
def __init__(self, tag, text, *args):
'''The metadata entry is an XML element. *args is used for
supplying the XML element's attributes as (key, value) pairs.'''
self.tag = tag
self.text = text
self.attr = args
def write_xml(self):
'Write the XML element.'
return _make_xml_elem(self.tag, self.text, self.attr)
def __repr__(self):
'Returns the text.'
return self.text
def __str__(self):
'Returns the text.'
return self.text
class _EpubDate(_EpubMeta):
'Metadata element for the publication date.'
_date_re = re.compile('^([0-9]{4})(-[0-9]{2}(-[0-9]{2})?)?$')
def __init__(self, date):
'''date must be a string of the form "YYYY[-MM[-DD]]". If it is
not of this form, or if the date is invalid, ValueError is
raised.'''
m = self._date_re.match(date)
if not m:
raise ValueError('invalid date format')
year = int(m.group(1))
try:
            mon = int(m.group(2)[1:])
            if mon < 1 or mon > 12:
                raise ValueError('month must be in 1..12')
        except (IndexError, TypeError):
            # m.group(2) is None when only the year was given.
            pass
        try:
            day = int(m.group(3)[1:])
            datetime.date(year, mon, day) # raises ValueError if invalid
        except (IndexError, TypeError):
            # m.group(3) is None when no day was given.
            pass
self.tag = 'dc:date'
self.text = date
self.attr = ()
class _EpubLang(_EpubMeta):
'Metadata element for the language of the book.'
_lang_re = re.compile('^[a-z]{2}(-[A-Z]{2})?$')
def __init__(self, lang):
'''lang must be a lower-case two-letter language code,
optionally followed by a "-" and a upper-case two-letter country
code. (e.g., "en", "en-US", "en-UK", "de", "de-DE", "de-AT")'''
if self._lang_re.match(lang):
self.tag = 'dc:language'
self.text = lang
self.attr = ()
else:
raise ValueError('invalid language format')
class Author(_EpubMeta):
'''To control the file-as and role attribute for the authors, pass
an Author object to the EpubBuilder instead of a string. The file-as
attribute is a form of the name used for sorting. The role attribute
describes how the person was involved in the work.
You ONLY need this if an author's name is not of the form
"Given-name Family-name", or if you want to specify a role other
than author. Otherwise, you can just pass a string.
The value of role should be a MARC relator, e.g., "aut" for author
or "edt" for editor. See http://www.loc.gov/marc/relators/ for a
full list.'''
def __init__(self, name, fileas = None, role = 'aut'):
'''Initialize the object. If the argument "fileas" is not given,
"Last-name, First-name" is used for the file-as attribute. If
the argument "role" is not given, "aut" is used for the role
attribute.'''
if not fileas:
fileas = _normalize(name)
self.tag = 'dc:creator'
self.text = name
self.attr = (('opf:file-as', fileas), ('opf:role', role))
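# Illustrative usage sketch (the names below are assumptions, not part of
# this module):
#   book.author = [Author('Jean de La Fontaine', fileas='La Fontaine, Jean de'),
#                  Author('Jane Doe', role='edt')]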
class _OPFfile:
'''Class for writing the OPF (Open Packaging Format) file for an
epub file. The OPF file contains the metadata, a manifest of all
component files in the epub, a "spine" which specifies the reading
order and a guide which points to important components of the book
such as the title page.'''
_opf = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
        '<package version="2.0" xmlns="http://www.idpf.org/2007/opf" unique-identifier="uid_id">\n'
' <metadata xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:opf="http://www.idpf.org/2007/opf">\n'
'{}'
' </metadata>\n'
' <manifest>\n'
'{}'
' </manifest>\n'
' <spine toc="toc">\n'
'{}'
' </spine>\n'
' <guide>\n'
'{}'
' </guide>\n'
'</package>\n'
)
def __init__(self):
'Initialize.'
self.meta = []
self.filelist = []
def write_xml(self):
'Write the XML code for the OPF file.'
metadata = ''
for elem in self.meta:
metadata += elem.write_xml()
manif = ''
spine = ''
guide = ''
for finfo in self.filelist:
manif += finfo.manifest_entry()
spine += finfo.spine_entry()
guide += finfo.guide_entry()
return self._opf.format(metadata, manif, spine, guide)
class EpubBuilder:
'''Builds an epub2.0.1 file. Some of the attributes of this class
(title, uid, lang) are marked as "mandatory" because they represent
metadata that is required by the epub specification. If these
attributes are left unset, default values will be used.'''
_style_css = (
'h1, h2, h3, h4, h5, h6 {\n'
' text-align: center;\n'
'}\n'
'p {\n'
' text-align: justify;\n'
' margin-top: 0.125em;\n'
' margin-bottom: 0em;\n'
' text-indent: 1.0em;\n'
'}\n'
'.getebook-tp {\n'
' margin-top: 8em;\n'
'}\n'
'.getebook-tp-authors {\n'
' font-size: 2em;\n'
' text-align: center;\n'
' margin-bottom: 1em;\n'
'}\n'
'.getebook-tp-title {\n'
' font-weight: bold;\n'
' font-size: 3em;\n'
' text-align: center;\n'
'}\n'
'.getebook-tp-sub {\n'
' text-align: center;\n'
' font-weight: normal;\n'
' font-size: 0.8em;\n'
' margin-top: 1em;\n'
'}\n'
'.getebook-false-h {\n'
' font-weight: bold;\n'
' font-size: 1.5em;\n'
'}\n'
'.getebook-small-h {\n'
' font-style: normal;\n'
' font-weight: normal;\n'
' font-size: 0.8em;\n'
'}\n'
)
_container_xml = (
'<?xml version="1.0"?>\n'
'<container version="1.0" xmlns="urn:oasis:names:tc:opendocument:xmlns:container">\n'
' <rootfiles>\n'
' <rootfile full-path="package.opf" media-type="application/oebps-package+xml"/>\n'
' </rootfiles>\n'
'</container>\n'
)
_html = (
'<?xml version="1.0" encoding="utf-8"?>\n'
'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n'
'<html xmlns="http://www.w3.org/1999/xhtml">\n'
' <head>\n'
' <title>{}</title>\n'
        '    <meta http-equiv="content-type" content="application/xhtml+xml; charset=utf-8" />\n'
' <link href="style.css" rel="stylesheet" type="text/css" />\n'
' </head>\n'
' <body>\n{}'
' </body>\n'
'</html>\n'
)
_finalized = False
def __init__(self, epub_file):
'''Initialize the EpubBuilder instance. "epub_file" is the
filename of the epub to be created.'''
self.epub_f = zipfile.ZipFile(epub_file, 'w', zipfile.ZIP_DEFLATED)
self.epub_f.writestr('mimetype', 'application/epub+zip')
self.epub_f.writestr('META-INF/container.xml', self._container_xml)
self.toc = EpubTOC()
self.opf = _OPFfile()
self.opf.filelist.append(_Fileinfo('toc.ncx', False))
self.opf.filelist.append(_Fileinfo('style.css', False))
self._authors = []
self.opt_meta = {} # Optional metadata (other than authors)
self.content = ''
self.part_no = 0
self.cont_filename = 'part%03d.html' % self.part_no
def __enter__(self):
'Return self for use in with ... as ... statement.'
return self
def __exit__(self, except_type, except_val, traceback):
'Call finalize() and close the file.'
try:
self.finalize()
finally:
# Close again in case an exception happened in finalize()
self.epub_f.close()
return False
@property
def uid(self):
'''Unique identifier of the ebook. (mandatory)
If this property is left unset, a pseudo-random string will be
generated which is long enough for collisions with existing
ebooks to be extremely unlikely.'''
try:
return self._uid
except AttributeError:
import random
from string import (ascii_letters, digits)
alnum = ascii_letters + digits
self.uid = ''.join([random.choice(alnum) for i in range(15)])
return self._uid
@uid.setter
def uid(self, val):
self._uid = _EpubMeta('dc:identifier', str(val), ('id', 'uid_id'))
@property
def title(self):
'''Title of the ebook. (mandatory)
If this property is left unset, it defaults to "Untitled".'''
try:
return self._title
except AttributeError:
self.title = 'Untitled'
return self._title
@title.setter
def title(self, val):
# If val is not a string, raise TypeError now rather than later.
self._title = _EpubMeta('dc:title', '' + val)
@property
def lang(self):
'''Language of the ebook. (mandatory)
The language must be given as a lower-case two-letter code, optionally
followed by a "-" and an upper-case two-letter country code.
(e.g., "en", "en-US", "en-UK", "de", "de-DE", "de-AT")
If this property is left unset, it defaults to "en".'''
try:
return self._lang
except AttributeError:
self.lang = 'en'
return self._lang
@lang.setter
def lang(self, val):
self._lang = _EpubLang(val)
@property
def author(self):
'''Name of the author. (optional)
If there are multiple authors, pass a list of strings.
To control the file-as and role attribute, use author objects instead
of strings; file-as is an alternate form of the name used for sorting.
For a description of the role attribute, see the docstring of the
author class.'''
if len(self._authors) == 1:
return self._authors[0]
return tuple([aut for aut in self._authors])
@author.setter
def author(self, val):
if isinstance(val, Author) or isinstance(val, str):
authors = [val]
else:
authors = val
for aut in authors:
try:
self._authors.append(Author('' + aut))
except TypeError:
# aut is not a string, so it should be an Author object
self._authors.append(aut)
@author.deleter
def author(self):
self._authors = []
@property
def date(self):
'''Publication date. (optional)
Must be given in "YYYY[-MM[-DD]]" format.'''
try:
return self.opt_meta['date']
except KeyError:
return None
@date.setter
def date(self, val):
self.opt_meta['date'] = _EpubDate(val)
@date.deleter
def date(self):
        del self.opt_meta['date']
@property
def rights(self):
'Copyright/licensing information. (optional)'
try:
return self.opt_meta['rights']
except KeyError:
return None
@rights.setter
def rights(self, val):
self.opt_meta['rights'] = _EpubMeta('dc:rights', '' + val)
@rights.deleter
def rights(self):
        del self.opt_meta['rights']
@property
def publisher(self):
'Publisher name. (optional)'
try:
return self.opt_meta['publisher']
except KeyError:
return None
@publisher.setter
def publisher(self, val):
self.opt_meta['publisher'] = _EpubMeta('dc:publisher', '' + val)
@publisher.deleter
def publisher(self):
        del self.opt_meta['publisher']
@property
def style_css(self):
'''CSS stylesheet for the files that are generated by the EpubBuilder
instance. Can be overwritten or extended, but not deleted.'''
return self._style_css
@style_css.setter
def style_css(self, val):
self._style_css = '' + val
def titlepage(self, main_title = None, subtitle = None):
'''Create a title page for the ebook. If no main_title is given,
the title attribute of the EpubBuilder instance is used.'''
tp = '<div class="getebook-tp">\n'
if len(self._authors) >= 1:
if len(self._authors) == 1:
aut_str = str(self._authors[0])
else:
                aut_str = ', '.join(str(aut) for aut in self._authors[0:-1]) \
                    + ', and ' + str(self._authors[-1])
tp += '<div class="getebook-tp-authors">%s</div>\n' % aut_str
if not main_title:
main_title = str(self.title)
tp += '<div class="getebook-tp-title">%s' % main_title
if subtitle:
tp += '<div class="getebook-tp-sub">%s</div>' % subtitle
tp += '</div>\n</div>\n'
self.opf.filelist.insert(0, _Fileinfo('title.html',
guide_title = 'Titlepage', guide_type = 'title-page'))
self.epub_f.writestr('title.html', self._html.format(self.title, tp))
def headingpage(self, heading, subtitle = None, toc_text = None):
'''Create a page containing only a (large) heading, optionally
with a smaller subtitle. If toc_text is not given, it defaults
to the heading.'''
self.new_part()
tag = 'h%d' % min(6, self.toc.depth)
self.content += '<div class="getebook-tp">'
self.content += '<{} class="getebook-tp-title">{}'.format(tag, heading)
if subtitle:
self.content += '<div class="getebook-tp-sub">%s</div>' % subtitle
        self.content += '</%s>\n</div>\n' % tag
if not toc_text:
toc_text = heading
self.toc.new_entry(toc_text, self.cont_filename)
self.new_part()
def insert_file(self, name, in_spine = False, guide_title = None,
guide_type = None, arcname = None):
'''Include an external file into the ebook. By default, it will
be added to the archive under its basename; the argument
"arcname" can be used to specify a different name.'''
if not arcname:
arcname = os.path.basename(name)
self.opf.filelist.append(_Fileinfo(arcname, in_spine, guide_title,
guide_type))
self.epub_f.write(name, arcname)
def add_file(self, arcname, str_or_bytes, in_spine = False,
guide_title = None, guide_type = None):
'''Add the string or bytes instance str_or_bytes to the archive
under the name arcname.'''
self.opf.filelist.append(_Fileinfo(arcname, in_spine, guide_title,
guide_type))
self.epub_f.writestr(arcname, str_or_bytes)
def false_heading(self, elem):
'''Handle a "false heading", i.e., text that appears in heading
tags in the source even though it is not a chapter heading.'''
elem.attrs['class'] = 'getebook-false-h'
elem.tag = 'p'
self.handle_elem(elem)
def _heading(self, elem):
'''Write a heading.'''
# Handle paragraph heading if we have one waiting (see the
        # par_heading method). We don't use _handle_par_h here because
# we merge it with the subsequent proper heading.
try:
par_h = self.par_h
del self.par_h
except AttributeError:
toc_text = elem.text
else:
# There is a waiting paragraph heading, we merge it with the
# new heading.
toc_text = par_h.text + '. ' + elem.text
par_h.tag = 'div'
par_h.attrs['class'] = 'getebook-small-h'
elem.children.insert(0, par_h)
# Set the class attribute value.
elem.attrs['class'] = 'getebook-chapter-h'
self.toc.new_entry(toc_text, self.cont_filename)
# Add heading to the epub.
tag = 'h%d' % min(self.toc.depth, 6)
self.content += _make_starttag(tag, elem.attrs)
for elem in elem.children:
self.handle_elem(elem)
self.content += '</%s>\n' % tag
def par_heading(self, elem):
'''Handle a "paragraph heading", i.e., a chaper heading or part
of a chapter heading inside paragraph tags. If it is immediately
followed by a heading, they will be merged into one.'''
self.par_h = elem
def _handle_par_h(self):
'Check if there is a waiting paragraph heading and handle it.'
try:
self._heading(self.par_h)
except AttributeError:
pass
def handle_elem(self, elem):
'Handle html element as supplied by getebook.EbookParser.'
try:
tag = elem.tag
except AttributeError:
# elem should be a string
is_string = True
tag = None
else:
is_string = False
if tag in getebook._headings:
self._heading(elem)
else:
# Handle waiting par_h if necessary (see par_heading)
try:
self._heading(self.par_h)
except AttributeError:
pass
if is_string:
self.content += elem
elif tag == 'br':
self.content += '<br />\n'
elif tag == 'img':
self.content += self._handle_image(elem.attrs) + '\n'
elif tag == 'a' or tag == 'noscript':
# Ignore tag, just write child elements
for child in elem.children:
self.handle_elem(child)
else:
self.content += _make_starttag(tag, elem.attrs)
for child in elem.children:
self.handle_elem(child)
self.content += '</%s>' % tag
if tag == 'p':
self.content += '\n'
def _handle_image(self, attrs):
'Returns the alt text of an image tag.'
try:
return attrs['alt']
except KeyError:
return ''
def new_part(self):
'''Begin a new part of the epub. Write the current html document
to the archive and begin a new one.'''
# Handle waiting par_h (see par_heading)
try:
self._heading(self.par_h)
except AttributeError:
pass
if self.content:
html = self._html.format(self.title, self.content)
self.epub_f.writestr(self.cont_filename, html)
self.part_no += 1
self.content = ''
self.cont_filename = 'part%03d.html' % self.part_no
self.opf.filelist.append(_Fileinfo(self.cont_filename))
def finalize(self):
'Complete and close the epub file.'
# Handle waiting par_h (see par_heading)
if self._finalized:
# Avoid finalizing twice. Otherwise, calling finalize inside
# a with-block would lead to an exception when __exit__
# calls finalize again.
return
try:
self._heading(self.par_h)
except AttributeError:
pass
if self.content:
html = self._html.format(self.title, self.content)
self.epub_f.writestr(self.cont_filename, html)
self.opf.meta = [self.uid, self.lang, self.title] + self._authors
self.opf.meta += self.opt_meta.values()
self.epub_f.writestr('package.opf', self.opf.write_xml())
self.epub_f.writestr('toc.ncx',
self.toc.write_xml(self.uid, self.title, self._authors))
self.epub_f.writestr('style.css', self._style_css)
self.epub_f.close()
self._finalized = True
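# Minimal end-to-end sketch (file name and metadata values are hypothetical):
#   with EpubBuilder('example.epub') as book:
#       book.title = 'Example Title'
#       book.author = 'Jane Doe'
#       book.titlepage()
#       # ...feed parsed elements via book.handle_elem()...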
| mfil/getebook | getebook/epub.py | Python | isc | 25,314 | 0.003713 |
""" Django support. """
from __future__ import absolute_import
import datetime
from os import path
from types import GeneratorType
import decimal
from django import VERSION
if VERSION < (1, 8):
from django.contrib.contenttypes.generic import (
GenericForeignKey, GenericRelation)
else:
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation)
from django.contrib.contenttypes.models import ContentType
from django.core.files.base import ContentFile
from django.core.validators import (
validate_ipv4_address, validate_ipv6_address)
from django.db import models
from django.conf import settings
from .. import mix_types as t, _compat as _
from ..main import (
SKIP_VALUE, TypeMixerMeta as BaseTypeMixerMeta, TypeMixer as BaseTypeMixer,
GenFactory as BaseFactory, Mixer as BaseMixer, _Deffered, partial, faker)
get_contentfile = ContentFile
MOCK_FILE = path.abspath(path.join(
path.dirname(path.dirname(__file__)), 'resources', 'file.txt'
))
MOCK_IMAGE = path.abspath(path.join(
path.dirname(path.dirname(__file__)), 'resources', 'image.jpg'
))
def get_file(filepath=MOCK_FILE, **kwargs):
""" Generate a content file.
:return ContentFile:
"""
with open(filepath, 'rb') as f:
name = path.basename(filepath)
return get_contentfile(f.read(), name)
def get_image(filepath=MOCK_IMAGE):
""" Generate a content image.
:return ContentFile:
"""
return get_file(filepath)
def get_relation(_scheme=None, _typemixer=None, **params):
""" Function description. """
if VERSION < (1, 8):
scheme = _scheme.related.parent_model
else:
scheme = _scheme.related_model
if scheme is ContentType:
choices = [m for m in models.get_models() if m is not ContentType]
return ContentType.objects.get_for_model(faker.random_element(choices))
return TypeMixer(scheme, mixer=_typemixer._TypeMixer__mixer,
factory=_typemixer._TypeMixer__factory,
fake=_typemixer._TypeMixer__fake,).blend(**params)
def get_datetime(**params):
""" Support Django TZ support. """
return faker.datetime(tzinfo=settings.USE_TZ)
class GenFactory(BaseFactory):
""" Map a django classes to simple types. """
types = {
(models.AutoField, models.PositiveIntegerField): t.PositiveInteger,
models.BigIntegerField: t.BigInteger,
models.BooleanField: bool,
(models.CharField, models.SlugField): str,
models.DateField: datetime.date,
models.DecimalField: decimal.Decimal,
models.EmailField: t.EmailString,
models.FloatField: float,
models.GenericIPAddressField: t.IPString,
models.IPAddressField: t.IP4String,
models.IntegerField: int,
models.PositiveSmallIntegerField: t.PositiveSmallInteger,
models.SmallIntegerField: t.SmallInteger,
models.TextField: t.Text,
models.TimeField: datetime.time,
models.URLField: t.URL,
}
generators = {
models.BinaryField: faker.pybytes,
models.DateTimeField: get_datetime,
models.FileField: get_file,
models.FilePathField: lambda: MOCK_FILE,
models.ForeignKey: get_relation,
models.ImageField: get_image,
models.ManyToManyField: get_relation,
models.OneToOneField: get_relation,
}
class TypeMixerMeta(BaseTypeMixerMeta):
""" Load django models from strings. """
def __new__(mcs, name, bases, params):
""" Associate Scheme with Django models.
Cache Django models.
:return mixer.backend.django.TypeMixer: A generated class.
"""
params['models_cache'] = dict()
cls = super(TypeMixerMeta, mcs).__new__(mcs, name, bases, params)
return cls
def __load_cls(cls, cls_type):
if isinstance(cls_type, _.string_types):
if '.' in cls_type:
app_label, model_name = cls_type.split(".")
return models.get_model(app_label, model_name)
else:
try:
if cls_type not in cls.models_cache:
cls.__update_cache()
return cls.models_cache[cls_type]
except KeyError:
raise ValueError('Model "%s" not found.' % cls_type)
return cls_type
def __update_cache(cls):
""" Update apps cache for Django < 1.7. """
if VERSION < (1, 7):
for app_models in models.loading.cache.app_models.values():
for name, model in app_models.items():
cls.models_cache[name] = model
else:
from django.apps import apps
for app in apps.all_models:
for name, model in apps.all_models[app].items():
cls.models_cache[name] = model
class TypeMixer(_.with_metaclass(TypeMixerMeta, BaseTypeMixer)):
""" TypeMixer for Django. """
__metaclass__ = TypeMixerMeta
factory = GenFactory
def postprocess(self, target, postprocess_values):
""" Fill postprocess_values. """
for name, deffered in postprocess_values:
if not type(deffered.scheme) is GenericForeignKey:
continue
name, value = self._get_value(name, deffered.value)
setattr(target, name, value)
if self.__mixer:
target = self.__mixer.postprocess(target)
for name, deffered in postprocess_values:
if type(deffered.scheme) is GenericForeignKey or not target.pk:
continue
name, value = self._get_value(name, deffered.value)
            # If the ManyToMany relation has an intermediary model,
            # the add and remove methods do not exist.
if not deffered.scheme.rel.through._meta.auto_created and self.__mixer: # noqa
self.__mixer.blend(
deffered.scheme.rel.through, **{
deffered.scheme.m2m_field_name(): target,
deffered.scheme.m2m_reverse_field_name(): value})
continue
if not isinstance(value, (list, tuple)):
value = [value]
setattr(target, name, value)
return target
def get_value(self, name, value):
""" Set value to generated instance.
:return : None or (name, value) for later use
"""
field = self.__fields.get(name)
if field:
if (field.scheme in self.__scheme._meta.local_many_to_many or
type(field.scheme) is GenericForeignKey):
return name, _Deffered(value, field.scheme)
return self._get_value(name, value, field)
return super(TypeMixer, self).get_value(name, value)
def _get_value(self, name, value, field=None):
if isinstance(value, GeneratorType):
return self._get_value(name, next(value), field)
if not isinstance(value, t.Mix) and value is not SKIP_VALUE:
if callable(value):
return self._get_value(name, value(), field)
if field:
value = field.scheme.to_python(value)
return name, value
def gen_select(self, field_name, select):
""" Select exists value from database.
:param field_name: Name of field for generation.
:return : None or (name, value) for later use
"""
if field_name not in self.__fields:
return field_name, None
try:
field = self.__fields[field_name]
return field.name, field.scheme.rel.to.objects.filter(**select.params).order_by('?')[0]
except Exception:
raise Exception("Cannot find a value for the field: '{0}'".format(field_name))
def gen_field(self, field):
""" Generate value by field.
:param relation: Instance of :class:`Field`
:return : None or (name, value) for later use
"""
if isinstance(field.scheme, GenericForeignKey):
return field.name, SKIP_VALUE
if field.params and not field.scheme:
raise ValueError('Invalid relation %s' % field.name)
return super(TypeMixer, self).gen_field(field)
def make_fabric(self, field, fname=None, fake=False, kwargs=None): # noqa
""" Make a fabric for field.
:param field: A mixer field
:param fname: Field name
:param fake: Force fake data
:return function:
"""
kwargs = {} if kwargs is None else kwargs
fcls = type(field)
stype = self.__factory.cls_to_simple(fcls)
if fcls is models.CommaSeparatedIntegerField:
return partial(faker.choices, range(0, 10), length=field.max_length)
if field and field.choices:
try:
choices, _ = list(zip(*field.choices))
return partial(faker.random_element, choices)
except ValueError:
pass
if stype in (str, t.Text):
fab = super(TypeMixer, self).make_fabric(
fcls, field_name=fname, fake=fake, kwargs=kwargs)
return lambda: fab()[:field.max_length]
if stype is decimal.Decimal:
kwargs['left_digits'] = field.max_digits - field.decimal_places
kwargs['right_digits'] = field.decimal_places
elif stype is t.IPString:
# Hack for support Django 1.4/1.5
protocol = getattr(field, 'protocol', None)
if not protocol:
validator = field.default_validators[0]
protocol = 'both'
if validator is validate_ipv4_address:
protocol = 'ipv4'
elif validator is validate_ipv6_address:
protocol = 'ipv6'
# protocol matching is case insensitive
# default address is either IPv4 or IPv6
kwargs['protocol'] = protocol.lower()
elif isinstance(field, models.fields.related.RelatedField):
kwargs.update({'_typemixer': self, '_scheme': field})
return super(TypeMixer, self).make_fabric(
fcls, field_name=fname, fake=fake, kwargs=kwargs)
@staticmethod
def is_unique(field):
""" Return True is field's value should be a unique.
:return bool:
"""
if VERSION < (1, 7) and isinstance(field.scheme, models.OneToOneField):
return True
return field.scheme.unique
@staticmethod
def is_required(field):
""" Return True is field's value should be defined.
:return bool:
"""
if field.params:
return True
if field.scheme.has_default() or field.scheme.null and field.scheme.blank:
return False
if field.scheme.auto_created:
return False
if isinstance(field.scheme, models.ManyToManyField):
return False
if isinstance(field.scheme, GenericRelation):
return False
return True
def guard(self, *args, **kwargs):
""" Look objects in database.
:returns: A finded object or False
"""
qs = self.__scheme.objects.filter(*args, **kwargs)
count = qs.count()
if count == 1:
return qs.get()
if count:
return list(qs)
return False
def reload(self, obj):
""" Reload object from database. """
if not obj.pk:
raise ValueError("Cannot load the object: %s" % obj)
return self.__scheme._default_manager.get(pk=obj.pk)
def __load_fields(self):
for field in self.__scheme._meta.virtual_fields:
yield field.name, t.Field(field, field.name)
for field in self.__scheme._meta.fields:
if isinstance(field, models.AutoField)\
and self.__mixer and self.__mixer.params.get('commit'):
continue
yield field.name, t.Field(field, field.name)
for field in self.__scheme._meta.local_many_to_many:
yield field.name, t.Field(field, field.name)
class Mixer(BaseMixer):
""" Integration with Django. """
type_mixer_cls = TypeMixer
def __init__(self, commit=True, **params):
"""Initialize Mixer instance.
:param commit: (True) Save object to database.
"""
super(Mixer, self).__init__(**params)
self.params['commit'] = commit
def postprocess(self, target):
""" Save objects in db.
:return value: A generated value
"""
if self.params.get('commit'):
target.save()
return target
# Default mixer
mixer = Mixer()
# pylama:ignore=E1120
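# Typical usage sketch (the model label and field value are hypothetical):
#   from mixer.backend.django import mixer
#   user = mixer.blend('auth.User', username='demo')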
| mechaxl/mixer | mixer/backend/django.py | Python | bsd-3-clause | 12,824 | 0.000468 |
# -*- coding: utf8 -*-
#
# Copyright (C) 2017 NDP Systèmes (<http://www.ndp-systemes.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from . import stock_procurement_split
| ndp-systemes/odoo-addons | stock_procurement_split/__init__.py | Python | agpl-3.0 | 822 | 0 |
# Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for make_rpm."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import unittest
from tools.build_defs.pkg import make_rpm
def WriteFile(filename, *contents):
with open(filename, 'w') as text_file:
text_file.write('\n'.join(contents))
def DirExists(dirname):
return os.path.exists(dirname) and os.path.isdir(dirname)
def FileExists(filename):
return os.path.exists(filename) and not os.path.isdir(filename)
def FileContents(filename):
with open(filename, 'r') as text_file:
return [s.strip() for s in text_file.readlines()]
class MakeRpmTest(unittest.TestCase):
def testFindOutputFile(self):
log = """
Lots of data.
Wrote: /path/to/file/here.rpm
More data present.
"""
result = make_rpm.FindOutputFile(log)
self.assertEquals('/path/to/file/here.rpm', result)
def testFindOutputFile_missing(self):
log = """
Lots of data.
More data present.
"""
result = make_rpm.FindOutputFile(log)
self.assertEquals(None, result)
def testCopyAndRewrite(self):
with make_rpm.Tempdir():
WriteFile('test.txt', 'Some: data1', 'Other: data2', 'More: data3')
make_rpm.CopyAndRewrite('test.txt', 'out.txt', {
'Some:': 'data1a',
'More:': 'data3a',
})
self.assertTrue(FileExists('out.txt'))
self.assertItemsEqual(['Some: data1a', 'Other: data2', 'More: data3a'],
FileContents('out.txt'))
def testSetupWorkdir(self):
builder = make_rpm.RpmBuilder('test', '1.0', 'x86')
with make_rpm.Tempdir() as outer:
# Create spec_file, test files.
WriteFile('test.spec', 'Name: test', 'Version: 0.1', 'Summary: test data')
WriteFile('file1.txt', 'Hello')
WriteFile('file2.txt', 'Goodbye')
builder.AddFiles(['file1.txt', 'file2.txt'])
with make_rpm.Tempdir():
# Call RpmBuilder.
builder.SetupWorkdir('test.spec', outer)
# Make sure files exist.
self.assertTrue(DirExists('SOURCES'))
self.assertTrue(DirExists('BUILD'))
self.assertTrue(DirExists('TMP'))
self.assertTrue(FileExists('test.spec'))
self.assertItemsEqual(
['Name: test', 'Version: 1.0', 'Summary: test data'],
FileContents('test.spec'))
self.assertTrue(FileExists('BUILD/file1.txt'))
self.assertItemsEqual(['Hello'], FileContents('BUILD/file1.txt'))
self.assertTrue(FileExists('BUILD/file2.txt'))
self.assertItemsEqual(['Goodbye'], FileContents('BUILD/file2.txt'))
if __name__ == '__main__':
unittest.main()
| juhalindfors/bazel-patches | tools/build_defs/pkg/make_rpm_test.py | Python | apache-2.0 | 3,259 | 0.006137 |
# -*- coding: utf-8 -*-
# Coh-Metrix-Dementia - Automatic text analysis and classification for dementia.
# Copyright (C) 2014 Andre Luiz Verucci da Cunha
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals, print_function, division
from sqlalchemy import create_engine as _create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, Float, Boolean
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
DEFAULT_OPTIONS = {
'dialect': 'postgresql',
'driver': 'psycopg2',
'username': 'cohmetrix',
'password': 'cohmetrix',
'host': 'localhost',
'port': '5432',
'database': 'cohmetrix_pt_br',
}
def create_engine(options=DEFAULT_OPTIONS, echo=False):
connect_string =\
'{dialect}+{driver}://{username}:{password}@{host}:{port}/{database}'\
.format(**options)
return _create_engine(connect_string, echo=echo)
def create_session(engine):
return sessionmaker(bind=engine)()
class DelafVerb(Base):
__tablename__ = 'delaf_verbs'
word = Column(String, primary_key=True)
lemma = Column(String, primary_key=True)
pos = Column(String, primary_key=True)
tense = Column(String, primary_key=True)
person = Column(String, primary_key=True)
def __repr__(self):
return ('<DelafVerb: word={0}, lemma={1}, pos={2}, tense={3},' +
' person={4}>')\
.format(self.word, self.lemma, self.pos, self.tense, self.person)
class DelafNoun(Base):
__tablename__ = 'delaf_nouns'
word = Column(String, primary_key=True)
lemma = Column(String, primary_key=True)
pos = Column(String, primary_key=True)
morf = Column(String, primary_key=True)
def __repr__(self):
return '<DelafNoun: word={0}, lemma={1}, pos={2}, morf={3}>'\
.format(self.word, self.lemma, self.pos, self.morf)
class DelafWord(Base):
__tablename__ = 'delaf_words'
word = Column(String, primary_key=True)
lemma = Column(String, primary_key=True)
pos = Column(String, primary_key=True)
def __repr__(self):
return '<DelafWord: word={0}, lemma={1}, pos={2}>'\
.format(self.word, self.lemma, self.pos)
class TepWord(Base):
__tablename__ = 'tep_words'
group = Column(Integer, primary_key=True)
word = Column(String, primary_key=True)
pos = Column(String)
antonym = Column(Integer)
def __repr__(self):
return '<TepWord: group={0}, word={1}, pos={2}, antonym={3}>'\
.format(self.group, self.word, self.pos, self.antonym)
class Frequency(Base):
__tablename__ = 'frequencies'
id = Column(Integer, primary_key=True)
word = Column(String)
freq = Column(Integer)
freq_perc = Column(Float)
texts = Column(Integer)
texts_perc = Column(Float)
def __repr__(self):
return '<Frequency: word=%s, freq=%s, freq_perc=%s, texts=%s, texts_perc=%s>'\
% (self.word, str(self.freq), str(self.freq_perc), str(self.texts),
str(self.texts_perc))
class Hypernym(Base):
__tablename__ = 'hypernyms_verbs'
word = Column(String, primary_key=True)
category = Column(String, primary_key=True)
grammar_attrs = Column(String)
hyper_levels = Column(Integer)
def __repr__(self):
return '<Hypernym: word={0}, cat={1}, attrs={2}, levels={3}>'\
.format(self.word, self.category, self.grammar_attrs,
self.hyper_levels)
class Connective(Base):
__tablename__ = 'connectives'
connective = Column(String, primary_key=True)
additive_pos = Column(Boolean)
additive_neg = Column(Boolean)
temporal_pos = Column(Boolean)
temporal_neg = Column(Boolean)
causal_pos = Column(Boolean)
causal_neg = Column(Boolean)
logic_pos = Column(Boolean)
logic_neg = Column(Boolean)
def __repr__(self):
attrs = []
if self.additive_pos:
attrs.append('add pos')
if self.additive_neg:
attrs.append('add neg')
if self.temporal_pos:
attrs.append('tmp pos')
if self.temporal_neg:
attrs.append('tmp neg')
if self.causal_pos:
attrs.append('cau pos')
if self.causal_neg:
attrs.append('cau neg')
if self.logic_pos:
attrs.append('log pos')
if self.logic_neg:
attrs.append('log neg')
return '<Connective: conn={0}, {1}>'.format(self.connective,
', '.join(attrs))
class Helper(object):
def __init__(self, session):
"""@todo: Docstring for __init__.
:session: @todo
:returns: @todo
"""
self._session = session
def get_frequency(self, word):
return self._session.query(Frequency).filter_by(word=word).first()
def get_hypernyms(self, verb):
"""@todo: Docstring for get_hypernyms.
:verb: @todo
:returns: @todo
"""
return self._session.query(Hypernym).filter_by(word=verb).first()
def get_delaf_verb(self, verb):
"""@todo: Docstring for get_verb.
:verb: @todo
:returns: @todo
"""
return self._session.query(DelafVerb).filter_by(word=verb).first()
def get_delaf_noun(self, noun):
"""@todo: Docstring for get_noun.
:noun: @todo
:returns: @todo
"""
return self._session.query(DelafNoun).filter_by(word=noun).first()
def get_delaf_word(self, word, pos=None):
"""@todo: Docstring for get_word.
:word: @todo
:pos: @todo
:returns: @todo
"""
if pos is None:
# Ignore PoS
result = self._session.query(DelafWord).filter_by(word=word).first()
else:
result = self._session.query(DelafWord)\
.filter_by(word=word, pos=pos).first()
return result
def get_tep_word(self, word, pos=None):
"""@todo: Docstring for get_tep_word.
:word: @todo
:pos: @todo
:returns: @todo
"""
if pos is None:
# Ignore PoS
result = self._session.query(TepWord).filter_by(word=word).first()
else:
result = self._session.query(TepWord)\
.filter_by(word=word, pos=pos).first()
return result
def get_all_tep_words(self, word, pos=None):
"""@todo: Docstring for get_all_tep_words.
:word: @todo
:pos: @todo
:returns: @todo
"""
if pos is None:
# Ignore PoS
result = self._session.query(TepWord).filter_by(word=word).all()
else:
result = self._session.query(TepWord)\
.filter_by(word=word, pos=pos).all()
return result
def get_tep_words_count(self, word, pos=None):
"""@todo: Docstring for get_tep_words_count.
:word: @todo
:pos: @todo
:returns: @todo
"""
if pos is None:
# Ignore PoS
result = self._session.query(TepWord).filter_by(word=word).count()
else:
result = self._session.query(TepWord)\
.filter_by(word=word, pos=pos).count()
return result
def get_connective(self, connective):
"""TODO: Docstring for get_connective.
:connective: TODO
:returns: TODO
"""
return self._session.query(Connective).filter_by(connective=connective)\
.first()
def get_all_connectives(self):
"""TODO: Docstring for get_connective.
:connective: TODO
:returns: TODO
"""
return self._session.query(Connective).all()
if __name__ == '__main__':
engine = create_engine()
session = create_session(engine)
helper = Helper(session)
print(helper.get_frequency('abacaxi'))
print(helper.get_frequency('maçã'))
print(helper.get_hypernyms('dar'))
print(helper.get_hypernyms('abalançar'))
print(helper.get_delaf_verb('apareceu'))
print(helper.get_delaf_verb('abraçarão'))
print(helper.get_delaf_noun('abraço'))
print(helper.get_delaf_noun('carrinho'))
print(helper.get_delaf_noun('carrão'))
print(helper.get_delaf_word('bonito'))
print(helper.get_delaf_word('finalmente'))
print(helper.get_delaf_word('canto', pos='N'))
print(helper.get_delaf_word('canto', pos='V'))
print(helper.get_tep_word('cantar', pos='Substantivo'))
print(helper.get_tep_word('cantar', pos='Verbo'))
print(helper.get_all_tep_words('cantar'))
print(helper.get_tep_words_count('cantar'))
print(helper.get_all_tep_words('cantar', pos='Verbo'))
print(helper.get_tep_words_count('cantar', pos='Verbo'))
print(helper.get_connective('na realidade'))
print(helper.get_connective('além disso'))
# print(helper.get_all_connectives())
| andrecunha/coh-metrix-dementia | coh/database.py | Python | gpl-3.0 | 9,555 | 0.000419 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from datetime import datetime, timedelta
from functools import wraps
from contextlib2 import contextmanager
from flask import request
from superset import app, cache
from superset.utils.dates import now_as_float
# If a user sets `max_age` to 0, how long should the browser cache the
# resource? Flask-Caching will cache forever, but for the HTTP header we need
# to specify a "far future" date.
FAR_FUTURE = 365 * 24 * 60 * 60 # 1 year in seconds
@contextmanager
def stats_timing(stats_key, stats_logger):
"""Provide a transactional scope around a series of operations."""
start_ts = now_as_float()
try:
yield start_ts
except Exception as e:
raise e
finally:
stats_logger.timing(stats_key, now_as_float() - start_ts)
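# Usage sketch for stats_timing (the key and the surrounding call are
# hypothetical):
#   with stats_timing('dashboard.render', stats_logger):
#       render_dashboard()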
def etag_cache(max_age, check_perms=bool):
"""
A decorator for caching views and handling etag conditional requests.
The decorator adds headers to GET requests that help with caching: Last-
Modified, Expires and ETag. It also handles conditional requests, when the
    client sends an If-None-Match header.
If a cache is set, the decorator will cache GET responses, bypassing the
dataframe serialization. POST requests will still benefit from the
dataframe cache for requests that produce the same SQL.
"""
def decorator(f):
@wraps(f)
def wrapper(*args, **kwargs):
# check if the user can access the resource
check_perms(*args, **kwargs)
# for POST requests we can't set cache headers, use the response
# cache nor use conditional requests; this will still use the
# dataframe cache in `superset/viz.py`, though.
if request.method == "POST":
return f(*args, **kwargs)
response = None
if cache:
try:
# build the cache key from the function arguments and any
# other additional GET arguments (like `form_data`, eg).
key_args = list(args)
key_kwargs = kwargs.copy()
key_kwargs.update(request.args)
cache_key = wrapper.make_cache_key(f, *key_args, **key_kwargs)
response = cache.get(cache_key)
except Exception: # pylint: disable=broad-except
if app.debug:
raise
logging.exception("Exception possibly due to cache backend.")
# if no response was cached, compute it using the wrapped function
if response is None:
response = f(*args, **kwargs)
# add headers for caching: Last Modified, Expires and ETag
response.cache_control.public = True
response.last_modified = datetime.utcnow()
expiration = max_age if max_age != 0 else FAR_FUTURE
response.expires = response.last_modified + timedelta(
seconds=expiration
)
response.add_etag()
# if we have a cache, store the response from the request
if cache:
try:
cache.set(cache_key, response, timeout=max_age)
except Exception: # pylint: disable=broad-except
if app.debug:
raise
logging.exception("Exception possibly due to cache backend.")
return response.make_conditional(request)
if cache:
wrapper.uncached = f
wrapper.cache_timeout = max_age
wrapper.make_cache_key = cache._memoize_make_cache_key( # pylint: disable=protected-access
make_name=None, timeout=max_age
)
return wrapper
return decorator
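# Application sketch (the view and permission check are hypothetical):
#   @etag_cache(max_age=600, check_perms=check_datasource_perms)
#   def explore_json(self, *args, **kwargs):
#       ...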
| zhouyao1994/incubator-superset | superset/utils/decorators.py | Python | apache-2.0 | 4,658 | 0.000859 |
"""Creates ACME accounts for server."""
import datetime
import hashlib
import logging
import os
import socket
from cryptography.hazmat.primitives import serialization
import pyrfc3339
import pytz
import zope.component
from acme import fields as acme_fields
from acme import jose
from acme import messages
from letsencrypt import errors
from letsencrypt import interfaces
from letsencrypt import le_util
logger = logging.getLogger(__name__)
class Account(object): # pylint: disable=too-few-public-methods
"""ACME protocol registration.
:ivar .RegistrationResource regr: Registration Resource
:ivar .JWK key: Authorized Account Key
:ivar .Meta: Account metadata
:ivar str id: Globally unique account identifier.
"""
class Meta(jose.JSONObjectWithFields):
"""Account metadata
:ivar datetime.datetime creation_dt: Creation date and time (UTC).
:ivar str creation_host: FQDN of host, where account has been created.
.. note:: ``creation_dt`` and ``creation_host`` are useful in
cross-machine migration scenarios.
"""
creation_dt = acme_fields.RFC3339Field("creation_dt")
creation_host = jose.Field("creation_host")
def __init__(self, regr, key, meta=None):
self.key = key
self.regr = regr
self.meta = self.Meta(
# pyrfc3339 drops microseconds, make sure __eq__ is sane
creation_dt=datetime.datetime.now(
tz=pytz.UTC).replace(microsecond=0),
creation_host=socket.getfqdn()) if meta is None else meta
self.id = hashlib.md5(
self.key.key.public_key().public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo)
).hexdigest()
# Implementation note: Email? Multiple accounts can have the
# same email address. Registration URI? Assigned by the
# server, not guaranteed to be stable over time, nor
# canonical URI can be generated. ACME protocol doesn't allow
# account key (and thus its fingerprint) to be updated...
@property
def slug(self):
"""Short account identification string, useful for UI."""
return "{1}@{0} ({2})".format(pyrfc3339.generate(
self.meta.creation_dt), self.meta.creation_host, self.id[:4])
def __repr__(self):
return "<{0}({1})>".format(self.__class__.__name__, self.id)
def __eq__(self, other):
return (isinstance(other, self.__class__) and
self.key == other.key and self.regr == other.regr and
self.meta == other.meta)
def report_new_account(acc, config):
"""Informs the user about their new Let's Encrypt account."""
reporter = zope.component.queryUtility(interfaces.IReporter)
if reporter is None:
return
reporter.add_message(
"Your account credentials have been saved in your Let's Encrypt "
"configuration directory at {0}. You should make a secure backup "
"of this folder now. This configuration directory will also "
"contain certificates and private keys obtained by Let's Encrypt "
"so making regular backups of this folder is ideal.".format(
config.config_dir),
reporter.MEDIUM_PRIORITY)
if acc.regr.body.emails:
recovery_msg = ("If you lose your account credentials, you can "
"recover through e-mails sent to {0}.".format(
", ".join(acc.regr.body.emails)))
reporter.add_message(recovery_msg, reporter.MEDIUM_PRIORITY)
class AccountMemoryStorage(interfaces.AccountStorage):
"""In-memory account strage."""
def __init__(self, initial_accounts=None):
self.accounts = initial_accounts if initial_accounts is not None else {}
def find_all(self):
return self.accounts.values()
def save(self, account):
if account.id in self.accounts:
logger.debug("Overwriting account: %s", account.id)
self.accounts[account.id] = account
def load(self, account_id):
try:
return self.accounts[account_id]
except KeyError:
raise errors.AccountNotFound(account_id)
class AccountFileStorage(interfaces.AccountStorage):
"""Accounts file storage.
:ivar .IConfig config: Client configuration
"""
def __init__(self, config):
self.config = config
le_util.make_or_verify_dir(config.accounts_dir, 0o700, os.geteuid(),
self.config.strict_permissions)
def _account_dir_path(self, account_id):
return os.path.join(self.config.accounts_dir, account_id)
@classmethod
def _regr_path(cls, account_dir_path):
return os.path.join(account_dir_path, "regr.json")
@classmethod
def _key_path(cls, account_dir_path):
return os.path.join(account_dir_path, "private_key.json")
@classmethod
def _metadata_path(cls, account_dir_path):
return os.path.join(account_dir_path, "meta.json")
def find_all(self):
try:
candidates = os.listdir(self.config.accounts_dir)
except OSError:
return []
accounts = []
for account_id in candidates:
try:
accounts.append(self.load(account_id))
except errors.AccountStorageError:
logger.debug("Account loading problem", exc_info=True)
return accounts
def load(self, account_id):
account_dir_path = self._account_dir_path(account_id)
if not os.path.isdir(account_dir_path):
raise errors.AccountNotFound(
"Account at %s does not exist" % account_dir_path)
try:
with open(self._regr_path(account_dir_path)) as regr_file:
regr = messages.RegistrationResource.json_loads(regr_file.read())
with open(self._key_path(account_dir_path)) as key_file:
key = jose.JWK.json_loads(key_file.read())
with open(self._metadata_path(account_dir_path)) as metadata_file:
meta = Account.Meta.json_loads(metadata_file.read())
except IOError as error:
raise errors.AccountStorageError(error)
acc = Account(regr, key, meta)
if acc.id != account_id:
raise errors.AccountStorageError(
"Account ids mismatch (expected: {0}, found: {1}".format(
account_id, acc.id))
return acc
def save(self, account):
account_dir_path = self._account_dir_path(account.id)
le_util.make_or_verify_dir(account_dir_path, 0o700, os.geteuid(),
self.config.strict_permissions)
try:
with open(self._regr_path(account_dir_path), "w") as regr_file:
regr_file.write(account.regr.json_dumps())
with le_util.safe_open(self._key_path(account_dir_path),
"w", chmod=0o400) as key_file:
key_file.write(account.key.json_dumps())
with open(self._metadata_path(account_dir_path), "w") as metadata_file:
metadata_file.write(account.meta.json_dumps())
except IOError as error:
raise errors.AccountStorageError(error)
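# Storage round-trip sketch (``config`` is assumed to satisfy interfaces.IConfig):
#   storage = AccountFileStorage(config)
#   storage.save(acc)
#   assert storage.load(acc.id) == acc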
| mitnk/letsencrypt | letsencrypt/account.py | Python | apache-2.0 | 7,359 | 0.000408 |
import os
import shutil
import csv
class Restorer:
def __init__(self, backupDir):
self.backupDir = backupDir
if not self.backupDir.endswith('/'):
self.backupDir += '/'
def Run(self, filenamesListFname, doDelete=False):
if not os.path.exists(self.backupDir + filenamesListFname):
return
with open(self.backupDir + filenamesListFname, 'rb') as fnamesList:
filenameReader = reversed(list(csv.reader(fnamesList, delimiter='\t')))
for line in filenameReader:
shutil.copyfile(line[0], line[1])
if doDelete:
os.remove(line[0])
if doDelete:
os.remove(self.backupDir + filenamesListFname)
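# Usage sketch (paths are hypothetical):
#   Restorer('/tmp/backup').Run('filenames.tsv', doDelete=False)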
| mozafari/vprofiler | src/Restorer/Restorer.py | Python | apache-2.0 | 744 | 0.002688 |
import sublime, sublime_plugin
try:
# ST 3
from .app.sublime_command import SublimeCommand
from .app.settings import Settings
except ValueError:
# ST 2
from app.sublime_command import SublimeCommand
from app.settings import Settings
class CalculateHoursCommand(sublime_plugin.TextCommand):
def run(self, edit):
SublimeCommand(Settings(sublime)).calculate_hours(edit, self.view)
class ConvertHoursToSecondsCommand(sublime_plugin.TextCommand):
def run(self, edit):
SublimeCommand(Settings(sublime)).convert_hours_to_seconds(edit, self.view)
| ldgit/hours-calculator | calculator.py | Python | mit | 592 | 0.003378 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow_models.im2txt.show_and_tell_model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from . import configuration
from . import show_and_tell_model
class ShowAndTellModel(show_and_tell_model.ShowAndTellModel):
"""Subclass of ShowAndTellModel without the disk I/O."""
def build_inputs(self):
if self.mode == "inference":
# Inference mode doesn't read from disk, so defer to parent.
return super(ShowAndTellModel, self).build_inputs()
else:
# Replace disk I/O with random Tensors.
self.images = tf.random_uniform(
shape=[self.config.batch_size, self.config.image_height,
self.config.image_width, 3],
minval=-1,
maxval=1)
self.input_seqs = tf.random_uniform(
[self.config.batch_size, 15],
minval=0,
maxval=self.config.vocab_size,
dtype=tf.int64)
self.target_seqs = tf.random_uniform(
[self.config.batch_size, 15],
minval=0,
maxval=self.config.vocab_size,
dtype=tf.int64)
self.input_mask = tf.ones_like(self.input_seqs)
class ShowAndTellModelTest(tf.test.TestCase):
def setUp(self):
super(ShowAndTellModelTest, self).setUp()
self._model_config = configuration.ModelConfig()
def _countModelParameters(self):
"""Counts the number of parameters in the model at top level scope."""
counter = {}
for v in tf.global_variables():
name = v.op.name.split("/")[0]
num_params = v.get_shape().num_elements()
assert num_params
counter[name] = counter.get(name, 0) + num_params
return counter
def _checkModelParameters(self):
"""Verifies the number of parameters in the model."""
param_counts = self._countModelParameters()
expected_param_counts = {
"InceptionV3": 21802784,
# inception_output_size * embedding_size
"image_embedding": 1048576,
# vocab_size * embedding_size
"seq_embedding": 6144000,
# (embedding_size + num_lstm_units + 1) * 4 * num_lstm_units
"lstm": 2099200,
# (num_lstm_units + 1) * vocab_size
"logits": 6156000,
"global_step": 1,
}
self.assertDictEqual(expected_param_counts, param_counts)
def _checkOutputs(self, expected_shapes, feed_dict=None):
"""Verifies that the model produces expected outputs.
Args:
expected_shapes: A dict mapping Tensor or Tensor name to expected output
shape.
feed_dict: Values of Tensors to feed into Session.run().
"""
    fetches = list(expected_shapes.keys())
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
outputs = sess.run(fetches, feed_dict)
for index, output in enumerate(outputs):
tensor = fetches[index]
expected = expected_shapes[tensor]
actual = output.shape
if expected != actual:
self.fail("Tensor %s has shape %s (expected %s)." %
(tensor, actual, expected))
def testBuildForTraining(self):
model = ShowAndTellModel(self._model_config, mode="train")
model.build()
self._checkModelParameters()
expected_shapes = {
# [batch_size, image_height, image_width, 3]
model.images: (32, 299, 299, 3),
# [batch_size, sequence_length]
model.input_seqs: (32, 15),
# [batch_size, sequence_length]
model.target_seqs: (32, 15),
# [batch_size, sequence_length]
model.input_mask: (32, 15),
# [batch_size, embedding_size]
model.image_embeddings: (32, 512),
# [batch_size, sequence_length, embedding_size]
model.seq_embeddings: (32, 15, 512),
# Scalar
model.total_loss: (),
# [batch_size * sequence_length]
model.target_cross_entropy_losses: (480,),
# [batch_size * sequence_length]
model.target_cross_entropy_loss_weights: (480,),
}
self._checkOutputs(expected_shapes)
def testBuildForEval(self):
model = ShowAndTellModel(self._model_config, mode="eval")
model.build()
self._checkModelParameters()
expected_shapes = {
# [batch_size, image_height, image_width, 3]
model.images: (32, 299, 299, 3),
# [batch_size, sequence_length]
model.input_seqs: (32, 15),
# [batch_size, sequence_length]
model.target_seqs: (32, 15),
# [batch_size, sequence_length]
model.input_mask: (32, 15),
# [batch_size, embedding_size]
model.image_embeddings: (32, 512),
# [batch_size, sequence_length, embedding_size]
model.seq_embeddings: (32, 15, 512),
# Scalar
model.total_loss: (),
# [batch_size * sequence_length]
model.target_cross_entropy_losses: (480,),
# [batch_size * sequence_length]
model.target_cross_entropy_loss_weights: (480,),
}
self._checkOutputs(expected_shapes)
def testBuildForInference(self):
model = ShowAndTellModel(self._model_config, mode="inference")
model.build()
self._checkModelParameters()
# Test feeding an image to get the initial LSTM state.
images_feed = np.random.rand(1, 299, 299, 3)
feed_dict = {model.images: images_feed}
expected_shapes = {
# [batch_size, embedding_size]
model.image_embeddings: (1, 512),
# [batch_size, 2 * num_lstm_units]
"lstm/initial_state:0": (1, 1024),
}
self._checkOutputs(expected_shapes, feed_dict)
# Test feeding a batch of inputs and LSTM states to get softmax output and
# LSTM states.
input_feed = np.random.randint(0, 10, size=3)
state_feed = np.random.rand(3, 1024)
feed_dict = {"input_feed:0": input_feed, "lstm/state_feed:0": state_feed}
expected_shapes = {
# [batch_size, 2 * num_lstm_units]
"lstm/state:0": (3, 1024),
# [batch_size, vocab_size]
"softmax:0": (3, 12000),
}
self._checkOutputs(expected_shapes, feed_dict)
if __name__ == "__main__":
tf.test.main()
| Reinaesaya/OUIRL-ChatBot | chatterbot/imgcaption/im2txt/show_and_tell_model_test.py | Python | bsd-3-clause | 6,824 | 0.003957 |
"""A multiline string
"""
function('aeozrijz\
earzer', hop)
# XXX write test
x = [i for i in range(5)
if i % 4]
fonction(1,
2,
3,
4)
def definition(a,
b,
c):
return a + b + c
class debile(dict,
object):
pass
if aaaa: pass
else:
aaaa,bbbb = 1,2
aaaa,bbbb = bbbb,aaaa
# XXX write test
hop = \
aaaa
__revision__.lower();
| ruchee/vimrc | vimfiles/bundle/vim-python/submodules/astroid/tests/testdata/python3/data/format.py | Python | mit | 421 | 0.023753 |
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This package defines helpful utilities for FTL ."""
import os
import time
import logging
import subprocess
import tempfile
import datetime
import json
from ftl.common import constants
from ftl.common import ftl_error
from containerregistry.client.v2_2 import append
from containerregistry.transform.v2_2 import metadata
class FTLException(Exception):
pass
def AppendLayersIntoImage(imgs):
with Timing('Stitching layers into final image'):
for i, img in enumerate(imgs):
if i == 0:
result_image = img
continue
diff_ids = img.diff_ids()
for diff_id in diff_ids:
lyr = img.blob(img._diff_id_to_digest(diff_id))
                overrides = CfgDctToOverrides(json.loads(img.config_file()))
                result_image = append.Layer(
                    result_image, lyr, diff_id=diff_id, overrides=overrides)
return result_image
# This is a 'whitelist' of values to pass from the
# config_file of a DockerImage to an Overrides object
# _OVERRIDES_VALUES = ['created', 'Entrypoint', 'Env']
def CfgDctToOverrides(config_dct):
"""
Takes a dct of config values and runs them through
the whitelist
"""
overrides_dct = {}
for k, v in config_dct.iteritems():
if k == 'created':
# this key change is made as the key is
# 'creation_time' in an Overrides object
# but 'created' in the config_file
overrides_dct['creation_time'] = v
for k, v in config_dct['config'].iteritems():
if k == 'Entrypoint':
# this key change is made as the key is
# 'entrypoint' in an Overrides object
# but 'Entrypoint' in the config_file
overrides_dct['entrypoint'] = v
elif k == 'Env':
# this key change is made as the key is
# 'env' in an Overrides object
# but 'Env' in the config_file
overrides_dct['env'] = v
elif k == 'ExposedPorts':
# this key change is made as the key is
# 'ports' in an Overrides object
# but 'ExposedPorts' in the config_file
overrides_dct['ports'] = v
return metadata.Overrides(**overrides_dct)
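# Illustrative mapping performed by CfgDctToOverrides (the config values are
# hypothetical):
#
#   {"created": "2018-01-01T00:00:00Z",
#    "config": {"Entrypoint": ["/bin/app"], "Env": ["A=1"]}}
#
# becomes Overrides(creation_time="2018-01-01T00:00:00Z",
#                   entrypoint=["/bin/app"], env=["A=1"]).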
class Timing(object):
def __init__(self, descriptor):
logging.info("starting: %s" % descriptor)
self.descriptor = descriptor
def __enter__(self):
self.start = time.time()
return self
def __exit__(self, unused_type, unused_value, unused_traceback):
end = time.time()
logging.info('%s took %d seconds', self.descriptor, end - self.start)
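# Typical use of the Timing context manager above (a sketch; the label and
# callee are arbitrary):
#
#   with Timing('tar_runtime_package'):
#       build_the_tarball()
#
# logs "starting: tar_runtime_package" on entry and the elapsed seconds on
# exit.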
def zip_dir_to_layer_sha(pkg_dir):
tar_path = tempfile.mktemp(suffix='.tar')
with Timing('tar_runtime_package'):
subprocess.check_call(['tar', '-C', pkg_dir, '-cf', tar_path, '.'])
    with open(tar_path, 'rb') as tar_file:  # tar archives are binary
        u_blob = tar_file.read()
# We use gzip for performance instead of python's zip.
with Timing('gzip_runtime_tar'):
subprocess.check_call(['gzip', tar_path, '-1'])
    # tar_path is absolute, so os.path.join(pkg_dir, ...) would discard
    # pkg_dir anyway; read the gzipped archive directly.
    with open(tar_path + '.gz', 'rb') as gz_file:
        return gz_file.read(), u_blob
def has_pkg_descriptor(descriptor_files, ctx):
for f in descriptor_files:
if ctx.Contains(f):
return True
return False
def descriptor_parser(descriptor_files, ctx):
descriptor = None
for f in descriptor_files:
if ctx.Contains(f):
descriptor = f
descriptor_contents = ctx.GetFile(descriptor)
break
if not descriptor:
logging.info("No package descriptor found. No packages installed.")
return None
return descriptor_contents
def descriptor_copy(ctx, descriptor_files, app_dir):
for f in descriptor_files:
if ctx.Contains(f):
with open(os.path.join(app_dir, f), 'w') as w:
w.write(ctx.GetFile(f))
def gen_tmp_dir(dirr):
tmp_dir = tempfile.mkdtemp()
dir_name = os.path.join(tmp_dir, dirr)
os.mkdir(dir_name)
return dir_name
def creation_time(image):
logging.info(image.config_file())
cfg = json.loads(image.config_file())
return cfg.get('created')
def timestamp_to_time(dt_str):
dt = dt_str.rstrip('Z')
return datetime.datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S")
def generate_overrides(set_env, venv_dir=constants.VENV_DIR):
overrides_dct = {
'created': str(datetime.date.today()) + 'T00:00:00Z',
}
if set_env:
env = {
'VIRTUAL_ENV': venv_dir,
}
path_dir = os.path.join(venv_dir, "bin")
env['PATH'] = '%s:$PATH' % path_dir
        overrides_dct['env'] = env  # attach the full env map, not just the venv path
return overrides_dct
def parseCacheLogEntry(entry):
"""
This takes an FTL log entry and parses out relevant caching information
It returns a map with the information parsed from the entry
Example entry (truncated for line length):
INFO [CACHE][MISS] v1:PYTHON:click:==6.7->f1ea...
Return value for this entry:
{
"key_version": "v1",
"language": "python",
"phase": 2,
"package": "click",
"version": "6.7",
"key": "f1ea...",
"hit": True
}
"""
if "->" not in entry or "[CACHE]" not in entry:
logging.warn("cannot parse non-cache log entry %s" % entry)
return None
    # str.lstrip()/rstrip() strip sets of *characters*, not prefixes, so peel
    # the known markers off explicitly instead of character-stripping them
    # (lstrip("[CACHE]") would also eat the leading "[H" of "[HIT]").
    entry = entry.rstrip("\n").strip()
    if entry.startswith("INFO"):
        entry = entry[len("INFO"):].lstrip()
    entry = entry[len("[CACHE]"):]
    hit = entry.startswith("[HIT]")
    entry = entry[len("[HIT]" if hit else "[MISS]"):].lstrip()
parts = entry.split("->")[0]
key = entry.split("->")[1]
parts = parts.split(":")
if len(parts) == 2:
# phase 1 entry
return {
"key_version": parts[0],
"language": parts[1],
"phase": 1,
"key": key,
"hit": hit
}
else:
# phase 2 entry
return {
"key_version": parts[0],
"language": parts[1],
"phase": 2,
"package": parts[2],
"version": parts[3],
"key": key,
"hit": hit
}
def run_command(cmd_name,
cmd_args,
cmd_cwd=None,
cmd_env=None,
cmd_input=None,
err_type=ftl_error.FTLErrors.INTERNAL()):
with Timing(cmd_name):
logging.info("`%s` full cmd:\n%s" % (cmd_name, " ".join(cmd_args)))
        proc_pipe = subprocess.Popen(
cmd_args,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=cmd_cwd,
env=cmd_env,
)
stdout, stderr = proc_pipe.communicate(input=cmd_input)
logging.info("`%s` stdout:\n%s" % (cmd_name, stdout))
err_txt = ""
if stderr:
err_txt = "`%s` had error output:\n%s" % (cmd_name, stderr)
logging.error(err_txt)
if proc_pipe.returncode:
ret_txt = "error: `%s` returned code: %d" % (cmd_name,
proc_pipe.returncode)
logging.error(ret_txt)
if err_type == ftl_error.FTLErrors.USER():
raise ftl_error.UserError("%s\n%s" % (err_txt, ret_txt))
elif err_type == ftl_error.FTLErrors.INTERNAL():
raise ftl_error.InternalError("%s\n%s" % (err_txt, ret_txt))
else:
raise Exception("Unknown error type passed to run_command")
| nkubala/runtimes-common | ftl/common/ftl_util.py | Python | apache-2.0 | 8,151 | 0 |
from math import factorial
def prob_034():
facts = [factorial(i) for i in range(10)]
ans = 0
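    # Upper bound on candidates: a d-digit number's digit-factorial sum is at
    # most d * 9!, and for d >= 8 that maximum (8 * 9! = 2903040) is smaller
    # than any 8-digit number, so 7 * 9! = 2540160 suffices as a search limit.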
limit = factorial(9) * 7
for num in range(10, limit):
temp_num = num
sums = 0
while temp_num:
sums += facts[temp_num % 10]
temp_num //= 10
if sums == num:
ans += num
return ans
if __name__ == "__main__":
import time
s = time.time()
print(prob_034())
print(time.time() - s) | anshbansal/general | Python3/project_euler/001_050/034.py | Python | mit | 474 | 0.004219 |
#!/usr/bin/python2.4
#
# Copyright 2009 Empeeric LTD. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import urllib,urllib2
import urlparse
import string
try:
import simplejson
except ImportError:
import json as simplejson
BITLY_BASE_URL = "http://api.bit.ly/"
BITLY_API_VERSION = "2.0.1"
VERBS_PARAM = {
'shorten':'longUrl',
'expand':'shortUrl',
'info':'shortUrl',
'stats':'shortUrl',
'errors':'',
}
class BitlyError(Exception):
'''Base class for bitly errors'''
@property
def message(self):
'''Returns the first argument used to construct this error.'''
return self.args[0]
class Api(object):
""" API class for bit.ly """
def __init__(self, login, apikey):
self.login = login
self.apikey = apikey
self._urllib = urllib2
def shorten(self,longURLs,params={}):
"""
Takes either:
A long URL string and returns shortened URL string
Or a list of long URL strings and returns a list of shortened URL strings.
"""
want_result_list = True
if not isinstance(longURLs, list):
longURLs = [longURLs]
want_result_list = False
for index,url in enumerate(longURLs):
if not '://' in url:
longURLs[index] = "http://" + url
request = self._getURL("shorten",longURLs,params)
result = self._fetchUrl(request)
json = simplejson.loads(result)
self._CheckForError(json)
results = json['results']
res = [self._extract_short_url(results[url]) for url in longURLs]
if want_result_list:
return res
else:
return res[0]
def _extract_short_url(self,item):
if item['shortKeywordUrl'] == "":
return item['shortUrl']
else:
return item['shortKeywordUrl']
def expand(self,shortURL,params={}):
""" Given a bit.ly url or hash, return long source url """
request = self._getURL("expand",shortURL,params)
result = self._fetchUrl(request)
json = simplejson.loads(result)
self._CheckForError(json)
return json['results'][string.split(shortURL, '/')[-1]]['longUrl']
def info(self,shortURL,params={}):
"""
Given a bit.ly url or hash,
return information about that page,
such as the long source url
"""
request = self._getURL("info",shortURL,params)
result = self._fetchUrl(request)
json = simplejson.loads(result)
self._CheckForError(json)
return json['results'][string.split(shortURL, '/')[-1]]
def stats(self,shortURL,params={}):
""" Given a bit.ly url or hash, return traffic and referrer data. """
request = self._getURL("stats",shortURL,params)
result = self._fetchUrl(request)
json = simplejson.loads(result)
self._CheckForError(json)
return Stats.NewFromJsonDict(json['results'])
def errors(self,params={}):
""" Get a list of bit.ly API error codes. """
request = self._getURL("errors","",params)
result = self._fetchUrl(request)
json = simplejson.loads(result)
self._CheckForError(json)
return json['results']
def setUrllib(self, urllib):
'''Override the default urllib implementation.
Args:
urllib: an instance that supports the same API as the urllib2 module
'''
self._urllib = urllib
def _getURL(self,verb,paramVal,more_params={}):
if not isinstance(paramVal, list):
paramVal = [paramVal]
params = {
'version':BITLY_API_VERSION,
'format':'json',
'login':self.login,
'apiKey':self.apikey,
}
params.update(more_params)
params = params.items()
verbParam = VERBS_PARAM[verb]
if verbParam:
for val in paramVal:
params.append(( verbParam,val ))
encoded_params = urllib.urlencode(params)
return "%s%s?%s" % (BITLY_BASE_URL,verb,encoded_params)
def _fetchUrl(self,url):
'''Fetch a URL
Args:
url: The URL to retrieve
Returns:
A string containing the body of the response.
'''
# Open and return the URL
url_data = self._urllib.urlopen(url).read()
return url_data
def _CheckForError(self, data):
"""Raises a BitlyError if bitly returns an error message.
Args:
data: A python dict created from the bitly json response
Raises:
BitlyError wrapping the bitly error message if one exists.
"""
# bitly errors are relatively unlikely, so it is faster
# to check first, rather than try and catch the exception
if 'ERROR' in data or data['statusCode'] == 'ERROR':
raise BitlyError, data['errorMessage']
for key in data['results']:
if type(data['results']) is dict and type(data['results'][key]) is dict:
if 'statusCode' in data['results'][key] and data['results'][key]['statusCode'] == 'ERROR':
raise BitlyError, data['results'][key]['errorMessage']
class Stats(object):
'''A class representing the Statistics returned by the bitly api.
The Stats structure exposes the following properties:
      stats.user_clicks # read only
      stats.total_clicks # read only
'''
def __init__(self,user_clicks=None,total_clicks=None):
self.user_clicks = user_clicks
self.total_clicks = total_clicks
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data: A JSON dict, as converted from the JSON in the bitly API
Returns:
A bitly.Stats instance
'''
return Stats(user_clicks=data.get('userClicks', None),
total_clicks=data.get('clicks', None))
if __name__ == '__main__':
testURL1="www.yahoo.com"
testURL2="www.cnn.com"
a=Api(login="pythonbitly",apikey="R_06871db6b7fd31a4242709acaf1b6648")
short=a.shorten(testURL1)
print "Short URL = %s" % short
short=a.shorten(testURL1,{'history':1})
print "Short URL with history = %s" % short
urlList=[testURL1,testURL2]
shortList=a.shorten(urlList)
print "Short URL list = %s" % shortList
long=a.expand(short)
print "Expanded URL = %s" % long
info=a.info(short)
print "Info: %s" % info
stats=a.stats(short)
print "User clicks %s, total clicks: %s" % (stats.user_clicks,stats.total_clicks)
errors=a.errors()
print "Errors: %s" % errors
testURL3=["www.google.com"]
short=a.shorten(testURL3)
print "Short url in list = %s" % short | zackster/HipHopGoblin | trunk/bitly.py | Python | bsd-3-clause | 7,778 | 0.015042 |
# -*- coding: utf-8 -*-
# Disable while we have Python 2.x compatability
# pylint: disable=useless-object-inheritance
"""This module contains classes and functionality relating to Sonos Groups."""
from __future__ import unicode_literals
class ZoneGroup(object):
"""
A class representing a Sonos Group. It looks like this::
ZoneGroup(
uid='RINCON_000FD584236D01400:58',
coordinator=SoCo("192.168.1.101"),
members={SoCo("192.168.1.101"), SoCo("192.168.1.102")}
)
Any SoCo instance can tell you what group it is in::
>>> device = soco.discovery.any_soco()
>>> device.group
ZoneGroup(
uid='RINCON_000FD584236D01400:58',
coordinator=SoCo("192.168.1.101"),
members={SoCo("192.168.1.101"), SoCo("192.168.1.102")}
)
From there, you can find the coordinator for the current group::
>>> device.group.coordinator
SoCo("192.168.1.101")
or, for example, its name::
>>> device.group.coordinator.player_name
Kitchen
or a set of the members::
>>> device.group.members
{SoCo("192.168.1.101"), SoCo("192.168.1.102")}
For convenience, ZoneGroup is also a container::
>>> for player in device.group:
... print player.player_name
Living Room
Kitchen
If you need it, you can get an iterator over all groups on the network::
>>> device.all_groups
<generator object all_groups at 0x108cf0c30>
A consistent readable label for the group members can be returned with
the `label` and `short_label` properties.
Properties are available to get and set the group `volume` and the group
`mute` state, and the `set_relative_volume()` method can be used to make
relative adjustments to the group volume, e.g.:
>>> device.group.volume = 25
>>> device.group.volume
25
>>> device.group.set_relative_volume(-10)
15
>>> device.group.mute
        False
>>> device.group.mute = True
>>> device.group.mute
True
"""
def __init__(self, uid, coordinator, members=None):
"""
Args:
uid (str): The unique Sonos ID for this group, eg
``RINCON_000FD584236D01400:5``.
coordinator (SoCo): The SoCo instance representing the coordinator
of this group.
members (Iterable[SoCo]): An iterable containing SoCo instances
which represent the members of this group.
"""
#: The unique Sonos ID for this group
self.uid = uid
#: The `SoCo` instance which coordinates this group
self.coordinator = coordinator
if members is not None:
#: A set of `SoCo` instances which are members of the group
self.members = set(members)
else:
self.members = set()
def __iter__(self):
return self.members.__iter__()
def __contains__(self, member):
return member in self.members
def __repr__(self):
return "{0}(uid='{1}', coordinator={2!r}, members={3!r})".format(
self.__class__.__name__, self.uid, self.coordinator, self.members
)
@property
def label(self):
"""str: A description of the group.
>>> device.group.label
'Kitchen, Living Room'
"""
group_names = sorted([m.player_name for m in self.members])
return ", ".join(group_names)
@property
def short_label(self):
"""str: A short description of the group.
>>> device.group.short_label
'Kitchen + 1'
"""
group_names = sorted([m.player_name for m in self.members])
group_label = group_names[0]
if len(group_names) > 1:
group_label += " + {}".format(len(group_names) - 1)
return group_label
@property
def volume(self):
"""int: The volume of the group.
An integer between 0 and 100.
"""
response = self.coordinator.groupRenderingControl.GetGroupVolume(
[("InstanceID", 0)]
)
return int(response["CurrentVolume"])
@volume.setter
def volume(self, group_volume):
group_volume = int(group_volume)
group_volume = max(0, min(group_volume, 100)) # Coerce in range
self.coordinator.groupRenderingControl.SetGroupVolume(
[("InstanceID", 0), ("DesiredVolume", group_volume)]
)
@property
def mute(self):
"""bool: The mute state for the group.
True or False.
"""
response = self.coordinator.groupRenderingControl.GetGroupMute(
[("InstanceID", 0)]
)
mute_state = response["CurrentMute"]
return bool(int(mute_state))
@mute.setter
def mute(self, group_mute):
mute_value = "1" if group_mute else "0"
self.coordinator.groupRenderingControl.SetGroupMute(
[("InstanceID", 0), ("DesiredMute", mute_value)]
)
def set_relative_volume(self, relative_group_volume):
"""Adjust the group volume up or down by a relative amount.
If the adjustment causes the volume to overshoot the maximum value
of 100, the volume will be set to 100. If the adjustment causes the
volume to undershoot the minimum value of 0, the volume will be set
to 0.
Note that this method is an alternative to using addition and
subtraction assignment operators (+=, -=) on the `volume` property
of a `ZoneGroup` instance. These operators perform the same function
as `set_relative_volume()` but require two network calls per
operation instead of one.
Args:
relative_group_volume (int): The relative volume adjustment. Can be
positive or negative.
Returns:
int: The new group volume setting.
Raises:
ValueError: If ``relative_group_volume`` cannot be cast as
an integer.
"""
relative_group_volume = int(relative_group_volume)
# Sonos automatically handles out-of-range values.
resp = self.coordinator.groupRenderingControl.SetRelativeGroupVolume(
[("InstanceID", 0), ("Adjustment", relative_group_volume)]
)
return int(resp["NewVolume"])
| KennethNielsen/SoCo | soco/groups.py | Python | mit | 6,415 | 0 |
# Copyright (C) 2015 ycmd contributors
#
# This file is part of ycmd.
#
# ycmd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ycmd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ycmd. If not, see <http://www.gnu.org/licenses/>.
from ycmd.utils import ToUtf8IfNeeded
from ycmd.completers.completer import Completer
from ycmd import responses, utils, hmac_utils
import logging
import urlparse
import requests
import httplib
import json
import tempfile
import base64
import binascii
import threading
import os
from os import path as p
_logger = logging.getLogger( __name__ )
DIR_OF_THIS_SCRIPT = p.dirname( p.abspath( __file__ ) )
DIR_OF_THIRD_PARTY = utils.PathToNearestThirdPartyFolder( DIR_OF_THIS_SCRIPT )
RACERD_BINARY_NAME = 'racerd' + ( '.exe' if utils.OnWindows() else '' )
RACERD_BINARY = p.join( DIR_OF_THIRD_PARTY,
'racerd', 'target', 'release', RACERD_BINARY_NAME )
RACERD_HMAC_HEADER = 'x-racerd-hmac'
HMAC_SECRET_LENGTH = 16
BINARY_NOT_FOUND_MESSAGE = ( 'racerd binary not found. Did you build it? ' +
'You can do so by running ' +
'"./build.py --racer-completer".' )
ERROR_FROM_RACERD_MESSAGE = (
'Received error from racerd while retrieving completions. You did not '
'set the rust_src_path option, which is probably causing this issue. '
'See YCM docs for details.'
)
def FindRacerdBinary( user_options ):
"""
Find path to racerd binary
This function prefers the 'racerd_binary_path' value as provided in
user_options if available. It then falls back to ycmd's racerd build. If
that's not found, attempts to use racerd from current path.
"""
racerd_user_binary = user_options.get( 'racerd_binary_path' )
if racerd_user_binary:
# The user has explicitly specified a path.
if os.path.isfile( racerd_user_binary ):
return racerd_user_binary
else:
      _logger.warn( 'user provided racerd_binary_path is not a file' )
if os.path.isfile( RACERD_BINARY ):
return RACERD_BINARY
return utils.PathToFirstExistingExecutable( [ 'racerd' ] )
class RustCompleter( Completer ):
"""
A completer for the rust programming language backed by racerd.
https://github.com/jwilm/racerd
"""
def __init__( self, user_options ):
super( RustCompleter, self ).__init__( user_options )
self._racerd = FindRacerdBinary( user_options )
self._racerd_host = None
self._server_state_lock = threading.RLock()
self._keep_logfiles = user_options[ 'server_keep_logfiles' ]
self._hmac_secret = ''
self._rust_source_path = self._GetRustSrcPath()
if not self._rust_source_path:
_logger.warn( 'No path provided for the rustc source. Please set the '
'rust_src_path option' )
if not self._racerd:
_logger.error( BINARY_NOT_FOUND_MESSAGE )
raise RuntimeError( BINARY_NOT_FOUND_MESSAGE )
self._StartServer()
def _GetRustSrcPath( self ):
"""
Attempt to read user option for rust_src_path. Fallback to environment
variable if it's not provided.
"""
rust_src_path = self.user_options[ 'rust_src_path' ]
# Early return if user provided config
if rust_src_path:
return rust_src_path
# Fall back to environment variable
env_key = 'RUST_SRC_PATH'
if env_key in os.environ:
return os.environ[ env_key ]
return None
def SupportedFiletypes( self ):
return [ 'rust' ]
def _ComputeRequestHmac( self, method, path, body ):
if not body:
body = ''
hmac = hmac_utils.CreateRequestHmac( method, path, body, self._hmac_secret )
return binascii.hexlify( hmac )
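  # The hex digest computed above accompanies every request in the
  # x-racerd-hmac header (see _GetResponse below); presumably racerd
  # recomputes it from the shared secret file to authenticate callers.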
def _GetResponse( self, handler, request_data = None, method = 'POST' ):
"""
Query racerd via HTTP
racerd returns JSON with 200 OK responses. 204 No Content responses occur
when no errors were encountered but no completions, definitions, or errors
were found.
"""
_logger.info( 'RustCompleter._GetResponse' )
url = urlparse.urljoin( self._racerd_host, handler )
parameters = self._TranslateRequest( request_data )
body = json.dumps( parameters ) if parameters else None
request_hmac = self._ComputeRequestHmac( method, handler, body )
extra_headers = { 'content-type': 'application/json' }
extra_headers[ RACERD_HMAC_HEADER ] = request_hmac
response = requests.request( method,
url,
data = body,
headers = extra_headers )
response.raise_for_status()
    if response.status_code == httplib.NO_CONTENT:
return None
return response.json()
def _TranslateRequest( self, request_data ):
"""
Transform ycm request into racerd request
"""
if not request_data:
return None
file_path = request_data[ 'filepath' ]
buffers = []
for path, obj in request_data[ 'file_data' ].items():
buffers.append( {
'contents': obj[ 'contents' ],
'file_path': path
} )
line = request_data[ 'line_num' ]
col = request_data[ 'column_num' ] - 1
return {
'buffers': buffers,
'line': line,
'column': col,
'file_path': file_path
}
def _GetExtraData( self, completion ):
location = {}
if completion[ 'file_path' ]:
location[ 'filepath' ] = ToUtf8IfNeeded( completion[ 'file_path' ] )
if completion[ 'line' ]:
location[ 'line_num' ] = completion[ 'line' ]
if completion[ 'column' ]:
location[ 'column_num' ] = completion[ 'column' ] + 1
if location:
return { 'location': location }
return None
def ComputeCandidatesInner( self, request_data ):
try:
completions = self._FetchCompletions( request_data )
except requests.HTTPError:
if not self._rust_source_path:
raise RuntimeError( ERROR_FROM_RACERD_MESSAGE )
raise
if not completions:
return []
return [ responses.BuildCompletionData(
insertion_text = ToUtf8IfNeeded( completion[ 'text' ] ),
kind = ToUtf8IfNeeded( completion[ 'kind' ] ),
extra_menu_info = ToUtf8IfNeeded( completion[ 'context' ] ),
extra_data = self._GetExtraData( completion ) )
for completion in completions ]
def _FetchCompletions( self, request_data ):
return self._GetResponse( '/list_completions', request_data )
def _WriteSecretFile( self, secret ):
"""
Write a file containing the `secret` argument. The path to this file is
returned.
Note that racerd consumes the file upon reading; removal of the temp file is
intentionally not handled here.
"""
# Make temp file
secret_fd, secret_path = tempfile.mkstemp( text=True )
# Write secret
with os.fdopen( secret_fd, 'w' ) as secret_file:
secret_file.write( secret )
return secret_path
def _StartServer( self ):
"""
Start racerd.
"""
with self._server_state_lock:
self._hmac_secret = self._CreateHmacSecret()
secret_file_path = self._WriteSecretFile( self._hmac_secret )
port = utils.GetUnusedLocalhostPort()
args = [ self._racerd, 'serve',
'--port', str(port),
'-l',
'--secret-file', secret_file_path ]
# Enable logging of crashes
env = os.environ.copy()
env[ 'RUST_BACKTRACE' ] = '1'
if self._rust_source_path:
args.extend( [ '--rust-src-path', self._rust_source_path ] )
filename_format = p.join( utils.PathToTempDir(),
'racerd_{port}_{std}.log' )
self._server_stdout = filename_format.format( port = port,
std = 'stdout' )
self._server_stderr = filename_format.format( port = port,
std = 'stderr' )
with open( self._server_stderr, 'w' ) as fstderr:
with open( self._server_stdout, 'w' ) as fstdout:
self._racerd_phandle = utils.SafePopen( args,
stdout = fstdout,
stderr = fstderr,
env = env )
self._racerd_host = 'http://127.0.0.1:{0}'.format( port )
_logger.info( 'RustCompleter using host = ' + self._racerd_host )
def ServerIsRunning( self ):
"""
Check racerd status.
"""
with self._server_state_lock:
if not self._racerd_host or not self._racerd_phandle:
return False
try:
self._GetResponse( '/ping', method = 'GET' )
return True
except requests.HTTPError:
self._StopServer()
return False
def ServerIsReady( self ):
try:
self._GetResponse( '/ping', method = 'GET' )
return True
except Exception:
return False
def _StopServer( self ):
"""
Stop racerd.
"""
with self._server_state_lock:
if self._racerd_phandle:
self._racerd_phandle.terminate()
self._racerd_phandle.wait()
self._racerd_phandle = None
self._racerd_host = None
if not self._keep_logfiles:
# Remove stdout log
if self._server_stdout and p.exists( self._server_stdout ):
os.unlink( self._server_stdout )
self._server_stdout = None
# Remove stderr log
if self._server_stderr and p.exists( self._server_stderr ):
os.unlink( self._server_stderr )
self._server_stderr = None
def _RestartServer( self ):
"""
Restart racerd
"""
_logger.debug( 'RustCompleter restarting racerd' )
with self._server_state_lock:
if self.ServerIsRunning():
self._StopServer()
self._StartServer()
_logger.debug( 'RustCompleter has restarted racerd' )
def GetSubcommandsMap( self ):
return {
'GoTo' : ( lambda self, request_data, args:
self._GoToDefinition( request_data ) ),
'GoToDefinition' : ( lambda self, request_data, args:
self._GoToDefinition( request_data ) ),
'GoToDeclaration' : ( lambda self, request_data, args:
self._GoToDefinition( request_data ) ),
'StopServer' : ( lambda self, request_data, args:
self._StopServer() ),
'RestartServer' : ( lambda self, request_data, args:
self._RestartServer() ),
}
def _GoToDefinition( self, request_data ):
try:
definition = self._GetResponse( '/find_definition', request_data )
return responses.BuildGoToResponse( definition[ 'file_path' ],
definition[ 'line' ],
definition[ 'column' ] + 1 )
except Exception:
raise RuntimeError( 'Can\'t jump to definition.' )
def Shutdown( self ):
self._StopServer()
def _CreateHmacSecret( self ):
return base64.b64encode( os.urandom( HMAC_SECRET_LENGTH ) )
def DebugInfo( self, request_data ):
with self._server_state_lock:
if self.ServerIsRunning():
return ( 'racerd\n'
' listening at: {0}\n'
' racerd path: {1}\n'
' stdout log: {2}\n'
' stderr log: {3}').format( self._racerd_host,
self._racerd,
self._server_stdout,
self._server_stderr )
if self._server_stdout and self._server_stderr:
      return ( 'racerd is no longer running\n'
' racerd path: {0}\n'
' stdout log: {1}\n'
' stderr log: {2}').format( self._racerd,
self._server_stdout,
self._server_stderr )
return 'racerd is not running'
| NorfairKing/sus-depot | shared/shared/vim/dotvim/bundle/YouCompleteMe/third_party/ycmd/ycmd/completers/rust/rust_completer.py | Python | gpl-2.0 | 12,458 | 0.031947 |
import etcd
import logging
import os
import signal
import time
import unittest
import patroni.config as config
from mock import Mock, PropertyMock, patch
from patroni.api import RestApiServer
from patroni.async_executor import AsyncExecutor
from patroni.dcs.etcd import AbstractEtcdClientWithFailover
from patroni.exceptions import DCSError
from patroni.postgresql import Postgresql
from patroni.postgresql.config import ConfigHandler
from patroni import check_psycopg
from patroni.__main__ import Patroni, main as _main, patroni_main
from six.moves import BaseHTTPServer, builtins
from threading import Thread
from . import psycopg_connect, SleepException
from .test_etcd import etcd_read, etcd_write
from .test_postgresql import MockPostmaster
def mock_import(*args, **kwargs):
if args[0] == 'psycopg':
raise ImportError
ret = Mock()
ret.__version__ = '2.5.3.dev1 a b c'
return ret
class MockFrozenImporter(object):
toc = set(['patroni.dcs.etcd'])
@patch('time.sleep', Mock())
@patch('subprocess.call', Mock(return_value=0))
@patch('patroni.psycopg.connect', psycopg_connect)
@patch.object(ConfigHandler, 'append_pg_hba', Mock())
@patch.object(ConfigHandler, 'write_postgresql_conf', Mock())
@patch.object(ConfigHandler, 'write_recovery_conf', Mock())
@patch.object(Postgresql, 'is_running', Mock(return_value=MockPostmaster()))
@patch.object(Postgresql, 'call_nowait', Mock())
@patch.object(BaseHTTPServer.HTTPServer, '__init__', Mock())
@patch.object(AsyncExecutor, 'run', Mock())
@patch.object(etcd.Client, 'write', etcd_write)
@patch.object(etcd.Client, 'read', etcd_read)
class TestPatroni(unittest.TestCase):
def test_no_config(self):
self.assertRaises(SystemExit, patroni_main)
@patch('sys.argv', ['patroni.py', '--validate-config', 'postgres0.yml'])
def test_validate_config(self):
self.assertRaises(SystemExit, patroni_main)
@patch('pkgutil.iter_importers', Mock(return_value=[MockFrozenImporter()]))
@patch('sys.frozen', Mock(return_value=True), create=True)
@patch.object(BaseHTTPServer.HTTPServer, '__init__', Mock())
@patch.object(etcd.Client, 'read', etcd_read)
@patch.object(Thread, 'start', Mock())
@patch.object(AbstractEtcdClientWithFailover, 'machines', PropertyMock(return_value=['http://remotehost:2379']))
def setUp(self):
self._handlers = logging.getLogger().handlers[:]
RestApiServer._BaseServer__is_shut_down = Mock()
RestApiServer._BaseServer__shutdown_request = True
RestApiServer.socket = 0
os.environ['PATRONI_POSTGRESQL_DATA_DIR'] = 'data/test0'
conf = config.Config('postgres0.yml')
self.p = Patroni(conf)
def tearDown(self):
logging.getLogger().handlers[:] = self._handlers
@patch('patroni.dcs.AbstractDCS.get_cluster', Mock(side_effect=[None, DCSError('foo'), None]))
def test_load_dynamic_configuration(self):
self.p.config._dynamic_configuration = {}
self.p.load_dynamic_configuration()
self.p.load_dynamic_configuration()
@patch('sys.argv', ['patroni.py', 'postgres0.yml'])
@patch('time.sleep', Mock(side_effect=SleepException))
@patch.object(etcd.Client, 'delete', Mock())
@patch.object(AbstractEtcdClientWithFailover, 'machines', PropertyMock(return_value=['http://remotehost:2379']))
@patch.object(Thread, 'join', Mock())
def test_patroni_patroni_main(self):
with patch('subprocess.call', Mock(return_value=1)):
with patch.object(Patroni, 'run', Mock(side_effect=SleepException)):
os.environ['PATRONI_POSTGRESQL_DATA_DIR'] = 'data/test0'
self.assertRaises(SleepException, patroni_main)
with patch.object(Patroni, 'run', Mock(side_effect=KeyboardInterrupt())):
with patch('patroni.ha.Ha.is_paused', Mock(return_value=True)):
os.environ['PATRONI_POSTGRESQL_DATA_DIR'] = 'data/test0'
patroni_main()
@patch('os.getpid')
@patch('multiprocessing.Process')
@patch('patroni.__main__.patroni_main', Mock())
def test_patroni_main(self, mock_process, mock_getpid):
mock_getpid.return_value = 2
_main()
mock_getpid.return_value = 1
def mock_signal(signo, handler):
handler(signo, None)
with patch('signal.signal', mock_signal):
with patch('os.waitpid', Mock(side_effect=[(1, 0), (0, 0)])):
_main()
with patch('os.waitpid', Mock(side_effect=OSError)):
_main()
ref = {'passtochild': lambda signo, stack_frame: 0}
def mock_sighup(signo, handler):
if hasattr(signal, 'SIGHUP') and signo == signal.SIGHUP:
ref['passtochild'] = handler
def mock_join():
ref['passtochild'](0, None)
mock_process.return_value.join = mock_join
with patch('signal.signal', mock_sighup), patch('os.kill', Mock()):
self.assertIsNone(_main())
@patch('patroni.config.Config.save_cache', Mock())
@patch('patroni.config.Config.reload_local_configuration', Mock(return_value=True))
@patch('patroni.ha.Ha.is_leader', Mock(return_value=True))
@patch.object(Postgresql, 'state', PropertyMock(return_value='running'))
@patch.object(Postgresql, 'data_directory_empty', Mock(return_value=False))
def test_run(self):
self.p.postgresql.set_role('replica')
self.p.sighup_handler()
self.p.ha.dcs.watch = Mock(side_effect=SleepException)
self.p.api.start = Mock()
self.p.logger.start = Mock()
self.p.config._dynamic_configuration = {}
self.assertRaises(SleepException, self.p.run)
with patch('patroni.config.Config.reload_local_configuration', Mock(return_value=False)):
self.p.sighup_handler()
self.assertRaises(SleepException, self.p.run)
with patch('patroni.config.Config.set_dynamic_configuration', Mock(return_value=True)):
self.assertRaises(SleepException, self.p.run)
with patch('patroni.postgresql.Postgresql.data_directory_empty', Mock(return_value=False)):
self.assertRaises(SleepException, self.p.run)
def test_sigterm_handler(self):
self.assertRaises(SystemExit, self.p.sigterm_handler)
def test_schedule_next_run(self):
self.p.ha.cluster = Mock()
self.p.ha.dcs.watch = Mock(return_value=True)
self.p.schedule_next_run()
self.p.next_run = time.time() - self.p.dcs.loop_wait - 1
self.p.schedule_next_run()
def test_noloadbalance(self):
self.p.tags['noloadbalance'] = True
self.assertTrue(self.p.noloadbalance)
def test_nofailover(self):
self.p.tags['nofailover'] = True
self.assertTrue(self.p.nofailover)
self.p.tags['nofailover'] = None
self.assertFalse(self.p.nofailover)
def test_replicatefrom(self):
self.assertIsNone(self.p.replicatefrom)
self.p.tags['replicatefrom'] = 'foo'
self.assertEqual(self.p.replicatefrom, 'foo')
def test_reload_config(self):
self.p.reload_config()
self.p.get_tags = Mock(side_effect=Exception)
self.p.reload_config(local=True)
def test_nosync(self):
self.p.tags['nosync'] = True
self.assertTrue(self.p.nosync)
self.p.tags['nosync'] = None
self.assertFalse(self.p.nosync)
@patch.object(Thread, 'join', Mock())
def test_shutdown(self):
self.p.api.shutdown = Mock(side_effect=Exception)
self.p.ha.shutdown = Mock(side_effect=Exception)
self.p.shutdown()
def test_check_psycopg(self):
with patch.object(builtins, '__import__', Mock(side_effect=ImportError)):
self.assertRaises(SystemExit, check_psycopg)
with patch.object(builtins, '__import__', mock_import):
self.assertRaises(SystemExit, check_psycopg)
| zalando/patroni | tests/test_patroni.py | Python | mit | 7,937 | 0.00126 |
#!/usr/bin/env python3
# Basic program to read csv file and spit it back out
import argparse
import csv
import sys
def read_worksheet(csv_file):
"""
Read contents of worksheet_csv and return (contents, dialect, fields)
"""
contents = {}
dialect = None
fields = None
with open(csv_file, 'r', newline='') as worksheet:
dialect = csv.Sniffer().sniff(worksheet.read())
worksheet.seek(0)
        _has_header = csv.Sniffer().has_header(worksheet.read())  # sniffed but currently unused
worksheet.seek(0)
reader = csv.DictReader(worksheet, dialect=dialect)
fields = reader.fieldnames
for row in reader:
contents[row['Full name']] = row
return (contents, dialect, fields)
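# Illustrative shape of read_worksheet()'s return value (the row data is
# hypothetical; keys come from the CSV's 'Full name' column):
#
#   contents == {'Ada Lovelace': {'Full name': 'Ada Lovelace',
#                                 'Grade': '99.9', ...}}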
def write_worksheet(csv_file, contents, dialect, fields):
"""
Write contents to worksheet_csv using dialect and fields
"""
with open(csv_file, 'w', newline='') as worksheet:
writer = csv.DictWriter(worksheet, fields, dialect=dialect)
writer.writeheader()
for val in contents.values():
writer.writerow(val)
return None
def _main(argv=None):
"""
Module Grading Worksheet Module Unit Tests
"""
argv = argv or sys.argv[1:]
# Setup Argument Parsing
parser = argparse.ArgumentParser(description='Test Process Moodle Grading Worksheet')
parser.add_argument('input_csv', type=str,
help='Input Grading Worksheet CSV File')
parser.add_argument('output_csv', type=str,
help='Output Grading Worksheet CSV File')
# Parse Arguments
args = parser.parse_args(argv)
input_csv = args.input_csv
output_csv = args.output_csv
# Read Input
contents, dialect, fields = read_worksheet(input_csv)
# Mutate Contents
for val in contents.values():
val['Grade'] = '99.9'
# Write Output
write_worksheet(output_csv, contents, dialect, fields)
if __name__ == "__main__":
sys.exit(_main())
| asayler/moodle-offline-grading | moodle_grading_worksheet.py | Python | gpl-3.0 | 1,988 | 0.002012 |
"""Python STIX2 FileSystem Source/Sink"""
import errno
import io
import json
import os
import re
import stat
from stix2 import v20, v21
from stix2.base import _STIXBase
from stix2.datastore import (
DataSink, DataSource, DataSourceError, DataStoreMixin,
)
from stix2.datastore.filters import Filter, FilterSet, apply_common_filters
from stix2.parsing import parse
from stix2.serialization import fp_serialize
from stix2.utils import format_datetime, get_type_from_id, parse_into_datetime
def _timestamp2filename(timestamp):
"""
Encapsulates a way to create unique filenames based on an object's
"modified" property value. This should not include an extension.
Args:
timestamp: A timestamp, as a datetime.datetime object or string.
"""
# The format_datetime will determine the correct level of precision.
if isinstance(timestamp, str):
timestamp = parse_into_datetime(timestamp)
ts = format_datetime(timestamp)
ts = re.sub(r"[-T:\.Z ]", "", ts)
return ts
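# Illustrative effect of the helper above: a "modified" value such as
# "2021-03-04T05:06:07.890Z" is reduced to digits only (the "-", "T", ":",
# fractional dot, "Z" and spaces are removed), giving roughly
# "20210304050607890".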
class AuthSet(object):
"""
Represents either a whitelist or blacklist of values, where/what we
must/must not search to find objects which match a query. (Maybe "AuthSet"
isn't the right name, but determining authorization is a typical context in
which black/white lists are used.)
The set may be empty. For a whitelist, this means you mustn't search
anywhere, which means the query was impossible to match, so you can skip
searching altogether. For a blacklist, this means nothing is excluded
and you must search everywhere.
"""
BLACK = 0
WHITE = 1
def __init__(self, allowed, prohibited):
"""
Initialize this AuthSet from the given sets of allowed and/or
prohibited values. The type of set (black or white) is determined
from the allowed and/or prohibited values given.
Args:
allowed: A set of allowed values (or None if no allow filters
were found in the query)
prohibited: A set of prohibited values (not None)
"""
if allowed is None:
self.__values = prohibited
self.__type = AuthSet.BLACK
else:
# There was at least one allow filter, so create a whitelist. But
# any matching prohibited values create a combination of conditions
# which can never match. So exclude those.
self.__values = allowed - prohibited
self.__type = AuthSet.WHITE
@property
def values(self):
"""
Get the values in this white/blacklist, as a set.
"""
return self.__values
@property
def auth_type(self):
"""
Get the type of set: AuthSet.WHITE or AuthSet.BLACK.
"""
return self.__type
def __repr__(self):
return "{}list: {}".format(
"white" if self.auth_type == AuthSet.WHITE else "black",
self.values,
)
# A fixed, reusable AuthSet which accepts anything. It came in handy.
_AUTHSET_ANY = AuthSet(None, set())
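# Illustrative AuthSet behaviour under the semantics documented above (the
# type names are hypothetical):
#
#   AuthSet({"indicator"}, set())          # whitelist: search only "indicator"
#   AuthSet(None, {"malware"})             # blacklist: search everything else
#   AuthSet({"indicator"}, {"indicator"})  # empty whitelist: nothing can match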
def _update_allow(allow_set, value):
"""
Updates the given set of "allow" values. The first time an update to the
set occurs, the value(s) are added. Thereafter, since all filters are
implicitly AND'd, the given values are intersected with the existing allow
set, which may remove values. At the end, it may even wind up empty.
Args:
allow_set: The allow set, or None
value: The value(s) to add (single value, or iterable of values)
Returns:
The updated allow set (not None)
"""
adding_seq = hasattr(value, "__iter__") and \
not isinstance(value, str)
if allow_set is None:
allow_set = set()
if adding_seq:
allow_set.update(value)
else:
allow_set.add(value)
else:
# strangely, the "&=" operator requires a set on the RHS
# whereas the method allows any iterable.
if adding_seq:
allow_set.intersection_update(value)
else:
allow_set.intersection_update({value})
return allow_set
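# For example (illustrative): _update_allow(None, "indicator") yields
# {"indicator"}; a subsequent _update_allow({"indicator"}, ["indicator",
# "tool"]) intersects rather than unions (mirroring the implicit AND between
# filters) and still yields {"indicator"}.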
def _find_search_optimizations(filters):
"""
Searches through all the filters, and creates white/blacklists of types and
IDs, which can be used to optimize the filesystem search.
Args:
filters: An iterable of filter objects representing a query
Returns:
A 2-tuple of AuthSet objects: the first is for object types, and
the second is for object IDs.
"""
# The basic approach to this is to determine what is allowed and
# prohibited, independently, and then combine them to create the final
# white/blacklists.
allowed_types = allowed_ids = None
prohibited_types = set()
prohibited_ids = set()
for filter_ in filters:
if filter_.property == "type":
if filter_.op in ("=", "in"):
allowed_types = _update_allow(allowed_types, filter_.value)
elif filter_.op == "!=":
prohibited_types.add(filter_.value)
elif filter_.property == "id":
if filter_.op == "=":
# An "allow" ID filter implies a type filter too, since IDs
# contain types within them.
allowed_ids = _update_allow(allowed_ids, filter_.value)
allowed_types = _update_allow(
allowed_types,
get_type_from_id(filter_.value),
)
elif filter_.op == "!=":
prohibited_ids.add(filter_.value)
elif filter_.op == "in":
allowed_ids = _update_allow(allowed_ids, filter_.value)
allowed_types = _update_allow(
allowed_types, (
get_type_from_id(id_) for id_ in filter_.value
),
)
opt_types = AuthSet(allowed_types, prohibited_types)
opt_ids = AuthSet(allowed_ids, prohibited_ids)
# If we have both type and ID whitelists, perform a type-based intersection
# on them, to further optimize. (Some of the cross-property constraints
# occur above; this is essentially a second pass which operates on the
# final whitelists, which among other things, incorporates any of the
# prohibitions found above.)
if opt_types.auth_type == AuthSet.WHITE and \
opt_ids.auth_type == AuthSet.WHITE:
opt_types.values.intersection_update(
get_type_from_id(id_) for id_ in opt_ids.values
)
opt_ids.values.intersection_update(
id_ for id_ in opt_ids.values
if get_type_from_id(id_) in opt_types.values
)
return opt_types, opt_ids
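# Sketch of the optimization for a hypothetical query:
#
#   [Filter("type", "=", "indicator"), Filter("id", "!=", "indicator--x")]
#
# yields a type whitelist {"indicator"} and an ID blacklist {"indicator--x"}:
# only the "indicator" directory is searched, and the one prohibited ID is
# skipped.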
def _get_matching_dir_entries(parent_dir, auth_set, st_mode_test=None, ext=""):
"""
Search a directory (non-recursively), and find entries which match the
given criteria.
Args:
parent_dir: The directory to search
auth_set: an AuthSet instance, which represents a black/whitelist
filter on filenames
st_mode_test: A callable allowing filtering based on the type of
directory entry. E.g. just get directories, or just get files. It
will be passed the st_mode field of a stat() structure and should
return True to include the file, or False to exclude it. Easy thing to
do is pass one of the stat module functions, e.g. stat.S_ISREG. If
None, don't filter based on entry type.
ext: Determines how names from auth_set match up to directory
entries, and allows filtering by extension. The extension is added
to auth_set values to obtain directory entries; it is removed from
directory entries to obtain auth_set values. In this way, auth_set
may be treated as having only "basenames" of the entries. Only entries
having the given extension will be included in the results. If not
empty, the extension MUST include a leading ".". The default is the
empty string, which will result in direct comparisons, and no
extension-based filtering.
Returns:
(list): A list of directory entries matching the criteria. These will not
have any path info included; they will just be bare names.
Raises:
OSError: If there are errors accessing directory contents or stat()'ing
files
"""
results = []
if auth_set.auth_type == AuthSet.WHITE:
for value in auth_set.values:
filename = value + ext
try:
if st_mode_test:
s = os.stat(os.path.join(parent_dir, filename))
type_pass = st_mode_test(s.st_mode)
else:
type_pass = True
if type_pass:
results.append(filename)
except OSError as e:
if e.errno != errno.ENOENT:
raise
# else, file-not-found is ok, just skip
else: # auth_set is a blacklist
for entry in os.listdir(parent_dir):
if ext:
auth_name, this_ext = os.path.splitext(entry)
if this_ext != ext:
continue
else:
auth_name = entry
if auth_name in auth_set.values:
continue
try:
if st_mode_test:
s = os.stat(os.path.join(parent_dir, entry))
type_pass = st_mode_test(s.st_mode)
else:
type_pass = True
if type_pass:
results.append(entry)
except OSError as e:
if e.errno != errno.ENOENT:
raise
# else, file-not-found is ok, just skip
return results
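# Example use of the helper above (directory contents are hypothetical):
#
#   _get_matching_dir_entries(type_path, _AUTHSET_ANY, stat.S_ISREG, ".json")
#
# returns the names, extension included, of every regular ".json" file
# directly under type_path.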
def _check_object_from_file(query, filepath, allow_custom, version, encoding):
"""
Read a STIX object from the given file, and check it against the given
filters.
Args:
query: Iterable of filters
filepath (str): Path to file to read
allow_custom (bool): Whether to allow custom properties as well unknown
custom objects.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
encoding (str): The encoding to use when reading a file from the
filesystem.
Returns:
The (parsed) STIX object, if the object passes the filters. If
not, None is returned.
Raises:
TypeError: If the file had invalid JSON
IOError: If there are problems opening/reading the file
stix2.exceptions.STIXError: If there were problems creating a STIX
object from the JSON
"""
try:
with io.open(filepath, "r", encoding=encoding) as f:
stix_json = json.load(f)
except ValueError: # not a JSON file
raise TypeError(
"STIX JSON object at '{0}' could either not be parsed "
"to JSON or was not valid STIX JSON".format(filepath),
)
stix_obj = parse(stix_json, allow_custom, version)
if stix_obj["type"] == "bundle":
stix_obj = stix_obj["objects"][0]
# check against other filters, add if match
result = next(apply_common_filters([stix_obj], query), None)
return result
def _is_versioned_type_dir(type_path, type_name):
"""
Try to detect whether the given directory is for a versioned type of STIX
object. This is done by looking for a directory whose name is a STIX ID
of the appropriate type. If found, treat this type as versioned. This
doesn't work when a versioned type directory is empty (it will be
mis-classified as unversioned), but this detection is only necessary when
reading/querying data. If a directory is empty, you'll get no results
either way.
Args:
type_path: A path to a directory containing one type of STIX object.
type_name: The STIX type name.
Returns:
True if the directory looks like it contains versioned objects; False
if not.
Raises:
OSError: If there are errors accessing directory contents or stat()'ing
files
"""
id_regex = re.compile(
r"^" + re.escape(type_name) +
r"--[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}"
r"-[0-9a-f]{12}$",
re.I,
)
for entry in os.listdir(type_path):
s = os.stat(os.path.join(type_path, entry))
if stat.S_ISDIR(s.st_mode) and id_regex.match(entry):
is_versioned = True
break
else:
is_versioned = False
return is_versioned
def _search_versioned(query, type_path, auth_ids, allow_custom, version, encoding):
"""
Searches the given directory, which contains data for STIX objects of a
particular versioned type, and return any which match the query.
Args:
query: The query to match against
type_path: The directory with type-specific STIX object files
auth_ids: Search optimization based on object ID
allow_custom (bool): Whether to allow custom properties as well unknown
custom objects.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
encoding (str): The encoding to use when reading a file from the
filesystem.
Returns:
A list of all matching objects
Raises:
stix2.exceptions.STIXError: If any objects had invalid content
TypeError: If any objects had invalid content
IOError: If there were any problems opening/reading files
OSError: If there were any problems opening/reading files
"""
results = []
id_dirs = _get_matching_dir_entries(
type_path, auth_ids,
stat.S_ISDIR,
)
for id_dir in id_dirs:
id_path = os.path.join(type_path, id_dir)
# This leverages a more sophisticated function to do a simple thing:
# get all the JSON files from a directory. I guess it does give us
# file type checking, ensuring we only get regular files.
version_files = _get_matching_dir_entries(
id_path, _AUTHSET_ANY,
stat.S_ISREG, ".json",
)
for version_file in version_files:
version_path = os.path.join(id_path, version_file)
try:
stix_obj = _check_object_from_file(
query, version_path,
allow_custom, version,
encoding,
)
if stix_obj:
results.append(stix_obj)
except IOError as e:
if e.errno != errno.ENOENT:
raise
# else, file-not-found is ok, just skip
# For backward-compatibility, also search for plain files named after
# object IDs, in the type directory.
backcompat_results = _search_unversioned(
query, type_path, auth_ids, allow_custom, version, encoding,
)
results.extend(backcompat_results)
return results
def _search_unversioned(
query, type_path, auth_ids, allow_custom, version, encoding,
):
"""
Searches the given directory, which contains unversioned data, and return
any objects which match the query.
Args:
query: The query to match against
type_path: The directory with STIX files of unversioned type
auth_ids: Search optimization based on object ID
allow_custom (bool): Whether to allow custom properties as well unknown
custom objects.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
encoding (str): The encoding to use when reading a file from the
filesystem.
Returns:
A list of all matching objects
Raises:
stix2.exceptions.STIXError: If any objects had invalid content
TypeError: If any objects had invalid content
IOError: If there were any problems opening/reading files
OSError: If there were any problems opening/reading files
"""
results = []
id_files = _get_matching_dir_entries(
type_path, auth_ids, stat.S_ISREG,
".json",
)
for id_file in id_files:
id_path = os.path.join(type_path, id_file)
try:
stix_obj = _check_object_from_file(
query, id_path, allow_custom,
version, encoding,
)
if stix_obj:
results.append(stix_obj)
except IOError as e:
if e.errno != errno.ENOENT:
raise
# else, file-not-found is ok, just skip
return results
class FileSystemStore(DataStoreMixin):
"""Interface to a file directory of STIX objects.
FileSystemStore is a wrapper around a paired FileSystemSink
and FileSystemSource.
Args:
stix_dir (str): path to directory of STIX objects
allow_custom (bool): whether to allow custom STIX content to be
pushed/retrieved. Defaults to True for FileSystemSource side
(retrieving data) and False for FileSystemSink
side(pushing data). However, when parameter is supplied, it
will be applied to both FileSystemSource and FileSystemSink.
bundlify (bool): whether to wrap objects in bundles when saving
them. Default: False.
encoding (str): The encoding to use when reading a file from the
filesystem.
Attributes:
source (FileSystemSource): FileSystemSource
sink (FileSystemSink): FileSystemSink
"""
def __init__(self, stix_dir, allow_custom=None, bundlify=False, encoding='utf-8'):
if allow_custom is None:
allow_custom_source = True
allow_custom_sink = False
else:
allow_custom_sink = allow_custom_source = allow_custom
super(FileSystemStore, self).__init__(
source=FileSystemSource(stix_dir=stix_dir, allow_custom=allow_custom_source, encoding=encoding),
sink=FileSystemSink(stix_dir=stix_dir, allow_custom=allow_custom_sink, bundlify=bundlify),
)
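    # Minimal usage sketch (assumes a local "stix_data" directory; the
    # indicator object and ID are hypothetical):
    #
    #   store = FileSystemStore("stix_data")
    #   store.add(indicator)  # one JSON file per object version on disk
    #   obj = store.get("indicator--01234567-89ab-cdef-0123-456789abcdef")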
class FileSystemSink(DataSink):
"""Interface for adding/pushing STIX objects to file directory of STIX
objects.
Can be paired with a FileSystemSource, together as the two
components of a FileSystemStore.
Args:
stix_dir (str): path to directory of STIX objects.
        allow_custom (bool): Whether to allow custom STIX content to be
            written by the FileSystemSink. Default: False
bundlify (bool): Whether to wrap objects in bundles when saving them.
Default: False.
"""
def __init__(self, stix_dir, allow_custom=False, bundlify=False):
super(FileSystemSink, self).__init__()
self._stix_dir = os.path.abspath(stix_dir)
self.allow_custom = allow_custom
self.bundlify = bundlify
if not os.path.exists(self._stix_dir):
raise ValueError("directory path for STIX data does not exist")
@property
def stix_dir(self):
return self._stix_dir
def _check_path_and_write(self, stix_obj, encoding='utf-8'):
"""Write the given STIX object to a file in the STIX file directory.
"""
type_dir = os.path.join(self._stix_dir, stix_obj["type"])
# All versioned objects should have a "modified" property.
if "modified" in stix_obj:
filename = _timestamp2filename(stix_obj["modified"])
obj_dir = os.path.join(type_dir, stix_obj["id"])
else:
filename = stix_obj["id"]
obj_dir = type_dir
file_path = os.path.join(obj_dir, filename + ".json")
if not os.path.exists(obj_dir):
os.makedirs(obj_dir)
if self.bundlify:
if 'spec_version' in stix_obj:
# Assuming future specs will allow multiple SDO/SROs
# versions in a single bundle we won't need to check this
# and just use the latest supported Bundle version.
stix_obj = v21.Bundle(stix_obj, allow_custom=self.allow_custom)
else:
stix_obj = v20.Bundle(stix_obj, allow_custom=self.allow_custom)
if os.path.isfile(file_path):
raise DataSourceError("Attempted to overwrite file (!) at: {}".format(file_path))
with io.open(file_path, mode='w', encoding=encoding) as f:
fp_serialize(stix_obj, f, pretty=True, encoding=encoding, ensure_ascii=False)
def add(self, stix_data=None, version=None):
"""Add STIX objects to file directory.
Args:
stix_data (STIX object OR dict OR str OR list): valid STIX 2.0 content
in a STIX object (or list of), dict (or list of), or a STIX 2.0
json encoded string.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Note:
``stix_data`` can be a Bundle object, but each object in it will be
saved separately; you will be able to retrieve any of the objects
the Bundle contained, but not the Bundle itself.
"""
if isinstance(stix_data, (v20.Bundle, v21.Bundle)):
# recursively add individual STIX objects
for stix_obj in stix_data.get("objects", []):
self.add(stix_obj, version=version)
elif isinstance(stix_data, _STIXBase):
# adding python STIX object
self._check_path_and_write(stix_data)
elif isinstance(stix_data, (str, dict)):
parsed_data = parse(stix_data, allow_custom=self.allow_custom, version=version)
if isinstance(parsed_data, _STIXBase):
self.add(parsed_data, version=version)
else:
# custom unregistered object type
self._check_path_and_write(parsed_data)
elif isinstance(stix_data, list):
# recursively add individual STIX objects
for stix_obj in stix_data:
self.add(stix_obj)
else:
raise TypeError(
"stix_data must be a STIX object (or list of), "
"JSON formatted STIX (or list of), "
"or a JSON formatted STIX bundle",
)
class FileSystemSource(DataSource):
"""Interface for searching/retrieving STIX objects from a STIX object file
directory.
Can be paired with a FileSystemSink, together as the two
components of a FileSystemStore.
Args:
stix_dir (str): path to directory of STIX objects
allow_custom (bool): Whether to allow custom STIX content to be
added to the FileSystemSink. Default: True
encoding (str): The encoding to use when reading a file from the
filesystem.
"""
def __init__(self, stix_dir, allow_custom=True, encoding='utf-8'):
super(FileSystemSource, self).__init__()
self._stix_dir = os.path.abspath(stix_dir)
self.allow_custom = allow_custom
self.encoding = encoding
if not os.path.exists(self._stix_dir):
raise ValueError("directory path for STIX data does not exist: %s" % self._stix_dir)
@property
def stix_dir(self):
return self._stix_dir
def get(self, stix_id, version=None, _composite_filters=None):
"""Retrieve STIX object from file directory via STIX ID.
Args:
stix_id (str): The STIX ID of the STIX object to be retrieved.
_composite_filters (FilterSet): collection of filters passed from the parent
CompositeDataSource, not user supplied
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Returns:
(STIX object): STIX object that has the supplied STIX ID.
The STIX object is loaded from its json file, parsed into
a python STIX object and then returned
"""
all_data = self.all_versions(stix_id, version=version, _composite_filters=_composite_filters)
if all_data:
# Simple check for a versioned STIX type: see if the objects have a
# "modified" property. (Need only check one, since they are all of
# the same type.)
is_versioned = "modified" in all_data[0]
if is_versioned:
stix_obj = sorted(all_data, key=lambda k: k['modified'])[-1]
else:
stix_obj = all_data[0]
else:
stix_obj = None
return stix_obj
def all_versions(self, stix_id, version=None, _composite_filters=None):
"""Retrieve STIX object from file directory via STIX ID, all versions.
        Note: get() delegates to this method and then selects the most
        recent version from the results.
Args:
stix_id (str): The STIX ID of the STIX objects to be retrieved.
_composite_filters (FilterSet): collection of filters passed from
the parent CompositeDataSource, not user supplied
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Returns:
            (list): of STIX objects that have the supplied STIX ID.
                The STIX objects are loaded from their json files, parsed into
                python STIX objects and then returned
"""
query = [Filter("id", "=", stix_id)]
return self.query(query, version=version, _composite_filters=_composite_filters)
def query(self, query=None, version=None, _composite_filters=None):
"""Search and retrieve STIX objects based on the complete query.
A "complete query" includes the filters from the query, the filters
attached to this FileSystemSource, and any filters passed from a
CompositeDataSource (i.e. _composite_filters).
Args:
query (list): list of filters to search on
_composite_filters (FilterSet): collection of filters passed from
the CompositeDataSource, not user supplied
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Returns:
            (list): list of STIX objects that match the supplied
                query. The STIX objects are loaded from their json files,
                parsed into python STIX objects and then returned.
"""
all_data = []
query = FilterSet(query)
# combine all query filters
if self.filters:
query.add(self.filters)
if _composite_filters:
query.add(_composite_filters)
auth_types, auth_ids = _find_search_optimizations(query)
type_dirs = _get_matching_dir_entries(
self._stix_dir, auth_types,
stat.S_ISDIR,
)
for type_dir in type_dirs:
type_path = os.path.join(self._stix_dir, type_dir)
type_is_versioned = _is_versioned_type_dir(type_path, type_dir)
if type_is_versioned:
type_results = _search_versioned(
query, type_path, auth_ids,
self.allow_custom, version,
self.encoding,
)
else:
type_results = _search_unversioned(
query, type_path, auth_ids,
self.allow_custom, version,
self.encoding,
)
all_data.extend(type_results)
return all_data
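# A hedged sketch of reading the same directory back; the ID and filter values
# are illustrative:
#
#     from stix2 import Filter
#     src = FileSystemSource(stix_dir="/tmp/stix_data")
#     indicators = src.query([Filter("type", "=", "indicator")])
#     latest = src.get("indicator--01234567-89ab-cdef-0123-456789abcdef")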
| oasis-open/cti-python-stix2 | stix2/datastore/filesystem.py | Python | bsd-3-clause | 28,607 | 0.000699 |
# -*- coding: utf-8 -*-
##
## This file is part of Harvesting Kit.
## Copyright (C) 2014 CERN.
##
## Harvesting Kit is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Harvesting Kit is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Harvesting Kit; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from __future__ import print_function
import sys
from datetime import datetime
from harvestingkit.utils import (fix_journal_name,
collapse_initials,
record_add_field)
from harvestingkit.minidom_utils import (get_value_in_tag,
xml_to_text,
get_attribute_in_tag,
get_inner_xml)
from datetime import date
class JatsPackage(object):
def __init__(self, journal_mappings={}):
self.journal_mappings = journal_mappings
def _get_journal(self):
try:
title = get_value_in_tag(self.document, 'abbrev-journal-title')
if not title:
title = get_value_in_tag(self.document, 'journal-title')
return title.strip()
except Exception:
print("Can't find journal-title", file=sys.stderr)
return ''
def _get_abstract(self):
for tag in self.document.getElementsByTagName('abstract'):
return get_inner_xml(tag)
def _get_title(self):
try:
notes = []
for tag in self.document.getElementsByTagName('article-title'):
for note in tag.getElementsByTagName('xref'):
if note.getAttribute('ref-type') == 'fn':
tag.removeChild(note)
notes.append(note.getAttribute('rid'))
return get_inner_xml(tag), get_value_in_tag(self.document, 'subtitle'), notes
except Exception:
print("Can't find title", file=sys.stderr)
return '', '', ''
def _get_doi(self):
try:
for tag in self.document.getElementsByTagName('article-id'):
if tag.getAttribute('pub-id-type') == 'doi':
return tag.firstChild.data
except Exception:
print("Can't find doi", file=sys.stderr)
return ''
def _get_affiliations(self):
affiliations = {}
for tag in self.document.getElementsByTagName('aff'):
aid = tag.getAttribute('id')
affiliation = xml_to_text(tag)
if affiliation:
#removes the label
try:
int(affiliation.split()[0])
affiliation = ' '.join(affiliation.split()[1:])
except ValueError:
pass
affiliations[aid] = affiliation
return affiliations
def _get_author_emails(self):
author_emails = {}
for tag in self.document.getElementsByTagName('author-notes'):
email_elements = tag.getElementsByTagName('corresp')
email_elements += tag.getElementsByTagName('fn')
for tg in email_elements:
nid = tg.getAttribute('id')
email = xml_to_text(tg)
email = email.replace(';', '')
#removes the label
            tokens = email.split()
            # drop a leading label token when present
            emails = tokens[1:] if len(tokens) > 1 else tokens
            valid_emails = []
            for email in emails:
                if '@' in email and '.' in email:
                    valid_emails.append(email)
            author_emails[nid] = valid_emails
return author_emails
def _get_authors(self):
authors = []
for contrib in self.document.getElementsByTagName('contrib'):
            # Springer puts collaborations in an additional "contrib" tag, so to
            # avoid having a fake author with all affiliations we skip "contrib"
            # tags with "contrib" subtags.
if contrib.getElementsByTagName('contrib'):
continue
if contrib.getAttribute('contrib-type') == 'author':
surname = get_value_in_tag(contrib, 'surname')
given_names = get_value_in_tag(contrib, 'given-names')
given_names = collapse_initials(given_names)
name = '%s, %s' % (surname, given_names)
affiliations = []
corresp = []
for tag in contrib.getElementsByTagName('xref'):
if tag.getAttribute('ref-type') == 'aff':
for rid in tag.getAttribute('rid').split():
if rid.lower().startswith('a'):
affiliations.append(rid)
elif rid.lower().startswith('n'):
corresp.append(rid)
elif tag.getAttribute('ref-type') == 'corresp' or\
tag.getAttribute('ref-type') == 'author-notes':
for rid in tag.getAttribute('rid').split():
corresp.append(rid)
authors.append((name, affiliations, corresp))
return authors
def _get_license(self):
license = ''
license_type = ''
license_url = ''
for tag in self.document.getElementsByTagName('license'):
license = get_value_in_tag(tag, 'ext-link')
license_type = tag.getAttribute('license-type')
license_url = get_attribute_in_tag(tag, 'ext-link', 'xlink:href')
if license_url:
license_url = license_url[0]
return license, license_type, license_url
def _get_page_count(self):
try:
return get_attribute_in_tag(self.document, 'page-count', 'count')[0]
except IndexError:
print("Can't find page count", file=sys.stderr)
return ''
def _get_copyright(self):
try:
copyright_holder = get_value_in_tag(self.document, 'copyright-holder')
copyright_year = get_value_in_tag(self.document, 'copyright-year')
copyright_statement = get_value_in_tag(self.document, 'copyright-statement')
return copyright_holder, copyright_year, copyright_statement
except Exception:
print("Can't find copyright", file=sys.stderr)
return '', '', ''
def _get_pacscodes(self):
pacscodes = []
for tag in self.document.getElementsByTagName('kwd-group'):
if tag.getAttribute('kwd-group-type') == 'pacs':
for code in tag.getElementsByTagName('kwd'):
pacscodes.append(xml_to_text(code))
return pacscodes
def _get_date(self):
final = ''
epub_date = ''
ppub_date = ''
for dateTag in self.document.getElementsByTagName('pub-date'):
if dateTag.getAttribute('pub-type') == 'final':
try:
day = int(get_value_in_tag(dateTag, 'day'))
month = int(get_value_in_tag(dateTag, 'month'))
year = int(get_value_in_tag(dateTag, 'year'))
final = str(date(year, month, day))
except ValueError:
pass
if dateTag.getAttribute('pub-type') == 'epub':
try:
day = int(get_value_in_tag(dateTag, 'day'))
month = int(get_value_in_tag(dateTag, 'month'))
year = int(get_value_in_tag(dateTag, 'year'))
epub_date = str(date(year, month, day))
except ValueError:
epub_date = dateTag.getAttribute('iso-8601-date')
elif dateTag.getAttribute('pub-type') == 'ppub':
try:
day = int(get_value_in_tag(dateTag, 'day'))
month = int(get_value_in_tag(dateTag, 'month'))
year = int(get_value_in_tag(dateTag, 'year'))
ppub_date = str(date(year, month, day))
except ValueError:
ppub_date = dateTag.getAttribute('iso-8601-date')
if final:
return final
elif epub_date:
return epub_date
elif ppub_date:
return ppub_date
else:
print("Can't find publication date", file=sys.stderr)
return datetime.now().strftime("%Y-%m-%d")
def _get_publisher(self):
try:
return get_value_in_tag(self.document, 'publisher')
except Exception:
print("Can't find publisher", file=sys.stderr)
return ''
def _get_subject(self):
subjects = []
for tag in self.document.getElementsByTagName('subj-group'):
if tag.getAttribute('subj-group-type') == 'toc-minor' or \
tag.getAttribute('subj-group-type') == 'section':
for subject in tag.getElementsByTagName('subject'):
subjects.append(xml_to_text(subject))
return ', '.join(subjects)
def _get_publication_information(self):
journal = self._get_journal()
date = self._get_date()
doi = self._get_doi()
issue = get_value_in_tag(self.document, 'issue')
journal, volume = fix_journal_name(journal, self.journal_mappings)
volume += get_value_in_tag(self.document, 'volume')
page = get_value_in_tag(self.document, 'elocation-id')
fpage = get_value_in_tag(self.document, 'fpage')
lpage = get_value_in_tag(self.document, 'lpage')
year = date[:4]
return (journal, volume, issue, year, date, doi, page, fpage, lpage)
def _get_keywords(self):
keywords = []
for tag in self.document.getElementsByTagName('kwd-group'):
if tag.getAttribute('kwd-group-type') != 'pacs':
for kwd in tag.getElementsByTagName('kwd'):
keywords.append(xml_to_text(kwd))
return keywords
def _add_authors(self, rec):
authors = self._get_authors()
affiliations = self._get_affiliations()
author_emails = self._get_author_emails()
first_author = True
for author in authors:
subfields = [('a', author[0])]
if author[1]:
for aff in author[1]:
subfields.append(('v', affiliations[aff]))
if author[2]:
for note in author[2]:
for email in author_emails[note]:
if email:
subfields.append(('m', email))
if first_author:
record_add_field(rec, '100', subfields=subfields)
first_author = False
else:
record_add_field(rec, '700', subfields=subfields)
def _get_article_type(self):
article_type = get_attribute_in_tag(self.document, 'article', 'article-type')
if article_type:
article_type = article_type[0]
return article_type
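# JatsPackage never assigns self.document itself; concrete packages are
# expected to parse the JATS XML first. A hedged sketch of how a subclass
# might do that (the class and file names are assumptions for illustration):
#
#     from xml.dom.minidom import parse
#
#     class MyPackage(JatsPackage):
#         def __init__(self, filename, journal_mappings={}):
#             super(MyPackage, self).__init__(journal_mappings)
#             self.document = parse(filename)
#
#     package = MyPackage("article.xml")
#     journal, volume, issue, year, date, doi, page, fpage, lpage = \
#         package._get_publication_information()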
| kaplun/harvesting-kit | harvestingkit/jats_package.py | Python | gpl-2.0 | 11,580 | 0.001813 |
from __future__ import absolute_import
from typing import Any, Iterable, Dict, Tuple, Callable, Text, Mapping
import requests
import json
import sys
import inspect
import logging
from six.moves import urllib
from functools import reduce
from requests import Response
from django.utils.translation import ugettext as _
from zerver.models import Realm, UserProfile, get_realm_by_email_domain, get_user_profile_by_id, get_client
from zerver.lib.actions import check_send_message
from zerver.lib.queue import queue_json_publish
from zerver.lib.validator import check_dict, check_string
from zerver.decorator import JsonableError
MAX_REQUEST_RETRIES = 3
class OutgoingWebhookServiceInterface(object):
def __init__(self, base_url, token, user_profile, service_name):
# type: (Text, Text, UserProfile, Text) -> None
self.base_url = base_url # type: Text
self.token = token # type: Text
        self.user_profile = user_profile # type: UserProfile
self.service_name = service_name # type: Text
# Given an event that triggers an outgoing webhook operation, returns the REST
# operation that should be performed, together with the body of the request.
#
# The input format can vary depending on the type of webhook service.
# The return value should be a tuple (rest_operation, request_data), where:
# rest_operation is a dictionary containing at least the following keys: method, relative_url_path and
# request_kwargs. It provides rest operation related info.
# request_data is a dictionary whose format can vary depending on the type of webhook service.
def process_event(self, event):
        # type: (Dict[str, Any]) -> Tuple[Dict[str, Any], Dict[str, Any]]
raise NotImplementedError()
# Given a successful response to the outgoing webhook REST operation, returns the message
# that should be sent back to the user.
#
# The response will be the response object obtained from REST operation.
# The event will be the same as the input to process_command.
# The returned message will be a dictionary which should have "response_message" as key and response message to
# be sent to user as value.
def process_success(self, response, event):
# type: (Response, Dict[Text, Any]) -> Dict[str, Any]
raise NotImplementedError()
# Given a failed outgoing webhook REST operation, returns the message that should be sent back to the user.
#
# The response will be the response object obtained from REST operation.
# The event will be the same as the input to process_command.
# The returned message will be a dictionary which should have "response_message" as key and response message to
# be sent to user as value.
def process_failure(self, response, event):
# type: (Response, Dict[Text, Any]) -> Dict[str, Any]
raise NotImplementedError()
def send_response_message(bot_id, message, response_message_content):
# type: (str, Dict[str, Any], Text) -> None
recipient_type_name = message['type']
bot_user = get_user_profile_by_id(bot_id)
realm = get_realm_by_email_domain(message['sender_email'])
if recipient_type_name == 'stream':
recipients = [message['display_recipient']]
check_send_message(bot_user, get_client("OutgoingWebhookResponse"), recipient_type_name, recipients,
message['subject'], response_message_content, realm, forwarder_user_profile=bot_user)
else:
# Private message; only send if the bot is there in the recipients
recipients = [recipient['email'] for recipient in message['display_recipient']]
if bot_user.email in recipients:
check_send_message(bot_user, get_client("OutgoingWebhookResponse"), recipient_type_name, recipients,
message['subject'], response_message_content, realm, forwarder_user_profile=bot_user)
def succeed_with_message(event, success_message):
# type: (Dict[str, Any], Text) -> None
success_message = "Success! " + success_message
send_response_message(event['user_profile_id'], event['message'], success_message)
def fail_with_message(event, failure_message):
# type: (Dict[str, Any], Text) -> None
failure_message = "Failure! " + failure_message
send_response_message(event['user_profile_id'], event['message'], failure_message)
def request_retry(event, failure_message):
# type: (Dict[str, Any], Text) -> None
event['failed_tries'] += 1
if event['failed_tries'] > MAX_REQUEST_RETRIES:
bot_user = get_user_profile_by_id(event['user_profile_id'])
failure_message = "Maximum retries exceeded! " + failure_message
fail_with_message(event, failure_message)
logging.warning("Maximum retries exceeded for trigger:%s event:%s" % (bot_user.email, event['command']))
else:
queue_json_publish("outgoing_webhooks", event, lambda x: None)
def do_rest_call(rest_operation, request_data, event, service_handler, timeout=None):
# type: (Dict[str, Any], Dict[str, Any], Dict[str, Any], Any, Any) -> None
rest_operation_validator = check_dict([
('method', check_string),
('relative_url_path', check_string),
('request_kwargs', check_dict([])),
('base_url', check_string),
])
error = rest_operation_validator('rest_operation', rest_operation)
if error:
raise JsonableError(error)
http_method = rest_operation['method']
final_url = urllib.parse.urljoin(rest_operation['base_url'], rest_operation['relative_url_path'])
request_kwargs = rest_operation['request_kwargs']
request_kwargs['timeout'] = timeout
try:
response = requests.request(http_method, final_url, data=json.dumps(request_data), **request_kwargs)
if str(response.status_code).startswith('2'):
response_data = service_handler.process_success(response, event)
succeed_with_message(event, response_data["response_message"])
# On 50x errors, try retry
elif str(response.status_code).startswith('5'):
request_retry(event, "unable to connect with the third party.")
else:
response_data = service_handler.process_failure(response, event)
fail_with_message(event, response_data["response_message"])
except requests.exceptions.Timeout:
logging.info("Trigger event %s on %s timed out. Retrying" % (event["command"], event['service_name']))
request_retry(event, 'unable to connect with the third party.')
except requests.exceptions.RequestException as e:
        response_message = "An exception occurred for message `%s`! See the logs for more information." % (event["command"],)
        logging.exception("Outgoing webhook trigger failed:\n %s" % (e,))
fail_with_message(event, response_message)
| jrowan/zulip | zerver/lib/outgoing_webhook.py | Python | apache-2.0 | 6,862 | 0.00408 |
from django.contrib.auth import get_user, get_user_model
from django.contrib.auth.models import AnonymousUser, User
from django.core.exceptions import ImproperlyConfigured
from django.db import IntegrityError
from django.http import HttpRequest
from django.test import TestCase, override_settings
from django.utils import translation
from .models import CustomUser
class BasicTestCase(TestCase):
def test_user(self):
"Users can be created and can set their password"
u = User.objects.create_user('testuser', 'test@example.com', 'testpw')
self.assertTrue(u.has_usable_password())
self.assertFalse(u.check_password('bad'))
self.assertTrue(u.check_password('testpw'))
# Check we can manually set an unusable password
u.set_unusable_password()
u.save()
self.assertFalse(u.check_password('testpw'))
self.assertFalse(u.has_usable_password())
u.set_password('testpw')
self.assertTrue(u.check_password('testpw'))
u.set_password(None)
self.assertFalse(u.has_usable_password())
# Check username getter
self.assertEqual(u.get_username(), 'testuser')
# Check authentication/permissions
self.assertFalse(u.is_anonymous)
self.assertTrue(u.is_authenticated)
self.assertFalse(u.is_staff)
self.assertTrue(u.is_active)
self.assertFalse(u.is_superuser)
# Check API-based user creation with no password
u2 = User.objects.create_user('testuser2', 'test2@example.com')
self.assertFalse(u2.has_usable_password())
def test_unicode_username(self):
User.objects.create_user('jörg')
User.objects.create_user('Григорий')
# Two equivalent unicode normalized usernames should be duplicates
omega_username = 'iamtheΩ' # U+03A9 GREEK CAPITAL LETTER OMEGA
ohm_username = 'iamtheΩ' # U+2126 OHM SIGN
User.objects.create_user(ohm_username)
with self.assertRaises(IntegrityError):
User.objects.create_user(omega_username)
def test_user_no_email(self):
"Users can be created without an email"
u = User.objects.create_user('testuser1')
self.assertEqual(u.email, '')
u2 = User.objects.create_user('testuser2', email='')
self.assertEqual(u2.email, '')
u3 = User.objects.create_user('testuser3', email=None)
self.assertEqual(u3.email, '')
def test_anonymous_user(self):
"Check the properties of the anonymous user"
a = AnonymousUser()
self.assertIsNone(a.pk)
self.assertEqual(a.username, '')
self.assertEqual(a.get_username(), '')
self.assertTrue(a.is_anonymous)
self.assertFalse(a.is_authenticated)
self.assertFalse(a.is_staff)
self.assertFalse(a.is_active)
self.assertFalse(a.is_superuser)
self.assertEqual(a.groups.all().count(), 0)
self.assertEqual(a.user_permissions.all().count(), 0)
def test_superuser(self):
"Check the creation and properties of a superuser"
super = User.objects.create_superuser('super', 'super@example.com', 'super')
self.assertTrue(super.is_superuser)
self.assertTrue(super.is_active)
self.assertTrue(super.is_staff)
def test_get_user_model(self):
"The current user model can be retrieved"
self.assertEqual(get_user_model(), User)
@override_settings(AUTH_USER_MODEL='auth_tests.CustomUser')
def test_swappable_user(self):
"The current user model can be swapped out for another"
self.assertEqual(get_user_model(), CustomUser)
with self.assertRaises(AttributeError):
User.objects.all()
@override_settings(AUTH_USER_MODEL='badsetting')
def test_swappable_user_bad_setting(self):
"The alternate user setting must point to something in the format app.model"
msg = "AUTH_USER_MODEL must be of the form 'app_label.model_name'"
with self.assertRaisesMessage(ImproperlyConfigured, msg):
get_user_model()
@override_settings(AUTH_USER_MODEL='thismodel.doesntexist')
def test_swappable_user_nonexistent_model(self):
"The current user model must point to an installed model"
msg = (
"AUTH_USER_MODEL refers to model 'thismodel.doesntexist' "
"that has not been installed"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
get_user_model()
def test_user_verbose_names_translatable(self):
"Default User model verbose names are translatable (#19945)"
with translation.override('en'):
self.assertEqual(User._meta.verbose_name, 'user')
self.assertEqual(User._meta.verbose_name_plural, 'users')
with translation.override('es'):
self.assertEqual(User._meta.verbose_name, 'usuario')
self.assertEqual(User._meta.verbose_name_plural, 'usuarios')
class TestGetUser(TestCase):
def test_get_user_anonymous(self):
request = HttpRequest()
request.session = self.client.session
user = get_user(request)
self.assertIsInstance(user, AnonymousUser)
def test_get_user(self):
created_user = User.objects.create_user('testuser', 'test@example.com', 'testpw')
self.client.login(username='testuser', password='testpw')
request = HttpRequest()
request.session = self.client.session
user = get_user(request)
self.assertIsInstance(user, User)
self.assertEqual(user.username, created_user.username)
| edmorley/django | tests/auth_tests/test_basic.py | Python | bsd-3-clause | 5,627 | 0.000534 |
from rest_framework import serializers
from .models import User, Activity, Period
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email')
extra_kwargs = {
'url': {'view_name': 'timeperiod:user-detail'},
}
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
class Meta:
model = Activity
fields = ('url', 'user', 'name', 'total', 'running')
extra_kwargs = {
'url': {'view_name': 'timeperiod:activity-detail'},
'user': {'view_name': 'timeperiod:user-detail'},
}
class PeriodSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Period
fields = ('url', 'activity', 'start', 'end', 'valid')
extra_kwargs = {
'url': {'view_name': 'timeperiod:period-detail'},
'activity': {'view_name': 'timeperiod:activity-detail'},
}
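# ActivitySerializer relies on CurrentUserDefault, and the hyperlinked fields
# need the request to build absolute URLs, so the request must be supplied in
# the context when instantiating the serializer manually. DRF viewsets do
# this automatically; a hedged sketch of manual use:
#
#     serializer = ActivitySerializer(data={'name': 'Reading'},
#                                     context={'request': request})
#     serializer.is_valid(raise_exception=True)
#     serializer.save()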
| maurob/timeperiod | serializers.py | Python | mit | 1,063 | 0.001881 |
class Solution:
def postorderTraversal(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
if root is None:
return []
else:
return list(self.postorderTraversalGen(root))
def postorderTraversalGen(self, node):
if node.left is not None:
for other in self.postorderTraversalGen(node.left):
yield other
if node.right is not None:
for other in self.postorderTraversalGen(node.right):
yield other
        yield node.val
| xiao0720/leetcode | 145/Solution.py | Python | mit | 597 | 0.0067 |
from formatter.AbstractFormatter import AbstractFormatter
import html
import os
class IndexElement:
anchor = 0
def __init__(self, content):
self.content = content
self.a = IndexElement.anchor
self.node_list = []
IndexElement.anchor += 1
def addNode(self, e):
self.node_list.append(e)
    @staticmethod
    def getHtmlIndex(index):
        # local name avoids shadowing the imported html module
        markup = "<li><a href=\"#%d\">%s</a></li>" % (index.a, index.content)
        if len(index.node_list) > 0:
            markup += "<ol>"
            for node in index.node_list:
                markup += IndexElement.getHtmlIndex(node)
            markup += "</ol>"
        return markup
class ResultElement:
def __init__(self):
self.content = ""
class HtmlFormatter(AbstractFormatter):
def make_file(self):
of = open(self.output_file, 'w', encoding="utf-8")
of.write('<!DOCTYPE html><html><head>')
of.write('<meta charset="utf-8">')
of.write('<title>%s</title>' % os.path.basename(self.result_file))
of.write('</head><body>')
# Main data
of.write("<table>")
of.write("<tr><td><b>Author:</b></td><td>%s</td></tr>" % self.result_data['author'])
of.write("<tr><td><b>Config file:</b></td><td>%s</td></tr>" % self.result_data['config']['file'])
of.write("<tr><td><b>Config file SHA256:</b></td><td>%s</td></tr>" % self.result_data['config']['sha256'])
of.write("<tr><td><b>Evidence folder path:</b></td><td>%s</td></tr>" % self.result_data['evidence_folder'])
of.write("</table>")
of.write("<hr>")
result_element = ResultElement()
result_element.content = "<h1>Result</h1>"
index_list = []
index_content = "<h1>Index</h1>"
#of.write("<h2>Result</h2>")
for mc in self.result_data['result']:
index_elem = IndexElement(mc['module_chain_id'])
index_list.append(index_elem)
self.traverse_chain(result_element, index_elem, mc)
index_content += "<ol>"
for node in index_list:
index_content += IndexElement.getHtmlIndex(node)
index_content += "</ol>"
#result_element.content += "<hr />"
of.write(index_content)
of.write(result_element.content)
of.write('</body></html>')
of.close()
def traverse_chain(self, result: ResultElement, index: IndexElement, mc):
result.content += "<h2 id=\"%d\">%s</h2>" % (index.a, mc['module_chain_id'])
for mod in mc['modules']:
mod_id_index = IndexElement(mod['title'])
index.addNode(mod_id_index)
result.content += "<h3 id=\"%d\" style=\"background-color: #ccc;\">%s</h3>" % (mod_id_index.a, mod['title'])
result.content += "<table>"
result.content += "<tr><td><b>Module ID</b></td><td>%s</td></tr>" % mod['mod']
result.content += "<tr><td><b>File count</b></td><td>%s</td></tr>" % mod['files_count']
result.content += "</table>"
if len(mod['data']) > 0:
result.content += "<h4 id=\"%d\" style=\"background-color: #ccc;\">Collected data</h4>" % IndexElement.anchor
mod_id_index.addNode(IndexElement("Collected data"))
file_list = sorted(mod['data'].keys())
for file_name in file_list:
file_data = mod['data'][file_name]
result.content += "<b>%s</b>" % file_name
if len(file_data) > 0:
is_tuple = isinstance(file_data[0], tuple)
if not is_tuple:
result.content += "<ul>"
else:
result.content += "<table>"
for file_data_elem in file_data:
if is_tuple:
result.content += "<tr>"
result.content += "<td style=\"background-color: #ccc;\"><b>%s</b></td><td>%s</td>" % (file_data_elem[0], file_data_elem[1])
result.content += "</tr>"
else:
result.content += "<li>%s</li>" % file_data_elem
if not is_tuple:
result.content += "</ul>"
else:
result.content += "</table>"
try:
if len(mod['extract_data']) > 0:
result.content += "<h4 id=\"%d\">Extracted data</h4>" % IndexElement.anchor
mod_id_index.addNode(IndexElement("Extracted data"))
file_list = sorted(mod['extract_data'].keys())
for file_name in file_list:
file_data = mod['extract_data'][file_name]
table_views = sorted(file_data.keys())
result.content += "<b style=\"background-color: #ccc;\">%s</b><br />" % file_name
for table in table_views:
table_info = file_data[table]
result.content += "<b>%s</b>" % table
result.content += "<table style=\"white-space: nowrap;\">"
for col in table_info[0]:
result.content += "<th style=\"background-color: #ccc;\">%s</th>" % col
for row in table_info[1]:
result.content += "<tr>"
for col_data in row:
cell_data = col_data
if isinstance(col_data, bytes):
cell_data = col_data.decode('utf-8', 'ignore')
elif col_data == None:
cell_data = "NULL"
else:
cell_data = col_data
if isinstance(cell_data, str):
cell_data = html.escape(cell_data)
result.content += "<td style=\"min-width: 100px;\">%s</td>" % cell_data
result.content += "</tr>"
result.content += "</table>"
result.content += '<hr style="margin-bottom: 100px;" />'
except KeyError:
pass
sub_module_chain = None
try:
sub_module_chain = mod['module_chain']
except KeyError:
continue
if sub_module_chain:
result.content += '<hr />'
result.content += "<div style=\"padding-left: 5px; border-left: 3px; border-left-style: dotted; border-left-color: #ccc\""
self.traverse_chain(result, index, sub_module_chain)
result.content += "</div>"
| vdjagilev/desefu-export | formatter/html/HtmlFormatter.py | Python | mit | 7,031 | 0.003413 |
import importlib
from flask import render_template
import lib.es as es
def get(p):
# get data source definiton
query = 'name:{}'.format(p['nav'][3])
p['ds'] = es.list(p['host'], 'core_data', 'datasource', query)[0]
# load service
path = "web.modules.dataservice.services.{}".format(p['ds']['type'])
mod = importlib.import_module(path)
return mod.execute(p)
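# The service module is resolved from the data source "type", so supporting a
# new backend only requires adding a module with an execute(p) function under
# web/modules/dataservice/services/. A hedged sketch (the "mysql" name is an
# assumption):
#
#     # web/modules/dataservice/services/mysql.py
#     def execute(p):
#         ds = p['ds']  # the datasource definition looked up above
#         return {"rows": [], "source": ds['name']}  # run the actual query here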
| unkyulee/elastic-cms | src/web/modules/dataservice/controllers/json.py | Python | mit | 392 | 0.005102 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) The University of Strathclyde 2019
# Author: Leighton Pritchard
#
# Contact:
# leighton.pritchard@strath.ac.uk
#
# Leighton Pritchard,
# Strathclyde Institute of Pharmaceutical and Biomedical Sciences
# The University of Strathclyde
# Cathedral Street
# Glasgow
# G1 1XQ
# Scotland,
# UK
#
# The MIT License
#
# (c) The University of Strathclyde 2019
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Code to implement Seaborn graphics output for ANI analyses."""
import warnings
import matplotlib # pylint: disable=C0411
import pandas as pd
import seaborn as sns
matplotlib.use("Agg")
import matplotlib.pyplot as plt # noqa: E402,E501 # pylint: disable=wrong-import-position,wrong-import-order,ungrouped-imports
# Add classes colorbar to Seaborn plot
def get_colorbar(dfr, classes):
"""Return a colorbar representing classes, for a Seaborn plot.
:param dfr:
:param classes:
The aim is to get a pd.Series for the passed dataframe columns,
in the form:
0 colour for class in col 0
1 colour for class in col 1
... colour for class in col ...
n colour for class in col n
"""
levels = sorted(list(set(classes.values())))
paldict = dict(
zip(
levels,
sns.cubehelix_palette(
len(levels), light=0.9, dark=0.1, reverse=True, start=1, rot=-2
),
)
)
lvl_pal = {cls: paldict[lvl] for (cls, lvl) in list(classes.items())}
# Have to use string conversion of the dataframe index, here
col_cb = pd.Series([str(_) for _ in dfr.index]).map(lvl_pal)
# The col_cb Series index now has to match the dfr.index, but
# we don't create the Series with this (and if we try, it
# fails) - so change it with this line
col_cb.index = dfr.index
return col_cb
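# A minimal sketch of using get_colorbar; the genome names and class labels
# are illustrative assumptions:
#
#     import pandas as pd
#     dfr = pd.DataFrame([[1.0, 0.9], [0.9, 1.0]],
#                        index=["genome_A", "genome_B"],
#                        columns=["genome_A", "genome_B"])
#     classes = {"genome_A": "clade1", "genome_B": "clade2"}
#     col_cb = get_colorbar(dfr, classes)  # pd.Series of RGB tuples, indexed like dfr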
# Add labels to the seaborn heatmap axes
def add_labels(fig, params):
"""Add labels to Seaborn heatmap axes, in-place.
:param fig:
:param params:
"""
if params.labels:
# If a label mapping is missing, use the key text as fall back
for _ in fig.ax_heatmap.get_yticklabels():
_.set_text(params.labels.get(_.get_text(), _.get_text()))
for _ in fig.ax_heatmap.get_xticklabels():
_.set_text(params.labels.get(_.get_text(), _.get_text()))
fig.ax_heatmap.set_xticklabels(fig.ax_heatmap.get_xticklabels(), rotation=90)
fig.ax_heatmap.set_yticklabels(fig.ax_heatmap.get_yticklabels(), rotation=0)
return fig
# Return a clustermap
def get_clustermap(dfr, params, title=None, annot=True):
"""Return a Seaborn clustermap for the passed dataframe.
:param dfr:
:param params:
:param title: str, plot title
:param annot: Boolean, add text for cell values?
"""
# If we do not catch warnings here, then we often get the following warning:
# ClusterWarning: scipy.cluster: The symmetric non-negative hollow
# observation matrix looks suspiciously like an uncondensed distance matrix
# The usual solution would be to convert the array with
# scipy.spatial.distance.squareform(), but this requires that all values in
# the main diagonal are zero, which is not the case for ANI.
# As we know this is a (1-distance) matrix, we could just set the diagonal
# to zero and fudge it, but this is not a good solution. Instead, we suppress
# the warning in a context manager for this function call only, because we
# know the warning is not relevant.
with warnings.catch_warnings():
warnings.filterwarnings(
"ignore",
message=(
"scipy.cluster: The symmetric non-negative "
"hollow observation matrix looks suspiciously like an "
"uncondensed distance matrix"
),
)
fig = sns.clustermap(
dfr,
cmap=params.cmap,
vmin=params.vmin,
vmax=params.vmax,
col_colors=params.colorbar,
row_colors=params.colorbar,
figsize=(params.figsize, params.figsize),
linewidths=params.linewidths,
annot=annot,
)
# add labels for each of the input genomes
add_labels(fig, params)
fig.cax.yaxis.set_label_position("left")
if title:
fig.cax.set_ylabel(title)
# Return clustermap
return fig
# Generate Seaborn heatmap output
def heatmap(dfr, outfilename=None, title=None, params=None):
"""Return seaborn heatmap with cluster dendrograms.
:param dfr: pandas DataFrame with relevant data
:param outfilename: path to output file (indicates output format)
:param title:
:param params:
"""
# Decide on figure layout size: a minimum size is required for
# aesthetics, and a maximum to avoid core dumps on rendering.
# If we hit the maximum size, we should modify font size.
maxfigsize = 120
calcfigsize = dfr.shape[0] * 1.1
figsize = min(max(8, calcfigsize), maxfigsize)
if figsize == maxfigsize:
scale = maxfigsize / calcfigsize
sns.set_context("notebook", font_scale=scale)
# Add a colorbar?
if params.classes is None:
col_cb = None
else:
col_cb = get_colorbar(dfr, params.classes)
# Add attributes to parameter object, and draw heatmap
params.colorbar = col_cb
params.figsize = figsize
params.linewidths = 0.25
fig = get_clustermap(dfr, params, title=title)
# Save to file
if outfilename:
fig.savefig(outfilename)
# Return clustermap
return fig
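# A hedged sketch of calling heatmap(); params is assumed to be any object
# exposing the attributes this module reads (cmap, vmin, vmax, labels,
# classes), for example a SimpleNamespace:
#
#     from types import SimpleNamespace
#     params = SimpleNamespace(cmap="Spectral", vmin=0.75, vmax=1.0,
#                              labels=None, classes=None)
#     fig = heatmap(dfr, outfilename="identity_heatmap.pdf",
#                   title="ANIm percentage identity", params=params)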
def distribution(dfr, outfilename, matname, title=None):
"""Return seaborn distribution plot for matrix.
:param drf: DataFrame with results matrix
:param outfilename: Path to output file for writing
:param matname: str, type of matrix being plotted
:param title: str, optional title
"""
fill = "#A6C8E0"
rug = "#2678B2"
fig, axes = plt.subplots(1, 2, figsize=(15, 5))
fig.suptitle(title)
sns.histplot(
dfr.values.flatten(),
ax=axes[0],
stat="count",
element="step",
color=fill,
edgecolor=fill,
)
axes[0].set_ylim(ymin=0)
sns.kdeplot(dfr.values.flatten(), ax=axes[1])
sns.rugplot(dfr.values.flatten(), ax=axes[1], color=rug)
# Modify axes after data is plotted
for _ in axes:
if matname == "sim_errors":
_.set_xlim(0, _.get_xlim()[1])
elif matname in ["hadamard", "coverage"]:
_.set_xlim(0, 1.01)
elif matname == "identity":
_.set_xlim(0.75, 1.01)
# Tidy figure
fig.tight_layout(rect=[0, 0.03, 1, 0.95])
if outfilename:
# For some reason seaborn gives us an AxesSubPlot with
# sns.distplot, rather than a Figure, so we need this hack
fig.savefig(outfilename)
return fig
def scatter(
dfr1,
dfr2,
outfilename=None,
matname1="identity",
matname2="coverage",
title=None,
params=None,
):
"""Return seaborn scatterplot.
:param dfr1: pandas DataFrame with x-axis data
:param dfr2: pandas DataFrame with y-axis data
:param outfilename: path to output file (indicates output format)
:param matname1: name of x-axis data
:param matname2: name of y-axis data
:param title: title for the plot
:param params: a list of parameters for plotting: [colormap, vmin, vmax]
"""
# Make an empty dataframe to collect the input data in
combined = pd.DataFrame()
# Add data
combined[matname1] = dfr1.values.flatten()
combined[matname2] = dfr2.values.flatten()
# Add lable information, if available
# if params.labels:
# hue = "labels"
# combined['labels'] = # add labels to dataframe; unsure of their configuration at this point
# else:
hue = None
# Create the plot
fig = sns.lmplot(
x=matname1,
y=matname2,
data=combined,
hue=hue,
fit_reg=False,
scatter_kws={"s": 2},
)
fig.set(xlabel=matname1.title(), ylabel=matname2.title())
plt.title(title)
# Save to file
if outfilename:
fig.savefig(outfilename)
# Return clustermap
return fig
| widdowquinn/pyani | pyani/pyani_graphics/sns/__init__.py | Python | mit | 9,274 | 0.000647 |
"""Handy XML processing utility functions.
Various XML processing utilities, using minidom, that are used in
various places throughout the code.
"""
"""
============================== License ========================================
Copyright (C) 2008, 2010-12 University of Edinburgh, Mark Granroth-Wilding
This file is part of The Jazz Parser.
The Jazz Parser is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The Jazz Parser is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with The Jazz Parser. If not, see <http://www.gnu.org/licenses/>.
============================ End license ======================================
"""
__author__ = "Mark Granroth-Wilding <mark.granroth-wilding@ed.ac.uk>"
from xml.dom import minidom
class XmlReadError(Exception):
pass
def attrs_to_dict(attrs):
"""
Converts a minidom NamedNodeMap that represents the attributes
of a node into a dictionary. The keys are attribute names.
The values are the attributes' string values.
"""
return dict([(str(attr.name),attr.value) for attr in attrs.values()])
def remove_unwanted_elements(node_list):
"""
Minidom node lists include entries for carriage returns, for
some reason. This function removes these from a list.
"""
return [node for node in node_list \
if (node.nodeType != minidom.Node.TEXT_NODE) and \
(node.nodeType != minidom.Node.COMMENT_NODE)]
def get_single_element_by_tag_name(node, tag_name, optional=False):
"""
Returns an element that is a child of the given node and that
has the tag name given. This method is used where it is assumed
that one such tag exists.
If there is none, an exception is
raised. If there is more than one, the first is returned.
@return: the child of node with tag name tag_name
"""
from jazzparser.grammar import GrammarReadError
tags = node.getElementsByTagName(tag_name)
if len(tags) == 0:
if optional:
return None
else:
raise XmlReadError, "No %s tag found" % tag_name
return tags[0]
def require_attrs(node, attrs):
"""
Checks for the existence of the named attributes on the given
node and raises an exception if they're not there.
Returns a tuple of their values if they're all found.
"""
return tuple([require_attr(node, attr) for attr in attrs])
def require_attr(node, attr):
"""
Checks for the existence of the named attribute on the given
node and raises an exception if it's not there.
Returns its value if it is there.
"""
element = node.attributes.getNamedItem(attr)
if element is None:
raise XmlReadError, "required attribute '%s' was not found "\
"on %s node: %s" % (attr, node.nodeName, node.toxml())
return element.value
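# A hedged sketch tying these helpers together; the XML snippet is
# illustrative:
#
#     doc = minidom.parseString('<grammar><rule name="NP"/></grammar>')
#     rule = get_single_element_by_tag_name(doc, "rule")
#     (name,) = require_attrs(rule, ["name"])
#     attrs = attrs_to_dict(rule.attributes)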
| markgw/jazzparser | src/jazzparser/utils/domxml.py | Python | gpl-3.0 | 3,282 | 0.012492 |
#!/usr/bin/env python3
class Solution:
def maxPower(self, s):
p, cnt, ret = '', 1, 0
for c in s:
if c == p:
cnt += 1
else:
ret, cnt = max(ret, cnt), 1
p = c
return max(ret, cnt)
str_list = [
'cc',
'leetcode',
'abbcccddddeeeeedcba',
'triplepillooooow',
'hooraaaaaaaay',
'tourist',
]
sol = Solution()
for s in str_list:
print(s, sol.maxPower(s))
| eroicaleo/LearningPython | interview/leet/1446_Consecutive_Characters.py | Python | mit | 471 | 0.002123 |
def get_filename_from_pathname(pathnames):
# if arg is a not a list, make it so
# https://stackoverflow.com/a/922800/861745
if not isinstance(pathnames, (list, tuple)):
pathnames = [pathnames]
for pathname in pathnames:
# print( pathname )
print( pathname.replace('\\','/').replace('}','/').split('/')[-1] )
def main():
str_pathname = r"path blah/ path \ this}file name from string.txt"
array_pathnames = [
r"",
r"file name from array.txt",
r"path blah/ path \ this}file name.txt",
r"path blah/ path \ this}file.txt",
r"\test/test\test.txt"
]
get_filename_from_pathname(str_pathname)
get_filename_from_pathname(array_pathnames)
# if called directly, then run this example:
if __name__ == '__main__':
main()
# https://forum.bigfix.com/t/get-filename-from-arbitrary-pathnames/34616
| jgstew/tools | Python/get_filename_from_pathname.py | Python | mit | 894 | 0.006711 |
#!/usr/local/bin/python
# Code Fights Digits Product Problem
def digitsProduct(product):
def get_single_dig_factors(product):
# Helper function to generate single-digit factors of product
n = product
factors = []
for i in range(9, 1, -1):
while n % i == 0 and n > 1:
factors.append(i)
                n //= i
if n > 9:
# At least one factor is a two-digit prime number
return None
return sorted(factors)
if product == 0:
return 10
elif product < 10:
return product
factors = get_single_dig_factors(product)
if factors:
return int(''.join([str(i) for i in factors]))
else:
return -1
def main():
tests = [
[12, 26],
[19, -1],
[450, 2559],
[0, 10],
[13, -1],
[1, 1],
[243, 399],
[576, 889],
[360, 589],
[24, 38],
[120, 358],
[168, 378],
[192, 388],
[216, 389],
[600, 3558],
[33, -1],
[81, 99]
]
for t in tests:
res = digitsProduct(t[0])
ans = t[1]
if ans == res:
print("PASSED: digitsProduct({}) returned {}"
.format(t[0], res))
else:
print("FAILED: digitsProduct({}) returned {}, answer: {}"
.format(t[0], res, ans))
if __name__ == '__main__':
main()
| HKuz/Test_Code | CodeFights/digitsProduct.py | Python | mit | 1,457 | 0 |
#Made by Kerb
import sys
from com.l2scoria import Config
from com.l2scoria.gameserver.model.quest import State
from com.l2scoria.gameserver.model.quest import QuestState
from com.l2scoria.gameserver.model.quest.jython import QuestJython as JQuest
qn = "644_GraveRobberAnnihilation"
#Drop rate
DROP_CHANCE = 75
#Npc
KARUDA = 32017
#Items
ORC_GOODS = 8088
#Rewards
REWARDS = {
"1" : [1865 , 30], #Varnish
"2" : [1867 , 40], #Animal Skin
"3" : [1872 , 40], #Animal Bone
"4" : [1871 , 30], #Charcoal
"5" : [1870 , 30], #Coal
"6" : [1869 , 30], #Iron Ore
}
#Mobs
MOBS = [ 22003,22004,22005,22006,22008 ]
class Quest (JQuest) :
def onEvent (self,event,st) :
cond = st.getInt("cond")
htmltext = event
if event == "32017-03.htm" :
if st.getPlayer().getLevel() < 20 :
htmltext = "32017-02.htm"
st.exitQuest(1)
else :
st.set("cond","1")
st.setState(STARTED)
st.playSound("ItemSound.quest_accept")
elif event in REWARDS.keys() :
item, amount = REWARDS[event]
st.takeItems(ORC_GOODS,-1)
st.giveItems(item, amount)
st.playSound("ItemSound.quest_finish")
st.exitQuest(1)
return
return htmltext
def onTalk (self,npc,player):
htmltext = "<html><body>You are either not carrying out your quest or don't meet the criteria.</body></html>"
st = player.getQuestState(qn)
if st :
npcId = npc.getNpcId()
id = st.getState()
cond = st.getInt("cond")
if cond == 0 :
htmltext = "32017-01.htm"
elif cond == 1 :
htmltext = "32017-04.htm"
elif cond == 2 :
if st.getQuestItemsCount(ORC_GOODS) >= 120 :
htmltext = "32017-05.htm"
else :
htmltext = "32017-04.htm"
return htmltext
def onKill(self,npc,player,isPet):
partyMember = self.getRandomPartyMember(player,"1")
if not partyMember: return
st = partyMember.getQuestState(qn)
if st :
if st.getState() == STARTED :
count = st.getQuestItemsCount(ORC_GOODS)
if st.getInt("cond") == 1 and count < 120 :
chance = DROP_CHANCE * Config.RATE_DROP_QUEST
numItems, chance = divmod(chance,100)
if st.getRandom(100) < chance :
numItems += 1
if numItems :
if count + numItems >= 120 :
numItems = 120 - count
st.playSound("ItemSound.quest_middle")
st.set("cond","2")
else:
st.playSound("ItemSound.quest_itemget")
st.giveItems(ORC_GOODS,int(numItems))
return
QUEST = Quest(644, qn, "Grave Robber Annihilation")
CREATED = State('Start', QUEST)
STARTED = State('Started', QUEST)
QUEST.setInitialState(CREATED)
QUEST.addStartNpc(KARUDA)
QUEST.addTalkId(KARUDA)
for i in MOBS :
QUEST.addKillId(i)
STARTED.addQuestDrop(KARUDA,ORC_GOODS,1)
| zenn1989/scoria-interlude | L2Jscoria-Game/data/scripts/quests/644_GraveRobberAnnihilation/__init__.py | Python | gpl-3.0 | 2,948 | 0.04749 |
from datetime import datetime
from channels.generic.websockets import JsonWebsocketConsumer
from channels import Group, Channel
from channels.message import Message
from ..models import Room
class ChatServer(JsonWebsocketConsumer):
# Set to True if you want them, else leave out
strict_ordering = False
slight_ordering = False
def connection_groups(self, **kwargs):
"""
Called to return the list of groups to automatically add/remove
this connection to/from.
"""
        # The base consumer iterates over this value, so wrap the slug in a
        # list; iterating a bare string would treat each character as a group.
        return [kwargs.pop('slug')]
def connect(self, message, **kwargs): # type: (Message, dict)
"""
Handles connecting to the websocket
:param message: The socket message
"""
slug = kwargs.pop('slug')
Group(slug).add(message.reply_channel)
self.message.reply_channel.send({"accept": True})
def receive(self, content, **kwargs): # type: (dict, dict)
"""
Handles receiving websocket messages
"""
# Re-introduce the kwargs into the content dict
content.update(kwargs)
content['reply_channel_name'] = self.message.reply_channel.name
# Unpack the message and send it to metronome.routing.command_routing list
Channel('chat.receive').send(content=content)
def disconnect(self, message, **kwargs): # type: (Message, dict)
"""
Handles disconnecting from a room
"""
slug = kwargs['slug']
Group(slug).discard(message.reply_channel)
# Handle a user-leave event
message.content['event'] = 'user-leave'
self.receive(message.content, **kwargs)
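# A hedged routing sketch for channels 1.x; the URL pattern is an assumption,
# and the named "slug" group is what populates the kwargs used above. The
# 'chat.receive' channel that receive() publishes to needs its own route as
# well:
#
#     # routing.py
#     from channels.routing import route_class
#     channel_routing = [
#         route_class(ChatServer, path=r'^/chat/(?P<slug>[^/]+)/$'),
#     ]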
| k-pramod/channel.js | examples/chatter/chat/consumers/base.py | Python | mit | 1,649 | 0.001213 |
#!/usr/bin/env python
import messagebird
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--accessKey', help='access key for MessageBird API', type=str, required=True)
parser.add_argument('--webhookId', help='webhook that you want to read', type=str, required=True)
args = vars(parser.parse_args())
try:
client = messagebird.Client(args['accessKey'])
webhook = client.conversation_read_webhook(args['webhookId'])
# Print the object information.
print('The following information was returned as a Webhook object:')
print(webhook)
except messagebird.client.ErrorException as e:
    print('An error occurred while requesting a Webhook object:')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
| messagebird/python-rest-api | examples/conversation_read_webhook.py | Python | bsd-2-clause | 878 | 0.002278 |
#!/usr/bin/python2.4
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import sys
from django.db import connections
from ...boot import PROJECT_DIR
from ...db.base import DatabaseWrapper
from django.core.management.base import BaseCommand
from django.core.exceptions import ImproperlyConfigured
def start_dev_appserver(argv):
"""Starts the App Engine dev_appserver program for the Django project.
The appserver is run with default parameters. If you need to pass any special
parameters to the dev_appserver you will have to invoke it manually.
"""
from google.appengine.tools import dev_appserver_main
progname = argv[0]
args = []
# hack __main__ so --help in dev_appserver_main works OK.
sys.modules['__main__'] = dev_appserver_main
# Set bind ip/port if specified.
addr, port = None, '8000'
if len(argv) > 2:
if not argv[2].startswith('-'):
addrport = argv[2]
try:
addr, port = addrport.split(":")
except ValueError:
addr = addrport
else:
args.append(argv[2])
args.extend(argv[3:])
if addr:
args.extend(["--address", addr])
if port:
args.extend(["--port", port])
# Add email settings
from django.conf import settings
if '--smtp_host' not in args and '--enable_sendmail' not in args:
args.extend(['--smtp_host', settings.EMAIL_HOST,
'--smtp_port', str(settings.EMAIL_PORT),
'--smtp_user', settings.EMAIL_HOST_USER,
'--smtp_password', settings.EMAIL_HOST_PASSWORD])
# Pass the application specific datastore location to the server.
for name in connections:
connection = connections[name]
if isinstance(connection, DatabaseWrapper):
p = connection._get_paths()
if '--datastore_path' not in args:
args.extend(['--datastore_path', p[0]])
if '--blobstore_path' not in args:
args.extend(['--blobstore_path', p[1]])
if '--history_path' not in args:
args.extend(['--history_path', p[2]])
break
# Reset logging level to INFO as dev_appserver will spew tons of debug logs
logging.getLogger().setLevel(logging.INFO)
# Append the current working directory to the arguments.
dev_appserver_main.main([progname] + args + [PROJECT_DIR])
class Command(BaseCommand):
"""Overrides the default Django runserver command.
Instead of starting the default Django development server this command
fires up a copy of the full fledged App Engine dev_appserver that emulates
the live environment your application will be deployed to.
"""
help = 'Runs a copy of the App Engine development server.'
args = '[optional port number, or ipaddr:port]'
def run_from_argv(self, argv):
start_dev_appserver(argv)
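# Invocation mirrors the stock runserver command, and unrecognised flags pass
# straight through to dev_appserver (a hedged example):
#
#     python manage.py runserver 0.0.0.0:8080 --clear_datastore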
| bdelliott/wordgame | web/djangoappengine/management/commands/runserver.py | Python | mit | 3,475 | 0.000576 |
#!/usr/bin/python
# *****************************************************************************
#
# Copyright (c) 2016, EPAM SYSTEMS INC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ******************************************************************************
from pymongo import MongoClient
import sys
import yaml
import argparse
path = "/etc/mongod.conf"
outfile = "/etc/mongo_params.yml"
parser = argparse.ArgumentParser()
parser.add_argument('--resource', type=str, default='')
parser.add_argument('--status', type=str, default='')
args = parser.parse_args()
def read_yml_conf(path, section, param):
try:
with open(path, 'r') as config_yml:
            config = yaml.safe_load(config_yml)
result = config[section][param]
return result
except:
print("File does not exist")
return ''
def update_resource_status(resource, status):
path = "/etc/mongod.conf"
mongo_passwd = "PASSWORD"
mongo_ip = read_yml_conf(path, 'net', 'bindIp')
mongo_port = read_yml_conf(path, 'net', 'port')
    client = MongoClient("mongodb://admin:" + mongo_passwd + "@" + mongo_ip + ':' + str(mongo_port) + "/dlabdb")
client.dlabdb.statuses.save({"_id": resource, "value": status})
if __name__ == "__main__":
try:
update_resource_status(args.resource, args.status)
except:
print("Unable to update status for the resource {}".format(args.resource))
        sys.exit(1)
| epam/DLab | infrastructure-provisioning/src/ssn/scripts/resource_status.py | Python | apache-2.0 | 2,004 | 0.002495 |
# encoding: utf-8
"""
Miscellaneous functions, which are useful for handling bodies.
"""
from yade.wrapper import *
import utils,math,numpy
try:
from minieigen import *
except ImportError:
from miniEigen import *
#spheresPackDimensions==================================================
def spheresPackDimensions(idSpheres=[],mask=-1):
"""The function accepts the list of spheres id's or list of bodies and calculates max and min dimensions, geometrical center.
:param list idSpheres: list of spheres
:param int mask: :yref:`Body.mask` for the checked bodies
:return: dictionary with keys ``min`` (minimal dimension, Vector3), ``max`` (maximal dimension, Vector3), ``minId`` (minimal dimension sphere Id, Vector3), ``maxId`` (maximal dimension sphere Id, Vector3), ``center`` (central point of bounding box, Vector3), ``extends`` (sizes of bounding box, Vector3), ``volume`` (volume of spheres, Real), ``mass`` (mass of spheres, Real), ``number`` (number of spheres, int),
"""
idSpheresIter=[]
if (len(idSpheres)<1):
#check mask
ifSpherMask=[]
if (mask>-1): #The case, when only the mask was given, without list of ids
for i in O.bodies:
				if ((i.mask&mask)!=0):
ifSpherMask.append(i.id)
if (len(ifSpherMask)<2):
raise RuntimeWarning("Not enough bodies to analyze with given mask")
else:
idSpheresIter=ifSpherMask
else:
raise RuntimeWarning("Only a list of particles with length > 1 can be analyzed")
else:
idSpheresIter=idSpheres
minVal = Vector3.Zero
maxVal = Vector3.Zero
minId = Vector3.Zero
maxId = Vector3.Zero
counter = 0
volume = 0.0
mass = 0.0
for i in idSpheresIter:
if (type(i).__name__=='int'):
b = O.bodies[i] #We have received a list of ID's
elif (type(i).__name__=='Body'):
			b = i					#We have received a list of bodies
		else:
			raise TypeError("Unknown type of data, should be a list of ints or Bodies")
if (b):
spherePosition=b.state.pos #skip non-existent spheres
try:
sphereRadius=b.shape.radius #skip non-spheres
except AttributeError: continue
if (mask>-1) and ((mask&b.mask)==0): continue #skip bodies with wrong mask
sphereRadiusVec3 = Vector3(sphereRadius,sphereRadius,sphereRadius)
sphereMax = spherePosition + sphereRadiusVec3
sphereMin = spherePosition - sphereRadiusVec3
for dim in range(0,3):
if ((sphereMax[dim]>maxVal[dim]) or (counter==0)):
maxVal[dim]=sphereMax[dim]
maxId[dim] = b.id
if ((sphereMin[dim]<minVal[dim]) or (counter==0)):
minVal[dim]=sphereMin[dim]
minId[dim] = b.id
volume += 4.0/3.0*math.pi*sphereRadius*sphereRadius*sphereRadius
mass += b.state.mass
counter += 1
center = (maxVal-minVal)/2.0+minVal
extends = maxVal-minVal
dimensions = {'max':maxVal,'min':minVal,'maxId':maxId,'minId':minId,'center':center,
'extends':extends, 'volume':volume, 'mass':mass, 'number':counter}
return dimensions
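#spheresPackDimensions usage (editor's sketch)===========================
def _examplePackDimensions():
	"""Editor's sketch, not part of the original API: analyze every sphere in
	the current Yade scene, assuming a packing was already inserted into
	O.bodies (e.g. via yade.pack.SpherePack().toSimulation())."""
	dims = spheresPackDimensions()
	print('center: %s extends: %s volume: %s' % (dims['center'], dims['extends'], dims['volume']))
	return dims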
#facetsDimensions==================================================
def facetsDimensions(idFacets=[],mask=-1):
"""The function accepts the list of facet id's or list of facets and calculates max and min dimensions, geometrical center.
:param list idFacets: list of spheres
:param int mask: :yref:`Body.mask` for the checked bodies
:return: dictionary with keys ``min`` (minimal dimension, Vector3), ``max`` (maximal dimension, Vector3), ``minId`` (minimal dimension facet Id, Vector3), ``maxId`` (maximal dimension facet Id, Vector3), ``center`` (central point of bounding box, Vector3), ``extends`` (sizes of bounding box, Vector3), ``number`` (number of facets, int),
"""
idFacetsIter=[]
if (len(idFacets)<1):
#check mask
ifFacetMask=[]
if (mask>-1): #The case, when only the mask was given, without list of ids
for i in O.bodies:
				if ((i.mask & mask) != 0):
ifFacetMask.append(i.id)
if (len(ifFacetMask)<2):
raise RuntimeWarning("Not enough bodies to analyze with given mask")
else:
idFacetsIter=ifFacetMask
else:
raise RuntimeWarning("Only a list of particles with length > 1 can be analyzed")
else:
idFacetsIter=idFacets
minVal = Vector3.Zero
maxVal = Vector3.Zero
minId = Vector3.Zero
maxId = Vector3.Zero
counter = 0
for i in idFacetsIter:
if (type(i).__name__=='int'):
b = O.bodies[i] #We have received a list of ID's
		elif (type(i).__name__=='Body'):
			b = i #We have received a list of bodies
		else:
			raise TypeError("Unknown type of data, should be a list of ints or bodies")
if (b):
p = b.state.pos
o = b.state.ori
s = b.shape
pt1 = p + o*s.vertices[0]
pt2 = p + o*s.vertices[1]
pt3 = p + o*s.vertices[2]
if (mask>-1) and ((mask&b.mask)==0): continue #skip bodies with wrong mask
facetMax = Vector3(max(pt1[0], pt2[0], pt3[0]), max(pt1[1], pt2[1], pt3[1]), max(pt1[2], pt2[2], pt3[2]))
facetMin = Vector3(min(pt1[0], pt2[0], pt3[0]), min(pt1[1], pt2[1], pt3[1]), min(pt1[2], pt2[2], pt3[2]))
for dim in range(0,3):
if ((facetMax[dim]>maxVal[dim]) or (counter==0)):
maxVal[dim]=facetMax[dim]
maxId[dim] = b.id
if ((facetMin[dim]<minVal[dim]) or (counter==0)):
minVal[dim]=facetMin[dim]
minId[dim] = b.id
counter += 1
center = (maxVal-minVal)/2.0+minVal
extends = maxVal-minVal
dimensions = {'max':maxVal,'min':minVal,'maxId':maxId,'minId':minId,'center':center,
'extends':extends, 'number':counter}
return dimensions
#spheresModify==========================================================
def spheresModify(idSpheres=[],mask=-1,shift=Vector3.Zero,scale=1.0,orientation=Quaternion((0,1,0),0.0),copy=False):
"""The function accepts the list of spheres id's or list of bodies and modifies them: rotating, scaling, shifting.
if copy=True copies bodies and modifies them.
Also the mask can be given. If idSpheres not empty, the function affects only bodies, where the mask passes.
If idSpheres is empty, the function search for bodies, where the mask passes.
:param Vector3 shift: Vector3(X,Y,Z) parameter moves spheres.
:param float scale: factor scales given spheres.
:param Quaternion orientation: orientation of spheres
:param int mask: :yref:`Body.mask` for the checked bodies
:returns: list of bodies if copy=True, and Boolean value if copy=False
"""
idSpheresIter=[]
if (len(idSpheres)==0):
#check mask
ifSpherMask=[]
if (mask>-1): #The case, when only the mask was given, without list of ids
for i in O.bodies:
				if ((i.mask & mask) != 0):
ifSpherMask.append(i.id)
if (len(ifSpherMask)==0):
raise RuntimeWarning("No bodies to modify with given mask")
else:
idSpheresIter=ifSpherMask
else:
raise RuntimeWarning("No bodies to modify")
else:
idSpheresIter=idSpheres
dims = spheresPackDimensions(idSpheresIter)
ret=[]
for i in idSpheresIter:
if (type(i).__name__=='int'):
b = O.bodies[i] #We have received a list of ID's
elif (type(i).__name__=='Body'):
			b = i #We have received a list of bodies
else:
raise TypeError("Unknown type of data, should be list of int's or bodies")
try:
sphereRadius=b.shape.radius #skip non-spheres
except AttributeError: continue
if (mask>-1) and ((mask&b.mask)==0): continue #skip bodies with wrong mask
if (copy): b=sphereDuplicate(b)
b.state.pos=orientation*(b.state.pos-dims['center'])+dims['center']
b.shape.radius*=scale
b.state.pos=(b.state.pos-dims['center'])*scale + dims['center']
b.state.pos+=shift
if (copy): ret.append(b)
if (copy):
return ret
else:
return True
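#spheresModify usage (editor's sketch)===================================
def _exampleModifyPack():
	"""Editor's sketch, not part of the original API: shift every body matching
	mask 1 up by one unit along Z and shrink the packing by 10%, in place;
	pass copy=True to get modified duplicates instead."""
	return spheresModify(mask=1, shift=Vector3(0, 0, 1.0), scale=0.9)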
#sphereDuplicate========================================================
def sphereDuplicate(idSphere):
"""The functions makes a copy of sphere"""
i=idSphere
if (type(i).__name__=='int'):
b = O.bodies[i] #We have received a list of ID's
elif (type(i).__name__=='Body'):
		b = i #We have received a list of bodies
else:
raise TypeError("Unknown type of data, should be list of int's or bodies")
try:
sphereRadius=b.shape.radius #skip non-spheres
except AttributeError:
return False
addedBody = utils.sphere(center=b.state.pos,radius=b.shape.radius,fixed=not(b.dynamic),wire=b.shape.wire,color=b.shape.color,highlight=b.shape.highlight,material=b.material,mask=b.mask)
return addedBody
| ThomasSweijen/TPF | py/bodiesHandling.py | Python | gpl-2.0 | 8,305 | 0.056713 |
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django import forms
from django.utils.safestring import mark_safe
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Fieldset, Div, Submit, Hidden, HTML, Field
from crispy_forms.bootstrap import FormActions, AppendedText, InlineRadios
from django.forms.models import inlineformset_factory
from .search import Search
from .models import ActivitySubmission, WellActivityType, ProvinceState, DrillingMethod, LithologyDescription, LithologyMoisture, Casing, CasingType, LinerPerforation
from .models import ScreenIntake, ScreenMaterial, ScreenBottom, Screen, ProductionData, WaterQualityCharacteristic
from datetime import date
class SearchForm(forms.Form):
well = forms.IntegerField(
label=mark_safe('Well Tag Number or Well Identification Plate Number <a href="#" data-toggle="popover" data-container="body" data-placement="right" \
data-content="Well electronic filing number or physical identification plate number"> \
<i class="fa fa-question-circle" style="color:blue"></i></a>'),
required=False,
widget=forms.NumberInput(attrs={'placeholder': 'example: 123456'}),
)
addr = forms.CharField(
label=mark_safe('Street Address <a href="#" data-toggle="popover" data-container="body" data-placement="right" \
data-content="For best results, try searching using the street name only."> \
<i class="fa fa-question-circle" style="color:blue"></i></a>'),
max_length=100,
required=False,
widget=forms.TextInput(attrs={'placeholder': 'example: 123 main'}),
)
legal = forms.CharField(
label=mark_safe('Legal Plan or District Lot or PID <a href="#" data-toggle="popover" data-container="body" data-placement="right" \
data-content="Find the legal plan, district lot, or 9-digit PID (parcel identifier) on the property assessment, property tax notice, or real estate transaction."> \
<i class="fa fa-question-circle" style="color:blue"></i></a>'),
max_length=100,
required=False,
widget=forms.TextInput(attrs={'placeholder': 'example: 123a'}),
)
owner = forms.CharField(
label=mark_safe('Owner Name <a href="#" data-toggle="popover" data-container="body" data-placement="right" \
data-content="The owner name is usually the name of the well owner at time of drilling."> \
<i class="fa fa-question-circle" style="color:blue"></i></a>'),
max_length=100,
required=False,
widget=forms.TextInput(attrs={'placeholder': 'example: Smith or smi'}),
)
start_lat_long = forms.CharField(
widget=forms.HiddenInput(),
required=False
)
end_lat_long = forms.CharField(
widget=forms.HiddenInput(),
required=False
)
WELL_RESULTS_LIMIT = 1000
@property
def helper(self):
helper = FormHelper()
helper.form_id = 'id-searchForm'
helper.form_method = 'get'
helper.form_action = ''
helper.layout = Layout(
Fieldset(
'',
'well',
'addr',
'legal',
'owner',
Hidden('sort', 'well_tag_number'),
Hidden('dir', 'asc'),
# start_lat_long and end_lat_long are programatically generated
# based on an identifyWells operation on the client.
Hidden('start_lat_long', ''),
Hidden('end_lat_long', ''),
),
FormActions(
Submit('s', 'Search'),
HTML('<a class="btn btn-default" href="{% url \'search\' %}">Reset</a>'),
css_class='form-group formButtons',
)
)
return helper
def clean(self):
cleaned_data = super(SearchForm, self).clean()
well = cleaned_data.get('well')
addr = cleaned_data.get('addr')
legal = cleaned_data.get('legal')
owner = cleaned_data.get('owner')
# start_lat_long and end_lat_long are programatically-generated, and
# should consist of a dictionary of a comma-separated list consisting
# of two floats that comprise latitude and longitude. They are used
# in the identifyWells operation to query all wells whose lat/long info
# place them within a user-drawn rectangle on the search page map.
start_lat_long = cleaned_data.get('start_lat_long')
end_lat_long = cleaned_data.get('end_lat_long')
# If only one of the rectangle's points exist, we cannot perform the query.
if bool(start_lat_long) != bool(end_lat_long):
raise forms.ValidationError(
"identifyWells operation did not provide sufficient data. "
"The map may not accurately reflect query results."
)
if (not well and not addr and not legal and
not owner and not (start_lat_long and end_lat_long)):
raise forms.ValidationError(
"At least 1 search field is required."
)
def process(self):
well_results = None
well = self.cleaned_data.get('well')
addr = self.cleaned_data.get('addr')
legal = self.cleaned_data.get('legal')
owner = self.cleaned_data.get('owner')
start_lat_long = self.cleaned_data.get('start_lat_long')
end_lat_long = self.cleaned_data.get('end_lat_long')
lat_long_box = {'start_corner': start_lat_long, 'end_corner': end_lat_long}
well_results = Search.well_search(well, addr, legal, owner, lat_long_box, self.WELL_RESULTS_LIMIT)
return well_results
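# Editor's sketch (not part of the original module): a view would typically
# bind this form to GET data and hand the results to a template; the template
# path and context keys below are assumptions.
def _example_search_view(request):
    from django.shortcuts import render
    form = SearchForm(request.GET or None)
    results = form.process() if form.is_valid() else None
    return render(request, 'search.html', {'form': form, 'wells': results})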
class WellOwnerForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.layout = Layout(
Fieldset(
'Owner Information',
Div(
Div('owner_full_name', css_class='col-md-4'),
css_class='row',
),
Div(
Div('owner_mailing_address', css_class='col-md-4'),
css_class='row',
),
Div(
Div('owner_city', css_class='col-md-3 city'),
Div('owner_province_state', css_class='col-md-1'),
Div('owner_postal_code', css_class='col-md-3 postal'),
css_class='row',
),
)
)
super(WellOwnerForm, self).__init__(*args, **kwargs)
# Make fields required on the form even though they are not required in the DB due to legacy data issues
# TODO - check admin or staff user and don't make these fields required
self.fields['owner_postal_code'].required = True
# display code instead of the value from __str__ in the model
self.fields['owner_province_state'].label_from_instance = self.label_from_instance_code
try:
bc = ProvinceState.objects.get(code='BC')
self.initial['owner_province_state'] = bc
self.fields['owner_province_state'].empty_label = None
except Exception as e:
pass
@staticmethod
def label_from_instance_code(obj):
return obj.code
class Meta:
model = ActivitySubmission
fields = ['owner_full_name', 'owner_mailing_address', 'owner_city', 'owner_province_state', 'owner_postal_code']
class ActivitySubmissionTypeAndClassForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.layout = Layout(
Fieldset(
'Type of Work and Well Class',
Div(
Div('well_activity_type', css_class='col-md-4'),
Div(HTML('<label for="units">Measurement units for data entry</label><br /><input type="radio" name="units" value="Imperial" checked /> Imperial<br /><input type="radio" name="units" value="Metric" disabled /> Metric<br /><br />'), css_class='col-md-4'),
css_class='row',
),
Div(
Div('well_class', css_class='col-md-4'),
Div(Div('well_subclass', id='divSubclass'), Div('intended_water_use', id='divIntendedWaterUse'), css_class='col-md-4'),
css_class='row',
),
Div(
Div('identification_plate_number', css_class='col-md-4'),
Div('where_plate_attached', css_class='col-md-4'),
css_class='row',
),
Div(
Div('driller_responsible', css_class='col-md-4'),
Div('driller_name', css_class='col-md-4'),
Div(HTML('<input type="checkbox" id="chkSameAsPersonResponsible" /> <label for="chkSameAsPersonResponsible">Same as Person Responsible for Drilling</label>'), css_class='col-md-4'),
css_class='row',
),
Div(
Div('consultant_name', css_class='col-md-4'),
Div('consultant_company', css_class='col-md-4'),
css_class='row',
),
Div(
Div('work_start_date', css_class='col-md-4 date'),
Div('work_end_date', css_class='col-md-4 date'),
css_class='row',
),
)
)
super(ActivitySubmissionTypeAndClassForm, self).__init__(*args, **kwargs)
try:
con = WellActivityType.objects.get(code='CON')
self.initial['well_activity_type'] = con
self.fields['well_activity_type'].empty_label = None
except Exception as e:
pass
def clean_work_start_date(self):
work_start_date = self.cleaned_data.get('work_start_date')
if work_start_date > date.today():
raise forms.ValidationError('Work start date cannot be in the future.')
return work_start_date
def clean(self):
cleaned_data = super(ActivitySubmissionTypeAndClassForm, self).clean()
identification_plate_number = cleaned_data.get('identification_plate_number')
where_plate_attached = cleaned_data.get('where_plate_attached')
work_start_date = cleaned_data.get('work_start_date')
work_end_date = cleaned_data.get('work_end_date')
errors = []
if identification_plate_number and not where_plate_attached:
errors.append('Where Identification Plate Is Attached is required when specifying Identification Plate Number.')
if work_start_date and work_end_date and work_end_date < work_start_date:
errors.append('Work End Date cannot be earlier than Work Start Date.')
if len(errors) > 0:
raise forms.ValidationError(errors)
return cleaned_data
class Meta:
model = ActivitySubmission
fields = ['well_activity_type', 'well_class', 'well_subclass', 'intended_water_use', 'identification_plate_number', 'where_plate_attached', 'driller_responsible', 'driller_name', 'consultant_name', 'consultant_company', 'work_start_date', 'work_end_date']
help_texts = {'work_start_date': "yyyy-mm-dd", 'work_end_date': "yyyy-mm-dd",}
widgets = {'well_activity_type': forms.RadioSelect}
class ActivitySubmissionLocationForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.layout = Layout(
Fieldset(
'Well Location',
Div(
Div(HTML('Please provide as much well location information as possible. A minimum of one type of well location information is required below.<br /><br />'), css_class='col-md-12'),
css_class='row',
),
Div(
Div(HTML('<label>1) Civic Address</label>'), css_class='col-md-2'),
Div(HTML('<input type="checkbox" id="chkSameAsOwnerAddress" /> <label for="chkSameAsOwnerAddress">Same as Owner Address</label>'), css_class='col-md-10'),
css_class='row',
),
Div(
Div('street_address', css_class='col-md-4'),
css_class='row',
),
Div(
Div('city', css_class='col-md-4 city'),
css_class='row',
),
Div(
Div(HTML('OR'), css_class='col-md-12'),
css_class='row',
),
Div(
Div(HTML(' '), css_class='col-md-12'),
css_class='row',
),
Div(
Div(HTML('<label>2) Legal Description</label>'), css_class='col-md-12'),
css_class='row',
),
Div(
Div('legal_lot', css_class='col-md-2 city'),
Div('legal_plan', css_class='col-md-2 city'),
Div('legal_district_lot', css_class='col-md-2 city'),
Div('legal_block', css_class='col-md-2 city'),
css_class='row',
),
Div(
Div('legal_section', css_class='col-md-2 city'),
Div('legal_township', css_class='col-md-2 city'),
Div('legal_range', css_class='col-md-2 city'),
Div('legal_land_district', css_class='col-md-2 city'),
css_class='row',
),
Div(
Div(HTML('OR'), css_class='col-md-12'),
css_class='row',
),
Div(
Div(HTML(' '), css_class='col-md-12'),
css_class='row',
),
Div(
Div(HTML('<label>3) Parcel Identifier</label>'), css_class='col-md-12'),
css_class='row',
),
Div(
Div('legal_pid', css_class='col-md-2'),
css_class='row',
),
Div(
Div(HTML(' '), css_class='col-md-12'),
css_class='row',
),
Div(
Div(HTML(' '), css_class='col-md-12'),
css_class='row',
),
Div(
Div('well_location_description', css_class='col-md-8'),
css_class='row',
),
)
)
super(ActivitySubmissionLocationForm, self).__init__(*args, **kwargs)
def clean(self):
cleaned_data = super(ActivitySubmissionLocationForm, self).clean()
street_address = cleaned_data.get('street_address')
city = cleaned_data.get('city')
address_provided = street_address and city
legal_lot = cleaned_data.get('legal_lot')
legal_plan = cleaned_data.get('legal_plan')
legal_land_district = cleaned_data.get('legal_land_district')
legal_provided = legal_lot and legal_plan and legal_land_district
if not address_provided and not legal_provided and not cleaned_data.get('legal_pid'):
raise forms.ValidationError('At least 1 of Civic Address, Legal Description (Lot, Plan and Land District) or Parcel Identifier must be provided.')
return cleaned_data
class Meta:
model = ActivitySubmission
fields = ['street_address', 'city', 'legal_lot', 'legal_plan', 'legal_district_lot', 'legal_block', 'legal_section', 'legal_township', 'legal_range', 'legal_land_district', 'legal_pid', 'well_location_description']
help_texts = {'well_location_description': "Provide any additional well location details, such as physical landmarks",}
class ActivitySubmissionGpsForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.layout = Layout(
Fieldset(
'Geographic Coordinates',
Div(
Div(HTML('To determine coordinates using a Global Positioning System (GPS), set the datum to the North American Datum of 1983 (NAD 83), the current ministry standard for mapping.<br /><br />'), css_class='col-md-12'),
css_class='row',
),
Div(
Div(
Div(
Div(HTML('<div id="coord-error-pane" class="alert alert-warning" style="display:none"></div>')),
css_class='row',
),
Div(
Div(AppendedText('latitude', 'decimal degrees'), css_class='col-md-4'),
Div(AppendedText('longitude', 'decimal degrees'), css_class='col-md-4'),
css_class='row',
),
Div(
Div(HTML('OR'), css_class='col-md-12'),
css_class='row',
),
Div(
Div(HTML(' '), css_class='col-md-12'),
css_class='row',
),
Div(
Div(HTML('<div id="div_id_gps-latitude_dms" class="form-group"> <label for="id_gps-latitude_d" class="control-label ">Latitude</label> <div class="controls "> <div class="input-group"> <input class="numberinput form-control" id="id_gps-latitude_d" name="gps-latitude_d" step="1" type="number" /> <span class="input-group-addon">deg</span> <input class="numberinput form-control" id="id_gps-latitude_m" name="gps-latitude_m" step="1" type="number" /> <span class="input-group-addon">min</span> <input class="numberinput form-control" id="id_gps-latitude_s" name="gps-latitude_s" step="0.01" type="number" /> <span class="input-group-addon">sec</span> </div> </div> </div>'), css_class='col-md-5'),
Div(HTML('<div id="div_id_gps-longitude_dms" class="form-group"> <label for="id_gps-longitude_d" class="control-label ">Longitude</label> <div class="controls "> <div class="input-group"> <input class="numberinput form-control" id="id_gps-longitude_d" name="gps-longitude_d" step="1" type="number" /> <span class="input-group-addon">deg</span> <input class="numberinput form-control" id="id_gps-longitude_m" name="gps-longitude_m" step="1" type="number" /> <span class="input-group-addon">min</span> <input class="numberinput form-control" id="id_gps-longitude_s" name="gps-longitude_s" step="0.01" type="number" /> <span class="input-group-addon">sec</span> </div> </div> </div>'), css_class='col-md-5'),
css_class='row',
),
Div(
Div(HTML('OR'), css_class='col-md-12'),
css_class='row',
),
Div(
Div(HTML(' '), css_class='col-md-12'),
css_class='row',
),
Div(
Div(HTML('<div id="div_id_gps-zone" class="form-group"> <label for="id_gps-zone" class="control-label ">Zone</label> <div class="controls "> <select class="select form-control" id="id_gps-zone" name="gps-zone"><option value="" selected="selected">---------</option><option value="8">8</option><option value="9">9</option><option value="10">10</option><option value="11">11</option></select> </div> </div>'), css_class='col-md-2'),
Div(HTML('<div id="div_id_gps-utm_easting" class="form-group"> <label for="id_gps-utm_easting" class="control-label ">UTM Easting</label> <div class="controls "> <div class="input-group"> <input class="numberinput form-control" id="id_gps-utm_easting" name="gps-utm_easting" step="1" type="number" min="200000" max="800000" /> <span class="input-group-addon">m</span> </div> </div> </div>'), css_class='col-md-3'),
Div(HTML('<div id="div_id_gps-utm_northing" class="form-group"> <label for="id_gps-utm_northing" class="control-label ">UTM Northing</label> <div class="controls "> <div class="input-group"> <input class="numberinput form-control" id="id_gps-utm_northing" name="gps-utm_northing" step="1" type="number" min="5350500" max="6655250" /> <span class="input-group-addon">m</span> </div> </div> </div>'), css_class='col-md-3'),
css_class='row',
),
css_class='col-md-8',
),
Div(
Div(
id='add-map',
aria_label='This map shows the location of a prospective well as a light blue pushpin, as well as any existing wells as dark blue circles. Coordinates for the prospective well may be refined by dragging the pushpin with the mouse.'
),
Div(
id='attribution'
),
Div(HTML('<br />After the GPS coordinates are entered, the pushpin can be moved by clicking and dragging it on the map. The GPS coordinates will be updated automatically.')
),
css_class='col-md-4',
),
css_class='row',
),
),
Fieldset(
'Method of Drilling',
Div(
Div(AppendedText('ground_elevation', 'ft (asl)'), css_class='col-md-2'),
Div('ground_elevation_method', css_class='col-md-3'),
css_class='row',
),
Div(
Div('drilling_method', css_class='col-md-2'),
Div('other_drilling_method', css_class='col-md-3'),
css_class='row',
),
Div(
Div(HTML(' '), css_class='col-md-12'),
css_class='row',
),
Div(
Div('orientation_vertical', css_class='col-md-3'),
css_class='row',
),
)
)
super(ActivitySubmissionGpsForm, self).__init__(*args, **kwargs)
# Make fields required on the form even though they are not required in the DB due to legacy data issues
# TODO - check admin or staff user and don't make these fields required
self.fields['latitude'].required = True
self.fields['longitude'].required = True
self.fields['drilling_method'].required = True
def clean_latitude(self):
latitude = self.cleaned_data.get('latitude')
if latitude < 48.204555 or latitude > 60.0223:
            raise forms.ValidationError('Latitude must be between 48.204555 and 60.0223.')
decimal_places = max(0,-latitude.as_tuple().exponent)
if decimal_places < 5:
raise forms.ValidationError('Latitude must be specified to at least 5 decimal places.')
return latitude
def clean_longitude(self):
longitude = self.cleaned_data.get('longitude')
if longitude < -139.073671 or longitude > -114.033822:
raise forms.ValidationError('Longitude must be between -139.073671 and -114.033822.')
decimal_places = max(0,-longitude.as_tuple().exponent)
if decimal_places < 5:
raise forms.ValidationError('Longitude must be specified to at least 5 decimal places.')
return longitude
def clean(self):
cleaned_data = super(ActivitySubmissionGpsForm, self).clean()
ground_elevation = cleaned_data.get('ground_elevation')
ground_elevation_method = cleaned_data.get('ground_elevation_method')
drilling_method = cleaned_data.get('drilling_method')
other_drilling_method = cleaned_data.get('other_drilling_method')
errors = []
if ground_elevation and not ground_elevation_method:
errors.append('Method for Determining Ground Elevation is required when specifying Ground Elevation.')
try:
if drilling_method == DrillingMethod.objects.get(code='OTHER') and not other_drilling_method:
errors.append('Specify Other Drilling Method.')
except Exception as e:
errors.append('Configuration error: Other Drilling Method does not exist, please contact the administrator.')
if len(errors) > 0:
raise forms.ValidationError(errors)
return cleaned_data
class Meta:
model = ActivitySubmission
fields = ['latitude', 'longitude', 'ground_elevation', 'ground_elevation_method', 'drilling_method', 'other_drilling_method', 'orientation_vertical']
widgets = {'orientation_vertical': forms.RadioSelect,
'latitude': forms.TextInput(attrs={'type': 'number', 'min': '48.20456', 'max': '60.0222', 'step': 'any'}),
'longitude': forms.TextInput(attrs={'type': 'number', 'min': '-139.07367', 'max': '-114.03383', 'step': 'any'})}
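# Editor's sketch (not part of the original form): the degree/minute/second
# widgets rendered above are combined into decimal degrees on the client
# before validation; an equivalent server-side conversion would look like
# this. The helper name is illustrative only.
def _dms_to_decimal(degrees, minutes, seconds):
    sign = -1 if degrees < 0 else 1
    return sign * (abs(degrees) + minutes / 60.0 + seconds / 3600.0)  # e.g. (49, 15, 36.0) -> 49.26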
class LithologyForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.form_show_labels = False
self.helper.render_required_fields = True
self.helper.render_hidden_fields = True
self.helper.layout = Layout(
HTML('<tr valign="top">'),
HTML('<td width="6%">'),
'lithology_from',
HTML('</td>'),
HTML('<td width="6%">'),
'lithology_to',
HTML('</td>'),
HTML('<td>'),
'surficial_material',
HTML('</td>'),
HTML('<td>'),
'secondary_surficial_material',
HTML('</td>'),
HTML('<td>'),
'bedrock_material',
HTML('</td>'),
HTML('<td>'),
'bedrock_material_descriptor',
HTML('</td>'),
HTML('<td>'),
'lithology_structure',
HTML('</td>'),
HTML('<td>'),
'lithology_colour',
HTML('</td>'),
HTML('<td>'),
'lithology_hardness',
HTML('</td>'),
HTML('<td>'),
'lithology_moisture',
HTML('</td>'),
HTML('<td>'),
'water_bearing_estimated_flow',
HTML('</td>'),
HTML('<td>'),
'lithology_observation',
HTML('</td><td width="5%">{% if form.instance.pk %}{{ form.DELETE }}{% endif %}</td>'),
HTML('</tr>'),
)
super(LithologyForm, self).__init__(*args, **kwargs)
def clean(self):
cleaned_data = super(LithologyForm, self).clean()
lithology_from = cleaned_data.get('lithology_from')
lithology_to = cleaned_data.get('lithology_to')
surficial_material = cleaned_data.get('surficial_material')
bedrock_material = cleaned_data.get('bedrock_material')
errors = []
if lithology_from and lithology_to and lithology_to < lithology_from:
errors.append('To must be greater than or equal to From.')
if not surficial_material and not bedrock_material:
errors.append('Surficial Material or Bedrock is required.')
if bedrock_material:
lithology_moisture = cleaned_data.get('lithology_moisture')
water_bearing_estimated_flow = cleaned_data.get('water_bearing_estimated_flow')
try:
if lithology_moisture == LithologyMoisture.objects.get(code='Water Bear') and not water_bearing_estimated_flow:
errors.append('Water Bearing Estimated Flow is required for Water Bearing Bedrock.')
except Exception as e:
errors.append('Configuration error: Water Bearing Lithology Moisture does not exist, please contact the administrator.')
if len(errors) > 0:
raise forms.ValidationError(errors)
return cleaned_data
class Meta:
model = LithologyDescription
fields = ['lithology_from', 'lithology_to', 'surficial_material', 'secondary_surficial_material', 'bedrock_material', 'bedrock_material_descriptor', 'lithology_structure', 'lithology_colour', 'lithology_hardness', 'lithology_moisture', 'water_bearing_estimated_flow', 'lithology_observation']
class CasingForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.form_show_labels = False
self.helper.render_required_fields = True
self.helper.render_hidden_fields = True
self.helper.layout = Layout(
HTML('<tr valign="top">'),
HTML('<td width="5%">'),
'casing_from',
HTML('</td>'),
HTML('<td width="5%">'),
'casing_to',
HTML('</td>'),
HTML('<td width="10%">'),
'internal_diameter',
HTML('</td>'),
HTML('<td>'),
'casing_type',
HTML('</td>'),
HTML('<td>'),
'casing_material',
HTML('</td>'),
HTML('<td width="10%">'),
'wall_thickness',
HTML('</td>'),
HTML('<td>'),
InlineRadios('drive_shoe'),
HTML('</td><td width="5%">{% if form.instance.pk %}{{ form.DELETE }}{% endif %}</td>'),
HTML('</tr>'),
)
super(CasingForm, self).__init__(*args, **kwargs)
self.fields['drive_shoe'].label = False
def clean(self):
cleaned_data = super(CasingForm, self).clean()
casing_from = cleaned_data.get('casing_from')
casing_to = cleaned_data.get('casing_to')
casing_type = cleaned_data.get('casing_type')
casing_material = cleaned_data.get('casing_material')
wall_thickness = cleaned_data.get('wall_thickness')
errors = []
if casing_from and casing_to and casing_to < casing_from:
errors.append('To must be greater than or equal to From.')
open_casing_type = None
try:
open_casing_type = CasingType.objects.get(code='OPEN')
except Exception as e:
errors.append('Configuration error: Open Hole Casing Type does not exist, please contact the administrator.')
if open_casing_type:
if casing_type != open_casing_type and not casing_material:
self.add_error('casing_material', 'This field is required.')
if casing_type != open_casing_type and not wall_thickness:
self.add_error('wall_thickness', 'This field is required.')
if casing_type == open_casing_type and casing_material:
self.add_error('casing_material', 'Open Hole cannot have a casing material.')
if len(errors) > 0:
raise forms.ValidationError(errors)
return cleaned_data
class Meta:
model = Casing
fields = ['casing_from', 'casing_to', 'internal_diameter', 'casing_type', 'casing_material', 'wall_thickness', 'drive_shoe']
class ActivitySubmissionSurfaceSealForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.render_hidden_fields = True
self.helper.layout = Layout(
Fieldset(
'Surface Seal and Backfill Information',
Div(
Div('surface_seal_material', css_class='col-md-3'),
Div(AppendedText('surface_seal_depth', 'ft'), css_class='col-md-2'),
Div(AppendedText('surface_seal_thickness', 'in'), css_class='col-md-2'),
css_class='row',
),
Div(
Div('surface_seal_method', css_class='col-md-3'),
css_class='row',
),
Div(
Div(HTML(' '), css_class='col-md-12'),
css_class='row',
),
Div(
Div('backfill_above_surface_seal', css_class='col-md-3'),
Div(AppendedText('backfill_above_surface_seal_depth', 'ft'), css_class='col-md-2'),
css_class='row',
),
),
Fieldset(
'Liner Information',
Div(
Div('liner_material', css_class='col-md-3'),
css_class='row',
),
Div(
Div(AppendedText('liner_diameter', 'in'), css_class='col-md-2'),
Div(AppendedText('liner_thickness', 'in'), css_class='col-md-2'),
css_class='row',
),
Div(
Div(AppendedText('liner_from', 'ft (bgl)'), css_class='col-md-2'),
Div(AppendedText('liner_to', 'ft (bgl)'), css_class='col-md-2'),
css_class='row',
),
)
)
super(ActivitySubmissionSurfaceSealForm, self).__init__(*args, **kwargs)
    # Field-level clean_* hooks must return the cleaned value; the originals
    # returned None implicitly, which silently discarded valid input.
    def clean_surface_seal_material(self):
        surface_seal_material = self.cleaned_data.get('surface_seal_material')
        if self.initial['casing_exists'] and not surface_seal_material:
            raise forms.ValidationError('This field is required.')
        return surface_seal_material
    def clean_surface_seal_depth(self):
        surface_seal_depth = self.cleaned_data.get('surface_seal_depth')
        if self.initial['casing_exists'] and not surface_seal_depth:
            raise forms.ValidationError('This field is required.')
        return surface_seal_depth
    def clean_surface_seal_thickness(self):
        surface_seal_thickness = self.cleaned_data.get('surface_seal_thickness')
        if self.initial['casing_exists'] and not surface_seal_thickness:
            raise forms.ValidationError('This field is required.')
        return surface_seal_thickness
    def clean_surface_seal_method(self):
        surface_seal_method = self.cleaned_data.get('surface_seal_method')
        if self.initial['casing_exists'] and not surface_seal_method:
            raise forms.ValidationError('This field is required.')
        return surface_seal_method
def clean(self):
cleaned_data = super(ActivitySubmissionSurfaceSealForm, self).clean()
liner_from = cleaned_data.get('liner_from')
liner_to = cleaned_data.get('liner_to')
errors = []
if liner_from and liner_to and liner_to < liner_from:
errors.append('Liner To must be greater than or equal to From.')
if len(errors) > 0:
raise forms.ValidationError(errors)
return cleaned_data
class Meta:
model = ActivitySubmission
fields = ['surface_seal_material', 'surface_seal_depth', 'surface_seal_thickness', 'surface_seal_method', 'backfill_above_surface_seal', 'backfill_above_surface_seal_depth', 'liner_material', 'liner_diameter', 'liner_thickness', 'liner_from', 'liner_to']
class LinerPerforationForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.form_show_labels = False
self.helper.render_required_fields = True
self.helper.render_hidden_fields = True
self.helper.layout = Layout(
HTML('<tr valign="top">'),
HTML('<td>'),
'liner_perforation_from',
HTML('</td>'),
HTML('<td>'),
'liner_perforation_to',
HTML('</td><td width="75"> {% if form.instance.pk %}{{ form.DELETE }}{% endif %}</td>'),
HTML('</tr>'),
)
super(LinerPerforationForm, self).__init__(*args, **kwargs)
def clean(self):
cleaned_data = super(LinerPerforationForm, self).clean()
liner_perforation_from = cleaned_data.get('liner_perforation_from')
liner_perforation_to = cleaned_data.get('liner_perforation_to')
errors = []
if liner_perforation_from and liner_perforation_to and liner_perforation_to < liner_perforation_from:
errors.append('To must be greater than or equal to From.')
if len(errors) > 0:
raise forms.ValidationError(errors)
return cleaned_data
class Meta:
model = LinerPerforation
fields = ['liner_perforation_from', 'liner_perforation_to']
class ActivitySubmissionScreenIntakeForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.render_hidden_fields = True
self.helper.layout = Layout(
Fieldset(
'Screen Information',
Div(
Div('screen_intake', css_class='col-md-2'),
css_class='row',
),
Div(
Div('screen_type', css_class='col-md-2'),
Div('screen_material', css_class='col-md-2'),
Div('other_screen_material', css_class='col-md-3'),
css_class='row',
),
Div(
Div('screen_opening', css_class='col-md-2'),
Div('screen_bottom', css_class='col-md-2'),
Div('other_screen_bottom', css_class='col-md-3'),
css_class='row',
),
)
)
super(ActivitySubmissionScreenIntakeForm, self).__init__(*args, **kwargs)
def clean(self):
cleaned_data = super(ActivitySubmissionScreenIntakeForm, self).clean()
screen_intake = cleaned_data.get('screen_intake')
screen_type = cleaned_data.get('screen_type')
screen_material = cleaned_data.get('screen_material')
other_screen_material = cleaned_data.get('other_screen_material')
screen_opening = cleaned_data.get('screen_opening')
screen_bottom = cleaned_data.get('screen_bottom')
other_screen_bottom = cleaned_data.get('other_screen_bottom')
errors = []
screen_screen_intake = None
try:
screen_screen_intake = ScreenIntake.objects.get(code='SCREEN')
except Exception as e:
errors.append('Configuration error: Screen Intake for Screen does not exist, please contact the administrator.')
if screen_screen_intake:
if screen_intake == screen_screen_intake and not screen_type:
self.add_error('screen_type', 'This field is required if Intake is a Screen.')
if screen_intake == screen_screen_intake and not screen_material:
self.add_error('screen_material', 'This field is required if Intake is a Screen.')
if screen_intake == screen_screen_intake and not screen_opening:
self.add_error('screen_opening', 'This field is required if Intake is a Screen.')
if screen_intake == screen_screen_intake and not screen_bottom:
self.add_error('screen_bottom', 'This field is required if Intake is a Screen.')
try:
if screen_material == ScreenMaterial.objects.get(code='OTHER') and not other_screen_material:
self.add_error('other_screen_material', 'This field is required.')
except Exception as e:
errors.append('Configuration error: Other Screen Material does not exist, please contact the administrator.')
try:
if screen_bottom == ScreenBottom.objects.get(code='OTHER') and not other_screen_bottom:
self.add_error('other_screen_bottom', 'This field is required.')
except Exception as e:
errors.append('Configuration error: Other Screen Bottom does not exist, please contact the administrator.')
if len(errors) > 0:
raise forms.ValidationError(errors)
return cleaned_data
class Meta:
model = ActivitySubmission
fields = ['screen_intake', 'screen_type', 'screen_material', 'other_screen_material', 'screen_opening', 'screen_bottom', 'other_screen_bottom']
class ScreenForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.form_show_labels = False
self.helper.render_required_fields = True
self.helper.render_hidden_fields = True
self.helper.layout = Layout(
HTML('<tr valign="top">'),
HTML('<td width="60">'),
'screen_from',
HTML('</td>'),
HTML('<td width="60">'),
'screen_to',
HTML('</td>'),
HTML('<td width="70">'),
'internal_diameter',
HTML('</td>'),
HTML('<td width="200">'),
'assembly_type',
HTML('</td>'),
HTML('<td width="60">'),
'slot_size',
HTML('</td><td width="75"> {% if form.instance.pk %}{{ form.DELETE }}{% endif %}</td>'),
HTML('</tr>'),
)
super(ScreenForm, self).__init__(*args, **kwargs)
def clean(self):
cleaned_data = super(ScreenForm, self).clean()
screen_from = cleaned_data.get('screen_from')
screen_to = cleaned_data.get('screen_to')
errors = []
if screen_from and screen_to and screen_to < screen_from:
errors.append('To must be greater than or equal to From.')
if len(errors) > 0:
raise forms.ValidationError(errors)
return cleaned_data
class Meta:
model = Screen
fields = ['screen_from', 'screen_to', 'internal_diameter', 'assembly_type', 'slot_size']
class ActivitySubmissionFilterPackForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.layout = Layout(
Fieldset(
'Filter Pack',
Div(
Div(AppendedText('filter_pack_from', 'ft'), css_class='col-md-2'),
Div(AppendedText('filter_pack_to', 'ft'), css_class='col-md-2'),
Div(AppendedText('filter_pack_thickness', 'in'), css_class='col-md-2'),
css_class='row',
),
Div(
Div('filter_pack_material', css_class='col-md-3'),
Div('filter_pack_material_size', css_class='col-md-3'),
css_class='row',
),
)
)
super(ActivitySubmissionFilterPackForm, self).__init__(*args, **kwargs)
def clean(self):
cleaned_data = super(ActivitySubmissionFilterPackForm, self).clean()
return cleaned_data
class Meta:
model = ActivitySubmission
fields = ['filter_pack_from', 'filter_pack_to', 'filter_pack_thickness', 'filter_pack_material', 'filter_pack_material_size']
class ActivitySubmissionDevelopmentForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.layout = Layout(
Fieldset(
'Well Development',
Div(
Div('development_method', css_class='col-md-3'),
css_class='row',
),
Div(
Div(AppendedText('development_hours', 'hrs'), css_class='col-md-3'),
css_class='row',
),
Div(
Div('development_notes', css_class='col-md-6'),
css_class='row',
),
)
)
super(ActivitySubmissionDevelopmentForm, self).__init__(*args, **kwargs)
class Meta:
model = ActivitySubmission
fields = ['development_method', 'development_hours', 'development_notes']
class ProductionDataForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.layout = Layout(
Fieldset(
'Well Yield Estimation',
Div(
Div('yield_estimation_method', css_class='col-md-3'),
css_class='row',
),
Div(
Div(AppendedText('yield_estimation_rate', 'USgpm'), css_class='col-md-3'),
Div(AppendedText('yield_estimation_duration', 'hrs'), css_class='col-md-3'),
css_class='row',
),
Div(
Div(AppendedText('static_level', 'ft (btoc)'), css_class='col-md-3'),
Div(AppendedText('drawdown', 'ft (btoc)'), css_class='col-md-3'),
css_class='row',
),
Div(
Div(InlineRadios('hydro_fracturing_performed'), css_class='col-md-3'),
Div(AppendedText('hydro_fracturing_yield_increase', 'USgpm'), css_class='col-md-3'),
css_class='row',
),
)
)
super(ProductionDataForm, self).__init__(*args, **kwargs)
class Meta:
model = ProductionData
fields = ['yield_estimation_method', 'yield_estimation_rate', 'yield_estimation_duration', 'static_level', 'drawdown', 'hydro_fracturing_performed', 'hydro_fracturing_yield_increase']
widgets = {'hydro_fracturing_performed': forms.RadioSelect}
class ActivitySubmissionWaterQualityForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.layout = Layout(
Fieldset(
'Water Quality',
Div(
Div('water_quality_characteristics', css_class='col-md-3'),
css_class='row',
),
Div(
Div('water_quality_colour', css_class='col-md-3'),
css_class='row',
),
Div(
Div('water_quality_odour', css_class='col-md-3'),
css_class='row',
),
)
)
super(ActivitySubmissionWaterQualityForm, self).__init__(*args, **kwargs)
class Meta:
model = ActivitySubmission
fields = ['water_quality_characteristics', 'water_quality_colour', 'water_quality_odour']
widgets = {'water_quality_characteristics': forms.CheckboxSelectMultiple}
class WellCompletionForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.layout = Layout(
Fieldset(
'Well Completion Details',
Div(
Div(AppendedText('total_depth_drilled', 'ft'), css_class='col-md-3'),
Div(AppendedText('finished_well_depth', 'ft (bgl)'), css_class='col-md-3'),
css_class='row',
),
Div(
Div(AppendedText('final_casing_stick_up', 'in'), css_class='col-md-3'),
Div(AppendedText('bedrock_depth', 'ft (bgl)'), css_class='col-md-3'),
css_class='row',
),
Div(
Div(AppendedText('static_water_level', 'ft (btoc)'), css_class='col-md-3'),
Div(AppendedText('well_yield', 'USgpm'), css_class='col-md-3'),
css_class='row',
),
Div(
Div(AppendedText('artestian_flow', 'USgpm'), css_class='col-md-3'),
Div(AppendedText('artestian_pressure', 'ft'), css_class='col-md-3'),
css_class='row',
),
Div(
Div('well_cap_type', css_class='col-md-3'),
Div(InlineRadios('well_disinfected'), css_class='col-md-3'),
css_class='row',
),
)
)
super(WellCompletionForm, self).__init__(*args, **kwargs)
# Make fields required on the form even though they are not required in the DB due to legacy data issues
# TODO - check admin or staff user and don't make these fields required
self.fields['total_depth_drilled'].required = True
self.fields['finished_well_depth'].required = True
self.fields['final_casing_stick_up'].required = True
def clean(self):
cleaned_data = super(WellCompletionForm, self).clean()
total_depth_drilled = cleaned_data.get('total_depth_drilled')
finished_well_depth = cleaned_data.get('finished_well_depth')
errors = []
if total_depth_drilled and finished_well_depth and total_depth_drilled < finished_well_depth:
errors.append('Finished Well Depth can\'t be greater than Total Depth Drilled.')
if len(errors) > 0:
raise forms.ValidationError(errors)
return cleaned_data
class Meta:
model = ActivitySubmission
fields = ['total_depth_drilled', 'finished_well_depth', 'final_casing_stick_up', 'bedrock_depth', 'static_water_level', 'well_yield', 'artestian_flow', 'artestian_pressure', 'well_cap_type', 'well_disinfected']
widgets = {'well_disinfected': forms.RadioSelect}
class ActivitySubmissionCommentForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = True
self.helper.layout = Layout(
Fieldset(
'General Comments',
Div(
Div('comments', css_class='col-md-12'),
css_class='row',
),
Div(
Div('alternative_specs_submitted', css_class='col-md-12'),
css_class='row',
),
Div(
Div(HTML('<p style="font-style: italic;">Declaration: By submitting this well construction, alteration or decommission report, as the case may be, I declare that it has been done in accordance with the requirements of the Water Sustainability Act and the Groundwater Protection Regulation.</p>'), css_class='col-md-12'),
css_class='row',
),
)
)
super(ActivitySubmissionCommentForm, self).__init__(*args, **kwargs)
class Meta:
model = ActivitySubmission
fields = ['comments', 'alternative_specs_submitted']
widgets = {'comments': forms.Textarea}
#WellCompletionDataFormSet = inlineformset_factory(ActivitySubmission, WellCompletionData, max_num=1, can_delete=False)
ActivitySubmissionLithologyFormSet = inlineformset_factory(ActivitySubmission, LithologyDescription, form=LithologyForm, fk_name='activity_submission', can_delete=False, extra=10)
ActivitySubmissionCasingFormSet = inlineformset_factory(ActivitySubmission, Casing, form=CasingForm, fk_name='activity_submission', can_delete=False, extra=5)
ActivitySubmissionLinerPerforationFormSet = inlineformset_factory(ActivitySubmission, LinerPerforation, form=LinerPerforationForm, fk_name='activity_submission', can_delete=False, extra=5)
ActivitySubmissionScreenFormSet = inlineformset_factory(ActivitySubmission, Screen, form=ScreenForm, fk_name='activity_submission', can_delete=False, extra=5)
ProductionDataFormSet = inlineformset_factory(ActivitySubmission, ProductionData, form=ProductionDataForm, fk_name='activity_submission', can_delete=True, min_num=1, max_num=1)
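# Editor's sketch (assumption, not from the original views): binding one of
# the inline formsets above to a parent submission instance in a view.
def _example_lithology_edit(request, submission):
    formset = ActivitySubmissionLithologyFormSet(request.POST or None, instance=submission)
    if request.method == 'POST' and formset.is_valid():
        formset.save()
    return formset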
| SethGreylyn/gwells | gwells/forms.py | Python | apache-2.0 | 53,558 | 0.003697 |
# This file is part of Invenio.
# Copyright (C) 2007, 2008, 2009, 2010, 2011, 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibObject Module providing BibObject prividing features for documents containing text (not necessarily as the main part of the content)"""
import os
import re
from datetime import datetime
from invenio.config import CFG_BIBINDEX_PERFORM_OCR_ON_DOCNAMES
from invenio.legacy.bibdocfile.api import BibDoc, InvenioBibDocFileError
from invenio.legacy.dbquery import run_sql
from invenio.ext.logging import register_exception
_RE_PERFORM_OCR = re.compile(CFG_BIBINDEX_PERFORM_OCR_ON_DOCNAMES)
class BibTextDoc(BibDoc):
def get_text(self, version=None):
"""
@param version: the requested version. If not set, the latest version
will be used.
@type version: integer
@return: the textual content corresponding to the specified version
of the document.
@rtype: string
"""
if version is None:
version = self.get_latest_version()
if self.has_text(version):
return open(os.path.join(self.basedir, '.text;%i' % version)).read()
else:
return ""
def is_ocr_required(self):
"""
Return True if this document require OCR in order to extract text from it.
"""
for bibrec_link in self.bibrec_links:
if _RE_PERFORM_OCR.match(bibrec_link['docname']):
return True
return False
def get_text_path(self, version=None):
"""
@param version: the requested version. If not set, the latest version
will be used.
@type version: int
@return: the full path to the textual content corresponding to the specified version
of the document.
@rtype: string
"""
if version is None:
version = self.get_latest_version()
if self.has_text(version):
return os.path.join(self.basedir, '.text;%i' % version)
else:
return ""
def extract_text(self, version=None, perform_ocr=False, ln='en'):
"""
Try what is necessary to extract the textual information of a document.
@param version: the version of the document for which text is required.
If not specified the text will be retrieved from the last version.
@type version: integer
@param perform_ocr: whether to perform OCR.
@type perform_ocr: bool
@param ln: a two letter language code to give as a hint to the OCR
procedure.
@type ln: string
@raise InvenioBibDocFileError: in case of error.
@note: the text is extracted and cached for later use. Use L{get_text}
to retrieve it.
"""
raise RuntimeError("Text extraction is not implemented.")
def pdf_a_p(self):
"""
@return: True if this document contains a PDF in PDF/A format.
@rtype: bool"""
return self.has_flag('PDF/A', 'pdf')
def has_text(self, require_up_to_date=False, version=None):
"""
Return True if the text of this document has already been extracted.
@param require_up_to_date: if True check the text was actually
extracted after the most recent format of the given version.
@type require_up_to_date: bool
@param version: a version for which the text should have been
extracted. If not specified the latest version is considered.
@type version: integer
@return: True if the text has already been extracted.
@rtype: bool
"""
if version is None:
version = self.get_latest_version()
if os.path.exists(os.path.join(self.basedir, '.text;%i' % version)):
if not require_up_to_date:
return True
else:
docfiles = self.list_version_files(version)
text_md = datetime.fromtimestamp(os.path.getmtime(os.path.join(self.basedir, '.text;%i' % version)))
for docfile in docfiles:
if text_md <= docfile.md:
return False
return True
return False
def __repr__(self):
return 'BibTextDoc(%s, %s, %s)' % (repr(self.id), repr(self.doctype), repr(self.human_readable))
def supports(doctype, extensions):
    return doctype == "Fulltext" or any(ext.startswith(".pdf") or ext.startswith(".ps") for ext in extensions)
def create_instance(docid=None, doctype='Main', human_readable=False, # pylint: disable=W0613
initial_data = None):
return BibTextDoc(docid=docid, human_readable=human_readable,
initial_data = initial_data)
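# Editor's sketch (assumption, not part of the original plugin surface): the
# bibdocfile machinery calls supports() to select this plugin and
# create_instance() to build the object; manual use would look roughly like
#   if supports('Fulltext', ['.pdf;1']):
#       doc = create_instance(docid=123, doctype='Fulltext')
#       text = doc.get_text() if doc.has_text() else ''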
| ludmilamarian/invenio | invenio/legacy/bibdocfile/plugins/bom_textdoc.py | Python | gpl-2.0 | 5,427 | 0.003317 |
from django.http.response import HttpResponse
import requests
def get_statuses(request):
    """Proxy a Kikar API request and return the raw JSON response."""
    url = request.GET.get('path', 'http://www.kikar.org/api/v1/facebook_status/?limit=5')
    url = url.replace("'", "").replace('"', "")  # strip stray quotes around the forwarded path
print(url)
kikar_res = requests.get(url)
res = HttpResponse(content=kikar_res.content, content_type='application/json')
return res
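# Editor's sketch (assumption, not from the original repo): the proxy view is
# typically exposed through a URL pattern such as
#   from django.conf.urls import url
#   urlpatterns = [url(r'^get-statuses/$', get_statuses, name='get-statuses')]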
| noamelf/Open-Knesset | kikar/views.py | Python | bsd-3-clause | 376 | 0.005319 |
from yos.rt import BaseTasklet
from yos.ipc import Catalog
class CatalogExample(BaseTasklet):
def on_startup(self):
Catalog.store('test1', 'test2', catname='test3')
Catalog.get('test1', self.on_read, catname='test3')
def on_read(self, val):
if val == 'test2':
print("Test passed")
else:
print("Test failed") | piotrmaslanka/systemy | examples/catalogExample.py | Python | mit | 382 | 0.010471 |
#!/usr/local/bin/python
# check python version
import sys
ver_info = sys.version_info
# parse commandlines: optparse on Python < 2.7 (argparse was added in 2.7), argparse otherwise
if ver_info[0] < 3 and ver_info[1] < 7:
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-f", "--file", dest="filename", help="input log file", metavar="LOG_FILE")
# parser.add_option("-d", "--directory", dest="dirname", help="input directory with log files", metavar="LOG_DIR")
parser.add_option("-t", "--dbtype", dest="dbtype", help="database type", default="mongodb", metavar="DB_TYPE")
    (options, args) = parser.parse_args()
else:
import argparse
parser = argparse.ArgumentParser(description="Log to database ingester")
parser.add_argument("-f, --file", dest="filename", help="input log file", metavar="LOG_FILE")
# parser.add_argument("-d, --directory", dest="dirname", help="input directory with log files", metavar="LOG_DIR")
parser.add_argument("-t, --dbtype", dest="dbtype", help="database type", default="mongodb", metavar="DB_TYPE")
options = parser.parse_args()
print "file {0} ".format(options.filename)
# print "dirname {0} ".format(options.dirname)
print "dbtype {0}".format(options.dbtype)
if options.dbtype == "mongodb":
from DBDriver.MongoDBDriver import MongoDBDriver
dbingester = MongoDBDriver();
elif options.dbtype == "cassandra":
from DBDriver.CassandraDBDriver import CassandraDBDriver
dbingester = CassandraDBDriver();
else:
print "ERROR: unsupported db type {0}".format(options.dbtype);
sys.exit(2);
import re
# open the file and iterate
with open(options.filename) as f:
# read the first line
line = f.readline()
if re.match("v2.1", line):
from LogParser.LogParsers import LogParserV2_1
lparser = LogParserV2_1(options.filename)
elif re.match("v2", line):
from LogParser.LogParsers import LogParserV2
lparser = LogParserV2_1(options.filename)
else:
print "UNSUPPORTED LOG VERSION: {0}".format(line)
sys.exit(1)
for line in f:
lparser.parseLine(line, dbingester)
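# Editor's note (assumption): LogParser and DBDriver are project-local modules
# not shown here; the duck-typed contract used above is roughly
#   parser = LogParserV2_1(filename); parser.parseLine(line, ingester)
# with each parseLine() call handing parsed records to the ingester object.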
| EmoryUniversity/PIAT | src/common/log-analysis/python-discard/LogDBIngester.py | Python | lgpl-3.0 | 2,170 | 0.007834 |
from multiprocessing import Process
import pytest
from .mock_server.server import start
server_process = None
@pytest.hookimpl
def pytest_sessionstart(session):
global server_process
server_process = Process(target=start)
server_process.start()
@pytest.hookimpl
def pytest_sessionfinish(session):
if server_process is not None:
server_process.terminate()
server_process.join()
| pirate/bookmark-archiver | tests/conftest.py | Python | mit | 417 | 0.009592 |
from __future__ import print_function, division
import numpy as np
from astropy import units as u
from astropy.table import Table
from ..utils.validator import validate_array
__all__ = ['Extinction']
class Extinction(object):
def __init__(self):
self.wav = None
self.chi = None
@property
def wav(self):
return self._wav
@wav.setter
def wav(self, value):
if value is None:
self._wav = None
else:
self._wav = validate_array('wav', value, ndim=1,
shape=None if self.chi is None else self.chi.shape,
physical_type='length')
@property
def chi(self):
return self._chi
@chi.setter
def chi(self, value):
if value is None:
self._chi = None
else:
self._chi = validate_array('chi', value, ndim=1,
shape=None if self.wav is None else self.wav.shape,
physical_type='area per unit mass')
@classmethod
def from_file(cls, filename, columns=(0, 1),
wav_unit=u.micron, chi_unit=u.cm ** 2 / u.g):
"""
Read an extinction law from an ASCII file.
This reads in two columns: the wavelength, and the opacity (in units
of area per unit mass).
Parameters
----------
filename : str, optional
The name of the file to read the extinction law from
columns : tuple or list, optional
The columns to use for the wavelength and opacity respectively
wav_unit : :class:`~astropy.units.core.Unit`
The units to assume for the wavelength
chi_unit : :class:`~astropy.units.core.Unit`
The units to assume for the opacity
"""
self = cls()
f = np.loadtxt(filename, dtype=[('wav', float), ('chi', float)],
usecols=columns)
self.wav = f['wav'] * wav_unit
self.chi = f['chi'] * chi_unit
return self
def get_av(self, wav):
"""
Interpolate the Av at given wavelengths
Parameters
----------
wav : :class:`~astropy.units.quantity.Quantity`
The wavelengths at which to interpolate the visual extinction.
"""
if isinstance(wav, u.Quantity) and wav.unit.is_equivalent(u.m):
return (-0.4 * np.interp(wav.to(self.wav.unit), self.wav, self.chi, left=0., right=0.)
/ np.interp(([0.55] * u.micron).to(self.wav.unit), self.wav, self.chi))
else:
raise TypeError("wav should be given as a Quantity object with units of length")
@classmethod
def from_table(cls, table):
self = cls()
self.wav = table['wav'].data * table['wav'].unit
self.chi = table['chi'].data * table['chi'].unit
return self
def to_table(self):
t = Table()
t['wav'] = self.wav
t['chi'] = self.chi
return t
def __getstate__(self):
return {
'wav': self.wav,
'chi': self.chi,
}
def __setstate__(self, d):
self.__init__()
self.wav = d['wav']
self.chi = d['chi']
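# Editor's sketch (not part of the original API): build a toy extinction law
# in memory and sample it; real use would load a tabulated law with
# Extinction.from_file(). The 1/lambda opacity is purely illustrative.
def _example_extinction():
    grid = np.linspace(0.1, 10.0, 200)
    law = Extinction()
    law.wav = grid * u.micron
    law.chi = (1.0 / grid) * u.cm ** 2 / u.g
    return law.get_av([0.55, 2.2] * u.micron)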
| astrofrog/sedfitter | sedfitter/extinction/extinction.py | Python | bsd-2-clause | 3,289 | 0.00152 |
from sys import argv
from xml.dom import minidom
import csv
stem = argv[1][:-4] if argv[1].endswith('.xml') else argv[1]
xmldoc = minidom.parse('%s.xml'%stem)
labellist = xmldoc.getElementsByTagName('label')
labels = [l.attributes['name'].value for l in labellist]
labelset = set(labels)
for split in 'train','test':
with open('%s-%s.csv'%(stem,split), 'rb') as csvfile:
reader = csv.DictReader(csvfile)
features = [f for f in reader.fieldnames if f not in labelset]
x = open('%s-%s.x.txt'%(stem,split), 'w')
y = open('%s-%s.y.txt'%(stem,split), 'w')
for row in reader:
xbuf = ' '.join([row[f] for f in features])
ybuf = ' '.join([row[l] for l in labels])
x.write('%s\n'%xbuf)
y.write("%s\n"%ybuf)
x.close()
y.close()
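# Usage sketch (the dataset stem "emotions" is hypothetical): running
#
#     python xmlcsv2xy.py emotions
#
# reads emotions.xml plus emotions-train.csv / emotions-test.csv and writes
# emotions-{train,test}.x.txt and emotions-{train,test}.y.txt containing
# space-separated feature and label values respectively.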
| pmineiro/randembed | mulan/xmlcsv2xy.py | Python | unlicense | 829 | 0.014475 |
#!/usr/bin/python
# Copyright (c) 2013 The Bitcoin Core developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import json
from urllib import urlopen
import requests
import getpass
from string import Template
import sys
import os
import subprocess
class RunError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def run(command, **kwargs):
fail_hard = kwargs.pop("fail_hard", True)
# output to /dev/null by default:
kwargs.setdefault("stdout", open('/dev/null', 'w'))
kwargs.setdefault("stderr", open('/dev/null', 'w'))
command = Template(command).substitute(os.environ)
if "TRACE" in os.environ:
if 'cwd' in kwargs:
print("[cwd=%s] %s"%(kwargs['cwd'], command))
else: print(command)
try:
process = subprocess.Popen(command.split(' '), **kwargs)
process.wait()
except KeyboardInterrupt:
process.terminate()
raise
if process.returncode != 0 and fail_hard:
raise RunError("Failed: "+command)
return process.returncode
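# Usage sketch for run() (the command shown is taken from this script):
#
#     run("rsync --delete -apv ${CHROOT_MASTER}/ ${CHROOT_COPY}")
#
# string.Template expands ${CHROOT_MASTER} and ${CHROOT_COPY} from os.environ
# before the command is executed; with fail_hard=False the exit code is
# returned instead of raising RunError on failure.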
def checkout_pull(clone_url, commit, out):
# Init
build_dir=os.environ["BUILD_DIR"]
run("umount ${CHROOT_COPY}/proc", fail_hard=False)
run("rsync --delete -apv ${CHROOT_MASTER}/ ${CHROOT_COPY}")
run("rm -rf ${CHROOT_COPY}${SCRIPTS_DIR}")
run("cp -a ${SCRIPTS_DIR} ${CHROOT_COPY}${SCRIPTS_DIR}")
# Merge onto upstream/master
run("rm -rf ${BUILD_DIR}")
run("mkdir -p ${BUILD_DIR}")
run("git clone ${CLONE_URL} ${BUILD_DIR}")
run("git remote add pull "+clone_url, cwd=build_dir, stdout=out, stderr=out)
run("git fetch pull", cwd=build_dir, stdout=out, stderr=out)
if run("git merge "+ commit, fail_hard=False, cwd=build_dir, stdout=out, stderr=out) != 0:
return False
run("chown -R ${BUILD_USER}:${BUILD_GROUP} ${BUILD_DIR}", stdout=out, stderr=out)
run("mount --bind /proc ${CHROOT_COPY}/proc")
return True
def commentOn(commentUrl, success, inMerge, needTests, linkUrl):
common_message = """
This test script verifies pulls every time they are updated. It, however, dies sometimes and fails to test properly. If you are waiting on a test, please check timestamps to verify that the test.log is moving at http://jenkins.bluematt.me/pull-tester/current/
Contact BlueMatt on freenode if something looks broken."""
# Remove old BitcoinPullTester comments (I'm being lazy and not paginating here)
recentcomments = requests.get(commentUrl+"?sort=created&direction=desc",
auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"])).json
for comment in recentcomments:
if comment["user"]["login"] == os.environ["GITHUB_USER"] and common_message in comment["body"]:
requests.delete(comment["url"],
auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"]))
if success == True:
if needTests:
message = "Automatic sanity-testing: PLEASE ADD TEST-CASES, though technically passed. See " + linkUrl + " for binaries and test log."
else:
message = "Automatic sanity-testing: PASSED, see " + linkUrl + " for binaries and test log."
post_data = { "body" : message + common_message}
elif inMerge:
post_data = { "body" : "Automatic sanity-testing: FAILED MERGE, see " + linkUrl + " for test log." + """
This pull does not merge cleanly onto current master""" + common_message}
else:
post_data = { "body" : "Automatic sanity-testing: FAILED BUILD/TEST, see " + linkUrl + " for binaries and test log." + """
This could happen for one of several reasons:
1. It changes build scripts in a way that makes them incompatible with the automated testing scripts (please tweak those patches in qa/pull-tester)
2. It adds/modifies tests which test network rules (thanks for doing that), which conflicts with a patch applied at test time
3. It does not build on either Linux i386 or Win32 (via MinGW cross compile)
4. The test suite fails on either Linux i386 or Win32
5. The block test-cases failed (lookup the first bNN identifier which failed in https://github.com/TheBlueMatt/test-scripts/blob/master/FullBlockTestGenerator.java)
If you believe this to be in error, please ping BlueMatt on freenode or TheBlueMatt here.
""" + common_message}
resp = requests.post(commentUrl, json.dumps(post_data), auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"]))
def testpull(number, comment_url, clone_url, commit):
print("Testing pull %d: %s : %s"%(number, clone_url,commit))
dir = os.environ["RESULTS_DIR"] + "/" + commit + "/"
print(" ouput to %s"%dir)
if os.path.exists(dir):
os.system("rm -r " + dir)
os.makedirs(dir)
currentdir = os.environ["RESULTS_DIR"] + "/current"
os.system("rm -r "+currentdir)
os.system("ln -s " + dir + " " + currentdir)
out = open(dir + "test.log", 'w+')
resultsurl = os.environ["RESULTS_URL"] + commit
checkedout = checkout_pull(clone_url, commit, out)
if checkedout != True:
print("Failed to test pull - sending comment to: " + comment_url)
commentOn(comment_url, False, True, False, resultsurl)
open(os.environ["TESTED_DB"], "a").write(commit + "\n")
return
run("rm -rf ${CHROOT_COPY}/${OUT_DIR}", fail_hard=False);
run("mkdir -p ${CHROOT_COPY}/${OUT_DIR}", fail_hard=False);
run("chown -R ${BUILD_USER}:${BUILD_GROUP} ${CHROOT_COPY}/${OUT_DIR}", fail_hard=False)
script = os.environ["BUILD_PATH"]+"/qa/pull-tester/pull-tester.sh"
script += " ${BUILD_PATH} ${MINGW_DEPS_DIR} ${SCRIPTS_DIR}/CreditsdComparisonTool_jar/CreditsdComparisonTool.jar 0 6 ${OUT_DIR}"
returncode = run("chroot ${CHROOT_COPY} sudo -u ${BUILD_USER} -H timeout ${TEST_TIMEOUT} "+script,
fail_hard=False, stdout=out, stderr=out)
run("mv ${CHROOT_COPY}/${OUT_DIR} " + dir)
run("mv ${BUILD_DIR} " + dir)
if returncode == 42:
print("Successfully tested pull (needs tests) - sending comment to: " + comment_url)
commentOn(comment_url, True, False, True, resultsurl)
elif returncode != 0:
print("Failed to test pull - sending comment to: " + comment_url)
commentOn(comment_url, False, False, False, resultsurl)
else:
print("Successfully tested pull - sending comment to: " + comment_url)
commentOn(comment_url, True, False, False, resultsurl)
open(os.environ["TESTED_DB"], "a").write(commit + "\n")
def environ_default(setting, value):
if not setting in os.environ:
os.environ[setting] = value
if getpass.getuser() != "root":
print("Run me as root!")
sys.exit(1)
if "GITHUB_USER" not in os.environ or "GITHUB_AUTH_TOKEN" not in os.environ:
print("GITHUB_USER and/or GITHUB_AUTH_TOKEN environment variables not set")
sys.exit(1)
environ_default("CLONE_URL", "https://github.com/bitcoin/bitcoin.git")
environ_default("MINGW_DEPS_DIR", "/mnt/w32deps")
environ_default("SCRIPTS_DIR", "/mnt/test-scripts")
environ_default("CHROOT_COPY", "/mnt/chroot-tmp")
environ_default("CHROOT_MASTER", "/mnt/chroot")
environ_default("OUT_DIR", "/mnt/out")
environ_default("BUILD_PATH", "/mnt/bitcoin")
os.environ["BUILD_DIR"] = os.environ["CHROOT_COPY"] + os.environ["BUILD_PATH"]
environ_default("RESULTS_DIR", "/mnt/www/pull-tester")
environ_default("RESULTS_URL", "http://jenkins.bluematt.me/pull-tester/")
environ_default("GITHUB_REPO", "bitcoin/bitcoin")
environ_default("TESTED_DB", "/mnt/commits-tested.txt")
environ_default("BUILD_USER", "matt")
environ_default("BUILD_GROUP", "matt")
environ_default("TEST_TIMEOUT", str(60*60*2))
print("Optional usage: pull-tester.py 2112")
f = open(os.environ["TESTED_DB"])
tested = set( line.rstrip() for line in f.readlines() )
f.close()
if len(sys.argv) > 1:
pull = requests.get("https://api.github.com/repos/"+os.environ["GITHUB_REPO"]+"/pulls/"+sys.argv[1],
auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"])).json
testpull(pull["number"], pull["_links"]["comments"]["href"],
pull["head"]["repo"]["clone_url"], pull["head"]["sha"])
else:
for page in range(1,100):
result = requests.get("https://api.github.com/repos/"+os.environ["GITHUB_REPO"]+"/pulls?state=open&page=%d"%(page,),
auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"])).json
if len(result) == 0: break;
for pull in result:
if pull["head"]["sha"] in tested:
print("Pull %d already tested"%(pull["number"],))
continue
testpull(pull["number"], pull["_links"]["comments"]["href"],
pull["head"]["repo"]["clone_url"], pull["head"]["sha"])
| credits-currency/credits | qa/pull-tester/pull-tester.py | Python | mit | 8,944 | 0.007044 |
import fudge
from oedipus import S
from oedipus.tests import no_results, Biscuit, BaseSphinxMeta
import sphinxapi
class BiscuitWithGroupBy(object):
"""Biscuit with default groupby"""
class SphinxMeta(BaseSphinxMeta):
group_by = ('a', '@group')
@fudge.patch('sphinxapi.SphinxClient')
def test_group_by(sphinx_client):
"""Test group by."""
(sphinx_client.expects_call().returns_fake()
.is_a_stub()
.expects('SetGroupBy')
.with_args('a', sphinxapi.SPH_GROUPBY_ATTR, '@group DESC')
.expects('RunQueries')
.returns(no_results))
S(Biscuit).group_by('a', '-@group')._raw()
@fudge.patch('sphinxapi.SphinxClient')
def test_group_by_asc(sphinx_client):
"""Test group by ascending."""
(sphinx_client.expects_call().returns_fake()
.is_a_stub()
.expects('SetGroupBy')
.with_args('a', sphinxapi.SPH_GROUPBY_ATTR, '@group ASC')
.expects('RunQueries')
.returns(no_results))
S(Biscuit).group_by('a', '@group')._raw()
@fudge.patch('sphinxapi.SphinxClient')
def test_group_by_override(sphinx_client):
"""Test group by override."""
(sphinx_client.expects_call().returns_fake()
.is_a_stub()
.expects('SetGroupBy')
.with_args('a', sphinxapi.SPH_GROUPBY_ATTR, '@group ASC')
.expects('RunQueries')
.returns(no_results))
# The second call overrides the first one.
S(Biscuit).group_by('b', '-@group').group_by('a', '@group')._raw()
@fudge.patch('sphinxapi.SphinxClient')
def test_group_by_multiple_bits(sphinx_client):
"""Test group by with multiple bits."""
(sphinx_client.expects_call().returns_fake()
.is_a_stub()
.expects('SetGroupBy')
.with_args('a', sphinxapi.SPH_GROUPBY_ATTR, '@relevance DESC, age ASC')
.expects('RunQueries')
.returns(no_results))
S(Biscuit).group_by('a', ('-@relevance', 'age'))._raw()
@fudge.patch('sphinxapi.SphinxClient')
def test_group_by_sphinxmeta(sphinx_client):
"""Test group by from SphinxMeta."""
(sphinx_client.expects_call().returns_fake()
.is_a_stub()
.expects('SetGroupBy')
.with_args('a', sphinxapi.SPH_GROUPBY_ATTR, '@group ASC')
.expects('RunQueries')
.returns(no_results))
S(BiscuitWithGroupBy)._raw()
| erikrose/oedipus | oedipus/tests/test_groupby.py | Python | bsd-3-clause | 2,566 | 0.00039 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import urllib.parse
from openstack import exceptions
from openstack import resource
class Resource(resource.Resource):
@classmethod
def find(cls, session, name_or_id, ignore_missing=True, **params):
"""Find a resource by its name or id.
:param session: The session to use for making this request.
:type session: :class:`~keystoneauth1.adapter.Adapter`
:param name_or_id: This resource's identifier, if needed by
the request. The default is ``None``.
:param bool ignore_missing: When set to ``False``
:class:`~openstack.exceptions.ResourceNotFound` will be
raised when the resource does not exist.
When set to ``True``, None will be returned when
attempting to find a nonexistent resource.
:param dict params: Any additional parameters to be passed into
underlying methods, such as to
:meth:`~openstack.resource.Resource.existing`
in order to pass on URI parameters.
:return: The :class:`Resource` object matching the given name or id
or None if nothing matches.
:raises: :class:`openstack.exceptions.DuplicateResource` if more
than one resource is found for this request.
:raises: :class:`openstack.exceptions.ResourceNotFound` if nothing
is found and ignore_missing is ``False``.
"""
session = cls._get_session(session)
# Try to short-circuit by looking directly for a matching ID.
try:
match = cls.existing(
id=name_or_id,
connection=session._get_connection(),
**params)
return match.fetch(session)
except exceptions.SDKException:
# DNS may return 400 when we try to do GET with name
pass
if ('name' in cls._query_mapping._mapping.keys()
and 'name' not in params):
params['name'] = name_or_id
data = cls.list(session, **params)
result = cls._get_one_match(name_or_id, data)
if result is not None:
return result
if ignore_missing:
return None
raise exceptions.ResourceNotFound(
"No %s found for %s" % (cls.__name__, name_or_id))
@classmethod
def _get_next_link(cls, uri, response, data, marker, limit, total_yielded):
next_link = None
params = {}
if isinstance(data, dict):
links = data.get('links')
if links:
next_link = links.get('next')
total = data.get('metadata', {}).get('total_count')
if total:
# We have a kill switch
total_count = int(total)
if total_count <= total_yielded:
return None, params
# Parse params from Link (next page URL) into params.
        # This prevents duplication of query parameters which, with a large
        # number of pages, would eventually result in an HTTP 414 error.
if next_link:
parts = urllib.parse.urlparse(next_link)
query_params = urllib.parse.parse_qs(parts.query)
params.update(query_params)
next_link = urllib.parse.urljoin(next_link, parts.path)
# If we still have no link, and limit was given and is non-zero,
# and the number of records yielded equals the limit, then the user
# is playing pagination ball so we should go ahead and try once more.
if not next_link and limit:
next_link = uri
params['marker'] = marker
params['limit'] = limit
return next_link, params
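# Usage sketch (hypothetical; "Zone" stands for any concrete subclass of this
# Resource and "session" for an authenticated keystoneauth1 adapter):
#
#     zone = Zone.find(session, "example.org.", ignore_missing=True)
#     if zone is None:
#         pass  # nothing matched the given name or id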
| stackforge/python-openstacksdk | openstack/dns/v2/_base.py | Python | apache-2.0 | 4,338 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-07-31 15:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('sync', '0003_synctarget_lastrun'),
]
operations = [
migrations.RenameModel(
old_name='SyncTarget',
new_name='ImportSource',
),
]
| tdfischer/organizer | sync/migrations/0004_auto_20190731_1553.py | Python | agpl-3.0 | 403 | 0 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
#Adds the virtual environment's executable path to json file
import json,sys
import os.path
jsonPath = sys.argv[1]
key = sys.argv[2]
if os.path.isfile(jsonPath):
with open(jsonPath, 'r') as read_file:
data = json.load(read_file)
else:
directory = os.path.dirname(jsonPath)
if not os.path.exists(directory):
os.makedirs(directory)
with open(jsonPath, 'w+') as read_file:
data = {}
data = {}
with open(jsonPath, 'w') as outfile:
if key == 'condaExecPath':
data[key] = sys.argv[3]
else:
data[key] = sys.executable
json.dump(data, outfile, sort_keys=True, indent=4)
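# Usage sketch (paths and key are hypothetical): store the current interpreter
# under "pythonExecPath" in a JSON settings file:
#
#     python addEnvPath.py /tmp/pvsc/settings.json pythonExecPath
#
# For the special key "condaExecPath" the path is taken from a third argument:
#
#     python addEnvPath.py /tmp/pvsc/settings.json condaExecPath /opt/conda/bin/conda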
| DonJayamanne/pythonVSCode | build/ci/addEnvPath.py | Python | mit | 739 | 0.00406 |
#!/usr/bin/env python
# Copyright (C) 2009-2015 Johannes Dewender
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""This is a tool to submit ISRCs from a disc to MusicBrainz.
Various backends are used to gather the ISRCs
and python-musicbrainz2 to submit them.
The project is hosted on
https://github.com/JonnyJD/musicbrainz-isrcsubmit
"""
__version__ = "2.1.0"
AGENT_NAME = "isrcsubmit.py"
DEFAULT_SERVER = "musicbrainz.org"
# starting with highest priority
BACKENDS = ["mediatools", "media_info", "cdrdao", "libdiscid", "discisrc"]
BROWSERS = ["xdg-open", "x-www-browser",
"firefox", "chromium", "chrome", "opera"]
# The webbrowser module is used when nothing is found in this list.
# This especially happens on Windows and Mac OS X (browser mostly not in PATH)
import os
import re
import sys
import codecs
import logging
import getpass
import tempfile
import webbrowser
from datetime import datetime
from optparse import OptionParser
from subprocess import Popen, PIPE, call
try:
import discid
from discid import DiscError
except ImportError:
try:
from libdiscid.compat import discid
from libdiscid.compat.discid import DiscError
except ImportError:
# When both are not available, raise exception for python-discid
import discid
import musicbrainzngs
from musicbrainzngs import AuthenticationError, ResponseError, WebServiceError
try:
import keyring
except ImportError:
keyring = None
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import ConfigParser
if os.name == "nt":
SHELLNAME = "isrcsubmit.bat"
else:
SHELLNAME = "isrcsubmit.sh"
if os.path.isfile(SHELLNAME):
SCRIPTNAME = SHELLNAME
else:
SCRIPTNAME = os.path.basename(sys.argv[0])
# make code run on Python 2 and 3
try:
user_input = raw_input
except NameError:
user_input = input
try:
unicode_string = unicode
except NameError:
unicode_string = str
# global variables
options = None
ws2 = None
logger = logging.getLogger("isrcsubmit")
def script_version():
return "isrcsubmit %s by JonnyJD for MusicBrainz" % __version__
def print_help(option=None, opt=None, value=None, parser=None):
print("%s" % script_version())
print(\
"""
This python script extracts ISRCs from audio cds and submits them to MusicBrainz (musicbrainz.org).
You need to have a MusicBrainz account, specify the username and will be asked for your password every time you execute the script.
Isrcsubmit will warn you if there are any problems and won't actually submit anything to MusicBrainz without giving a final choice.
Isrcsubmit will warn you if any duplicate ISRCs are detected and help you fix previously inserted duplicate ISRCs.
The ISRC-track relationship we found on our disc is taken as our correct evaluation.
""")
parser.print_usage()
print("""\
Please report bugs on https://github.com/JonnyJD/musicbrainz-isrcsubmit""")
sys.exit(0)
def print_usage(option=None, opt=None, value=None, parser=None):
print("%s\n" % script_version())
parser.print_help()
sys.exit(0)
class Isrc(object):
def __init__(self, isrc, track=None):
self._id = isrc
self._tracks = []
if track is not None:
self._tracks.append(track)
def add_track(self, track):
if track not in self._tracks:
self._tracks.append(track)
def get_tracks(self):
return self._tracks
def get_track_numbers(self):
numbers = []
for track in self._tracks:
numbers.append(track["position"])
return ", ".join(numbers)
class Track(dict):
"""track with equality checking
This makes it easy to check if this track is already in a collection.
Only the element already in the collection needs to be hashable.
"""
def __init__(self, track, number=None):
self._track = track
self._recording = track["recording"]
self._number = number
# check that we found the track with the correct number
assert(int(self._track["position"]) == self._number)
def __eq__(self, other):
return self["id"] == other["id"]
def __getitem__(self, item):
try:
return self._recording[item]
except KeyError:
return self._track[item]
def get(self, item, default=None):
try:
return self._recording.get(item, default)
except KeyError:
return self._track.get(item, default)
class OwnTrack(Track):
"""A track found on an analyzed (own) disc"""
pass
def get_config_home():
"""Returns the base directory for isrcsubmit's configuration files."""
if os.name == "nt":
default_location = os.environ.get("APPDATA")
else:
default_location = os.path.expanduser("~/.config")
xdg_config_home = os.environ.get("XDG_CONFIG_HOME", default_location)
return os.path.join(xdg_config_home, "isrcsubmit")
def config_path():
"""Returns isrsubmit's config file location."""
return os.path.join(get_config_home(), "config")
def gather_options(argv):
global options
if sys.platform == "darwin":
# That is the device drutil expects and stable
# /dev/rdisk1 etc. change with multiple hard disks, dmgs mounted etc.
# libdiscid < 0.6.0 can't handle drive numbers
default_device = "1"
else:
default_device = discid.get_default_device()
config = ConfigParser()
config.read(config_path())
parser = OptionParser(version=script_version(), add_help_option=False)
parser.set_usage(
"{prog} [options] [user] [device]\n {prog} -h".format(
prog=SCRIPTNAME))
parser.add_option("-h", action="callback", callback=print_usage,
help="Short usage help")
parser.add_option("--help", action="callback", callback=print_help,
help="Complete help for the script")
parser.add_option("-u", "--user", metavar="USERNAME",
help="MusicBrainz username, if not given as argument.")
    # note that -d previously stood for debug
parser.add_option("-d", "--device", metavar="DEVICE",
help="CD device with a loaded audio cd, if not given as argument."
+ " The default is %s." % default_device)
parser.add_option("--release-id", metavar="RELEASE_ID",
help="Optional MusicBrainz ID of the release."
+ " This will be gathered if not given.")
parser.add_option("-b", "--backend", choices=BACKENDS, metavar="PROGRAM",
help="Force using a specific backend to extract ISRCs from the"
+ " disc. Possible backends are: %s." % ", ".join(BACKENDS)
+ " They are tried in this order otherwise.")
parser.add_option("--browser", metavar="BROWSER",
help="Program to open URLs. This will be automatically detected"
" for most setups, if not chosen manually.")
parser.add_option("--force-submit", action="store_true", default=False,
help="Always open TOC/disc ID in browser.")
parser.add_option("--server", metavar="SERVER",
help="Server to send ISRCs to. Default: %s" % DEFAULT_SERVER)
parser.add_option("--debug", action="store_true", default=False,
help="Show debug messages."
+ " Currently shows some backend messages.")
parser.add_option("--keyring", action="store_true", dest="keyring",
help="Use keyring if available.")
parser.add_option("--no-keyring", action="store_false", dest="keyring",
help="Disable keyring.")
(options, args) = parser.parse_args(argv[1:])
print("%s" % script_version())
# assign positional arguments to options
if options.user is None and args:
options.user = args[0]
args = args[1:]
if options.device is None and args:
options.device = args[0]
args = args[1:]
if args:
logger.warning("Superfluous arguments: %s", ", ".join(args))
    # If an option is set in the config and not overridden on the command
    # line, assign it to options.
if options.keyring is None and config.has_option("general", "keyring"):
options.keyring = config.getboolean("general", "keyring")
if options.backend is None and config.has_option("general", "backend"):
options.backend = config.get("general", "backend")
if options.backend not in BACKENDS:
print_error("Backend given in config file is not a valid choice.",
"Choose a backend from %s" % ", ".join(BACKENDS))
sys.exit(-1)
if options.browser is None and config.has_option("general", "browser"):
options.browser = config.get("general", "browser")
if options.device is None and config.has_option("general", "device"):
options.device = config.get("general", "device")
if options.server is None and config.has_option("musicbrainz", "server"):
options.server = config.get("musicbrainz", "server")
if options.user is None and config.has_option("musicbrainz", "user"):
options.user = config.get("musicbrainz", "user")
# assign remaining options automatically
if options.device is None:
options.device = default_device
options.sane_which = test_which()
if options.browser is None:
options.browser = find_browser()
if options.server is None:
options.server = DEFAULT_SERVER
if options.keyring is None:
options.keyring = True
if options.backend and not has_program(options.backend, strict=True):
print_error("Chosen backend not found. No ISRC extraction possible!",
"Make sure that %s is installed." % options.backend)
sys.exit(-1)
elif not options.backend:
options.backend = find_backend()
return options
def test_which():
"""There are some old/buggy "which" versions on Windows.
We want to know if the user has a "sane" which we can trust.
Unxutils has a broken 2.4 version. Which >= 2.16 should be fine.
"""
with open(os.devnull, "w") as devnull:
try:
# "which" should at least find itself
return_code = call(["which", "which"],
stdout=devnull, stderr=devnull)
except OSError:
return False # no which at all
else:
if (return_code == 0):
return True
else:
print('warning: your version of the tool "which"'
' is buggy/outdated')
if os.name == "nt":
print(' unxutils is old/broken, GnuWin32 is good.')
return False
def get_prog_version(prog):
if prog == "libdiscid":
version = discid.LIBDISCID_VERSION_STRING
elif prog == "cdrdao":
outdata = Popen([prog], stderr=PIPE).communicate()[1]
version = b" ".join(outdata.splitlines()[0].split()[::2][0:2])
else:
version = prog
return decode(version)
def has_program(program, strict=False):
"""When the backend is only a symlink to another backend,
we will return False, unless we strictly want to use this backend.
"""
if program == "libdiscid":
return "isrc" in discid.FEATURES
with open(os.devnull, "w") as devnull:
if options.sane_which:
p_which = Popen(["which", program], stdout=PIPE, stderr=devnull)
program_path = p_which.communicate()[0].strip()
if p_which.returncode == 0:
# check if it is only a symlink to another backend
real_program = os.path.basename(os.path.realpath(program_path))
if program != real_program and (
real_program in BACKENDS or real_program in BROWSERS):
if strict:
print("WARNING: %s is a symlink to %s"
% (program, real_program))
return True
else:
return False # use real program (target) instead
return True
else:
return False
elif program in BACKENDS:
try:
# we just try to start these non-interactive console apps
call([program], stdout=devnull, stderr=devnull)
except OSError:
return False
else:
return True
else:
return False
def find_backend():
"""search for an available backend
"""
backend = None
for prog in BACKENDS:
if has_program(prog):
backend = prog
break
if backend is None:
print_error("Cannot find a backend to extract the ISRCS!",
"Isrcsubmit can work with one of the following:",
" " + ", ".join(BACKENDS))
sys.exit(-1)
return backend
def find_browser():
"""search for an available browser
"""
for browser in BROWSERS:
if has_program(browser):
return browser
# This will use the webbrowser module to find a default
return None
def open_browser(url, exit=False, submit=False):
"""open url in the selected browser, default if none
"""
if options.browser:
if exit:
try:
if os.name == "nt":
# silly but necessary for spaces in the path
os.execlp(options.browser, '"' + options.browser + '"', url)
else:
# linux/unix works fine with spaces
os.execlp(options.browser, options.browser, url)
except OSError as err:
error = ["Couldn't open the url in %s: %s"
% (options.browser, str(err))]
if submit:
error.append("Please submit via: %s" % url)
print_error(*error)
sys.exit(1)
else:
try:
if options.debug:
Popen([options.browser, url])
else:
with open(os.devnull, "w") as devnull:
Popen([options.browser, url], stdout=devnull)
except OSError as err:
error = ["Couldn't open the url in %s: %s"
% (options.browser, str(err))]
if submit:
error.append("Please submit via: %s" % url)
print_error(*error)
else:
try:
if options.debug:
webbrowser.open(url)
else:
                # this suppresses stdout
webbrowser.get().open(url)
except webbrowser.Error as err:
error = ["Couldn't open the url: %s" % str(err)]
if submit:
error.append("Please submit via: %s" % url)
print_error(*error)
if exit:
sys.exit(1)
def get_real_mac_device(option_device):
"""drutil takes numbers as drives.
    We ask drutil which device name corresponds to that drive
    so that we can use it as a device for libdiscid
"""
proc = Popen(["drutil", "status", "-drive", option_device], stdout=PIPE)
try:
given = proc.communicate()[0].splitlines()[3].split("Name:")[1].strip()
except IndexError:
print_error("could not find real device",
"maybe there is no disc in the drive?")
sys.exit(-1)
# libdiscid needs the "raw" version
return given.replace("/disk", "/rdisk")
def cp65001(name):
"""This might be buggy, but better than just a LookupError
"""
if name.lower() == "cp65001":
return codecs.lookup("utf-8")
codecs.register(cp65001)
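# After the registration above, the Windows console code page alias resolves
# to UTF-8, e.g. codecs.lookup("cp65001").name == "utf-8", so encoding to
# sys.stdout.encoding no longer raises a LookupError on such consoles.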
def printf(format_string, *args):
"""Print with the % and without additional spaces or newlines
"""
if not args:
# make it convenient to use without args -> different to C
args = (format_string, )
format_string = "%s"
sys.stdout.write(format_string % args)
def decode(msg):
"""This will replace unsuitable characters and use stdin encoding
"""
if isinstance(msg, bytes):
return msg.decode(sys.stdin.encoding, "replace")
else:
return unicode_string(msg)
def encode(msg):
"""This will replace unsuitable characters and use stdout encoding
"""
if isinstance(msg, unicode_string):
return msg.encode(sys.stdout.encoding, "replace")
else:
return bytes(msg)
def print_encoded(*args):
"""This will replace unsuitable characters and doesn't append a newline
"""
stringArgs = ()
for arg in args:
stringArgs += encode(arg),
msg = b" ".join(stringArgs)
if not msg.endswith(b"\n"):
msg += b" "
if os.name == "nt":
os.write(sys.stdout.fileno(), msg)
else:
try:
sys.stdout.buffer.write(msg)
except AttributeError:
sys.stdout.write(msg)
def print_release(release, position=None):
"""Print information about a release.
If the position is given, this should be an entry
in a list of releases (choice)
"""
country = (release.get("country") or "").ljust(2)
date = (release.get("date") or "").ljust(10)
barcode = (release.get("barcode") or "").rjust(13)
label_list = release["label-info-list"]
catnumber_list = []
for label in label_list:
cat_number = label.get("catalog-number")
if cat_number:
catnumber_list.append(cat_number)
catnumbers = ", ".join(catnumber_list)
if position is None:
print_encoded("Artist:\t\t%s\n" % release["artist-credit-phrase"])
print_encoded("Release:\t%s" % release["title"])
else:
print_encoded("%#2d:" % position)
print_encoded("%s - %s" % (
release["artist-credit-phrase"], release["title"]))
if release.get("status"):
print("(%s)" % release["status"])
else:
print("")
if position is None:
print_encoded("Release Event:\t%s\t%s\n" % (date, country))
print_encoded("Barcode:\t%s\n" % release.get("barcode") or "")
print_encoded("Catalog No.:\t%s\n" % catnumbers)
print_encoded("MusicBrainz ID:\t%s\n" % release["id"])
else:
print_encoded("\t%s\t%s\t%s\t%s\n" % (
country, date, barcode, catnumbers))
def print_error(*args):
string_args = tuple([str(arg) for arg in args])
logger.error("\n ".join(string_args))
def backend_error(err):
print_error("Couldn't gather ISRCs with %s: %i - %s"
% (options.backend, err.errno, err.strerror))
sys.exit(1)
def ask_for_submission(url, print_url=False):
if options.force_submit:
submit_requested = True
else:
printf("Would you like to open the browser to submit the disc?")
submit_requested = user_input(" [y/N] ").lower() == "y"
if submit_requested:
open_browser(url, exit=True, submit=True)
elif print_url:
print("Please submit the Disc ID with this url:")
print(url)
class WebService2():
"""A web service wrapper that asks for a password when first needed.
This uses musicbrainzngs as a wrapper itself.
"""
def __init__(self, username=None):
self.auth = False
self.keyring_failed = False
self.username = username
musicbrainzngs.set_hostname(options.server)
musicbrainzngs.set_useragent(AGENT_NAME, __version__,
"http://github.com/JonnyJD/musicbrainz-isrcsubmit")
def authenticate(self):
"""Sets the password if not set already
"""
if not self.auth:
print("")
if self.username is None:
printf("Please input your MusicBrainz username (empty=abort): ")
self.username = user_input()
if len(self.username) == 0:
print("(aborted)")
sys.exit(1)
password = None
if keyring is not None and options.keyring and not self.keyring_failed:
password = keyring.get_password(options.server, self.username)
if password is None:
password = getpass.getpass(
"Please input your MusicBrainz password: ")
print("")
musicbrainzngs.auth(self.username, password)
self.auth = True
self.keyring_failed = False
if keyring is not None and options.keyring:
keyring.set_password(options.server, self.username, password)
def get_releases_by_discid(self, disc_id, includes=[]):
try:
response = musicbrainzngs.get_releases_by_discid(disc_id,
includes=includes)
except ResponseError as err:
if err.cause.code == 404:
return []
else:
print_error("Couldn't fetch release: %s" % err)
sys.exit(1)
except WebServiceError as err:
print_error("Couldn't fetch release: %s" % err)
sys.exit(1)
else:
if response.get("disc"):
return response["disc"]["release-list"]
else:
return []
def get_release_by_id(self, release_id, includes=[]):
try:
return musicbrainzngs.get_release_by_id(release_id,
includes=includes)
except WebServiceError as err:
print_error("Couldn't fetch release: %s" % err)
sys.exit(1)
def submit_isrcs(self, tracks2isrcs):
logger.info("tracks2isrcs: %s", tracks2isrcs)
while True:
try:
self.authenticate()
musicbrainzngs.submit_isrcs(tracks2isrcs)
except AuthenticationError as err:
print_error("Invalid credentials: %s" % err)
self.auth = False
self.keyring_failed = True
self.username = None
continue
except WebServiceError as err:
print_error("Couldn't send ISRCs: %s" % err)
sys.exit(1)
else:
print("Successfully submitted %d ISRCS." % len(tracks2isrcs))
break
class Disc(object):
def read_disc(self):
try:
# calculate disc ID from disc
if self._backend == "libdiscid" and not options.force_submit:
disc = discid.read(self._device, features=["mcn", "isrc"])
else:
disc = discid.read(self._device)
self._disc = disc
except DiscError as err:
print_error("DiscID calculation failed: %s" % err)
sys.exit(1)
def __init__(self, device, backend, verified=False):
if sys.platform == "darwin":
self._device = get_real_mac_device(device)
logger.info("CD drive #%s corresponds to %s internally",
device, self._device)
else:
self._device = device
self._disc = None
self._release = None
self._backend = backend
self._verified = verified
self._asked_for_submission = False
self._common_includes=["artists", "labels", "recordings", "isrcs",
"artist-credits"] # the last one only for cleanup
self.read_disc() # sets self._disc
@property
def id(self):
return self._disc.id
@property
def mcn(self):
mcn = self._disc.mcn
if mcn and int(mcn) > 0:
return mcn
else:
return None
@property
def tracks(self):
return self._disc.tracks
@property
def submission_url(self):
url = self._disc.submission_url
# mm.mb.o points to mb.o, if present in the url
url = url.replace("//mm.", "//")
return url.replace("musicbrainz.org", options.server)
@property
def asked_for_submission(self):
return self._asked_for_submission
@property
def release(self):
"""The corresponding MusicBrainz release
This will ask the user to choose if the discID is ambiguous.
"""
if self._release is None:
self.get_release(self._verified)
# can still be None
return self._release
def fetch_release(self, release_id):
"""Check if a pre-selected release has the correct TOC attached
"""
includes = self._common_includes + ["discids"]
result = ws2.get_release_by_id(release_id, includes=includes)
release = result["release"]
for medium in release["medium-list"]:
for disc in medium["disc-list"]:
if disc["id"] == self.id:
return release
# disc ID is not attached to the release
return None
def select_release(self):
"""Find the corresponding MusicBrainz release by disc ID
This will ask the user to choose if the discID is ambiguous.
"""
includes = self._common_includes
results = ws2.get_releases_by_discid(self.id, includes=includes)
num_results = len(results)
if options.force_submit:
print("\nSubmission forced.")
selected_release = None
elif num_results == 0:
print("\nThis Disc ID is not in the database.")
selected_release = None
elif num_results > 1:
print("\nThis Disc ID is ambiguous:")
print(" 0: none of these\n")
self._asked_for_submission = True
for i in range(num_results):
release = results[i]
# printed list is 1..n, not 0..n-1 !
print_release(release, i + 1)
try:
num = user_input("Which one do you want? [0-%d] "
% num_results)
if int(num) not in range(0, num_results + 1):
raise IndexError
if int(num) == 0:
ask_for_submission(self.submission_url, print_url=True)
sys.exit(1)
else:
selected_release = results[int(num) - 1]
except (ValueError, IndexError):
print_error("Invalid Choice")
sys.exit(1)
except KeyboardInterrupt:
print("\nexiting..")
sys.exit(1)
else:
selected_release = results[0]
return selected_release
def get_release(self, verified=False):
"""This will get a release the ISRCs will be added to.
"""
# check if a release was pre-selected
if options.release_id:
chosen_release = self.fetch_release(options.release_id)
else:
chosen_release = self.select_release()
if chosen_release and chosen_release["id"] is None:
# a "release" that is only a stub has no musicbrainz id
print("\nThere is only a stub in the database:")
print_encoded("%s - %s\n\n"
% (chosen_release["artist-credit-phrase"],
chosen_release["title"]))
chosen_release = None # don't use stub
verified = True # the id is verified by the stub
if chosen_release is None or options.force_submit:
if verified:
url = self.submission_url
ask_for_submission(url, print_url=True)
sys.exit(1)
else:
print("recalculating to re-check..")
self.read_disc()
self.get_release(verified=True)
self._release = chosen_release
return chosen_release
def get_disc(device, backend, verified=False):
"""This creates a Disc object, which also calculates the id of the disc
"""
disc = Disc(device, backend, verified)
print('\nDiscID:\t\t%s' % disc.id)
if disc.mcn:
print('MCN/EAN:\t%s' % disc.mcn)
print('Tracks on disc:\t%d' % len(disc.tracks))
return disc
def gather_isrcs(disc, backend, device):
"""read the disc in the device with the backend and extract the ISRCs
"""
backend_output = []
devnull = open(os.devnull, "w")
if backend == "libdiscid":
pattern = r'[A-Z]{2}[A-Z0-9]{3}\d{2}\d{5}'
for track in disc.tracks:
if track.isrc:
match = re.match(pattern, track.isrc)
if match is None:
print("no valid ISRC: %s" % track.isrc)
else:
backend_output.append((track.number, track.isrc))
# redundant to "libdiscid", but this might be handy for prerelease testing
elif backend == "discisrc":
pattern = \
r'Track\s+([0-9]+)\s+:\s+([A-Z]{2})-?([A-Z0-9]{3})-?(\d{2})-?(\d{5})'
try:
if sys.platform == "darwin":
device = get_real_mac_device(device)
proc = Popen([backend, device], stdout=PIPE)
isrcout = proc.stdout
except OSError as err:
backend_error(err)
for line in isrcout:
            line = decode(line)  # explicitly decode from pipe
ext_logger = logging.getLogger("discisrc")
ext_logger.debug(line.rstrip()) # rstrip newline
if line.startswith("Track") and len(line) > 12:
match = re.search(pattern, line)
if match is None:
print("can't find ISRC in: %s" % line)
continue
track_number = int(match.group(1))
isrc = ("%s%s%s%s" % (match.group(2), match.group(3),
match.group(4), match.group(5)))
backend_output.append((track_number, isrc))
# media_info is a preview version of mediatools, both are for Windows
# this does some kind of raw read
elif backend in ["mediatools", "media_info"]:
pattern = \
r'ISRC\s+([0-9]+)\s+([A-Z]{2})-?([A-Z0-9]{3})-?(\d{2})-?(\d{5})'
if backend == "mediatools":
args = [backend, "drive", device, "isrc"]
else:
args = [backend, device]
try:
proc = Popen(args, stdout=PIPE)
isrcout = proc.stdout
except OSError as err:
backend_error(err)
for line in isrcout:
            line = decode(line)  # explicitly decode from pipe
ext_logger = logging.getLogger("mediatools")
ext_logger.debug(line.rstrip()) # rstrip newline
if line.startswith("ISRC") and not line.startswith("ISRCS"):
match = re.search(pattern, line)
if match is None:
print("can't find ISRC in: %s" % line)
continue
track_number = int(match.group(1))
isrc = ("%s%s%s%s" % (match.group(2), match.group(3),
match.group(4), match.group(5)))
backend_output.append((track_number, isrc))
# cdrdao will create a temp file and we delete it afterwards
# cdrdao is also available for windows
# this will also fetch ISRCs from CD-TEXT
elif backend == "cdrdao":
# no byte pattern, file is opened as unicode
pattern = r'[A-Z]{2}[A-Z0-9]{3}\d{2}\d{5}'
tmpname = "cdrdao-%s.toc" % datetime.now()
tmpname = tmpname.replace(":", "-") # : is invalid on windows
tmpfile = os.path.join(tempfile.gettempdir(), tmpname)
logger.info("Saving toc in %s..", tmpfile)
if os.name == "nt":
if device != discid.get_default_device():
logger.warning("cdrdao uses the default device")
args = [backend, "read-toc", "-v", "0", tmpfile]
else:
args = [backend, "read-toc", "--device", device, "-v", "0", tmpfile]
try:
if options.debug:
proc = Popen(args, stdout=devnull)
else:
proc = Popen(args, stdout=devnull, stderr=devnull)
if proc.wait() != 0:
print_error("%s returned with %i" % (backend, proc.returncode))
sys.exit(1)
except OSError as err:
backend_error(err)
else:
# that file seems to be opened in Unicode mode in Python 3
with open(tmpfile, "r") as toc:
track_number = None
for line in toc:
ext_logger = logging.getLogger("cdrdao")
ext_logger.debug(line.rstrip()) # rstrip newline
words = line.split()
if words:
if words[0] == "//":
track_number = int(words[2])
elif words[0] == "ISRC" and track_number is not None:
isrc = "".join(words[1:]).strip('"- ')
match = re.match(pattern, isrc)
if match is None:
print("no valid ISRC: %s" % isrc)
else:
backend_output.append((track_number, isrc))
# safeguard against missing trackNumber lines
# or duplicated ISRC tags (like in CD-Text)
track_number = None
finally:
try:
os.unlink(tmpfile)
except OSError:
pass
devnull.close()
return backend_output
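# gather_isrcs() returns a list of (track_number, isrc) tuples, e.g.
# [(1, "GBAYE9300106"), (2, "GBAYE9300107")] (these ISRC values are purely
# illustrative); check_isrcs_local() below consumes exactly this shape.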
def check_isrcs_local(backend_output, mb_tracks):
"""check backend_output for (local) duplicates and inconsistencies
"""
isrcs = dict() # isrcs found on disc
tracks2isrcs = dict() # isrcs to be submitted
errors = 0
for (track_number, isrc) in backend_output:
if isrc not in isrcs:
isrcs[isrc] = Isrc(isrc)
# check if we found this ISRC for multiple tracks
with_isrc = [item for item in backend_output if item[1] == isrc]
if len(with_isrc) > 1:
track_list = [str(item[0]) for item in with_isrc]
print_error("%s gave the same ISRC for multiple tracks!"
% options.backend,
"ISRC: %s\ttracks: %s"
% (isrc, ", ".join(track_list)))
errors += 1
try:
track = mb_tracks[track_number - 1]
except IndexError:
print_error("ISRC %s found for unknown track %d"
% (isrc, track_number))
errors += 1
else:
own_track = OwnTrack(track, track_number)
isrcs[isrc].add_track(own_track)
# check if the ISRC was already added to the track
if isrc not in own_track.get("isrc-list", []):
# single isrcs work in python-musicbrainzngs 0.4, but not 0.3
# lists of isrcs don't work in 0.4 though, see pymbngs #113
tracks2isrcs[own_track["id"]] = isrc
print("found new ISRC for track %d: %s"
% (track_number, isrc))
else:
print("%s is already attached to track %d"
% (isrc, track_number))
return isrcs, tracks2isrcs, errors
def check_global_duplicates(release, mb_tracks, isrcs):
"""Help cleaning up global duplicates with the information we got
from our disc.
"""
duplicates = 0
# add already attached ISRCs
for i in range(0, len(mb_tracks)):
track = mb_tracks[i]
track_number = i + 1
track = Track(track, track_number)
for isrc in track.get("isrc-list", []):
# only check ISRCS we also found on our disc
if isrc in isrcs:
isrcs[isrc].add_track(track)
# check if we have multiple tracks for one ISRC
for isrc in isrcs:
if len(isrcs[isrc].get_tracks()) > 1:
duplicates += 1
if duplicates > 0:
printf("\nThere were %d ISRCs ", duplicates)
print("that are attached to multiple tracks on this release.")
choice = user_input("Do you want to help clean those up? [y/N] ")
if choice.lower() == "y":
cleanup_isrcs(release, isrcs)
def cleanup_isrcs(release, isrcs):
"""Show information about duplicate ISRCs
Our attached ISRCs should be correct -> helps to delete from other tracks
"""
for isrc in isrcs:
tracks = isrcs[isrc].get_tracks()
if len(tracks) > 1:
print("\nISRC %s attached to:" % isrc)
for track in tracks:
printf("\t")
artist = track.get("artist-credit-phrase")
if artist and artist != release["artist-credit-phrase"]:
string = "%s - %s" % (artist, track["title"])
else:
string = "%s" % track["title"]
print_encoded(string)
# tab alignment
if len(string) >= 32:
printf("\n%s", " " * 40)
else:
if len(string) < 7:
printf("\t")
if len(string) < 15:
printf("\t")
if len(string) < 23:
printf("\t")
if len(string) < 31:
printf("\t")
printf("\t track %s", track["position"])
if isinstance(track, OwnTrack):
print(" [OUR EVALUATION]")
else:
print("")
url = "http://%s/isrc/%s" % (options.server, isrc)
if user_input("Open ISRC in the browser? [Y/n] ").lower() != "n":
open_browser(url)
user_input("(press <return> when done with this ISRC) ")
def main(argv):
global options
global ws2
# preset logger
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
logging.getLogger().addHandler(stream_handler) # add to root handler
# global variables
options = gather_options(argv)
ws2 = WebService2(options.user)
if options.debug:
logging.getLogger().setLevel(logging.DEBUG)
stream_handler.setLevel(logging.INFO)
# adding log file
logfile = "isrcsubmit.log"
file_handler = logging.FileHandler(logfile, mode='w',
encoding="utf8", delay=True)
formatter = logging.Formatter("%(levelname)s:%(name)s: %(message)s")
file_handler.setFormatter(formatter)
file_handler.setLevel(logging.DEBUG)
logger.info("Writing debug log to %s", logfile)
logging.getLogger().addHandler(file_handler)
# add context to log file (DEBUG only added there)
logger.debug(script_version())
logger.info("using discid version %s", discid.__version__)
print("using %s" % get_prog_version(options.backend))
disc = get_disc(options.device, options.backend)
disc.get_release()
print("")
print_release(disc.release)
if not disc.asked_for_submission:
print("")
print("Is this information different for your release?")
ask_for_submission(disc.submission_url)
media = []
for medium in disc.release["medium-list"]:
for disc_entry in medium["disc-list"]:
if disc_entry["id"] == disc.id:
media.append(medium)
break
if len(media) > 1:
raise DiscError("number of discs with id: %d" % len(media))
mb_tracks = media[0]["track-list"]
print("")
# (track, isrc)
backend_output = gather_isrcs(disc, options.backend, options.device)
# list, dict
isrcs, tracks2isrcs, errors = check_isrcs_local(backend_output, mb_tracks)
if isrcs:
print("")
# try to submit the ISRCs
update_intention = True
if not tracks2isrcs:
print("No new ISRCs could be found.")
else:
if errors > 0:
print_error("%d problems detected" % errors)
if user_input("Do you want to submit? [y/N] ").lower() == "y":
ws2.submit_isrcs(tracks2isrcs)
else:
update_intention = False
print("Nothing was submitted to the server.")
# check for overall duplicate ISRCs, including server provided
if update_intention:
# the ISRCs are deemed correct, so we can use them to check others
check_global_duplicates(disc.release, mb_tracks, isrcs)
if __name__ == "__main__":
main(sys.argv)
# vim:set shiftwidth=4 smarttab expandtab:
| jesus2099/JonnyJD_musicbrainz-isrcsubmit | isrcsubmit.py | Python | gpl-3.0 | 41,557 | 0.002142 |
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth import authenticate, login
from django.contrib.auth import logout
from django.contrib.auth.decorators import login_required
import time
import csv
from ControlUsuarios.forms import *
from ControlUsuarios.models import UserProfile
# Create your views here.
from django.http import HttpResponse
from django.http import JsonResponse
from django.shortcuts import render
from django.shortcuts import redirect
from django import forms
from django.contrib.auth.models import User
from django.http import HttpResponseRedirect
from django.views.generic.base import View
from datetime import datetime
from bson import Binary, Code
from bson.json_util import dumps
from bson.json_util import loads
from clase import *
gestorClase=ClaseDriver()
@csrf_exempt
def index(request):
if request.method == 'GET':
        session_num = gestorClase.database.sesion.find({}).count()
        if session_num > 0:
            lista = list(gestorClase.database.sesion.find({}))
            print lista[0]["clave_sesion"]
            return render(request, 'ControlUsuarios/session_android.html',
                          {"qr": lista[0]["clave_sesion"], "fecha": lista[0]["fecha_sesion"]})
        # no session stored yet: render the page with placeholder values
        return render(request, 'ControlUsuarios/session_android.html',
                      {"qr": "default", "fecha": ""})
        #return render(request, 'registration/login.html',{})
@csrf_exempt
def sesion(request):
clase=gestorClase.database.clase.find()
if request.method == 'POST':
print "entrando por post"
form = SessionForm(request.POST)
if form.is_valid():
session_tag=form.data['session_tag']
print session_tag
gestorClase.createSesion(session_tag)
return render(request, 'ControlUsuarios/sessions.html',{'form': form,"qr":session_tag,"clase":clase} )
else:
        session_num = gestorClase.database.sesion.find({}).count()
        qr = "default"
        if session_num > 0:
            lista = list(gestorClase.database.sesion.find({}))
            print lista[0]["clave_sesion"]
            qr = lista[0]["clave_sesion"]
        form = SessionForm()
        return render(request, 'ControlUsuarios/sessions.html', {'form': form, "qr": qr, "clase": clase})
class Preferencias(View):
def get(self, request):
print "Entrando por el get"
form=FormEntrada()
return render(request, 'ControlUsuarios/preferencias.html', {'form': form})
def post(self, request):
print "Entrando por el post"
reader_clase=None
form = FormEntrada(request.POST, request.FILES)
if form.is_valid():
fichero1=request.FILES.get('file_clase',None)
if fichero1 is not None:
fieldnames = ("NOMBRE","DNI")
reader_clase = csv.DictReader(request.FILES['file_clase'], fieldnames)
gestorClase.createClaseFromReader(reader_clase)
return redirect('/Preferencias',{'form':form})
else:
print "formulario invalido"
#form = FormEntrada()
return render(request, 'noinventory/Preferencias.html', {'form': form})
@csrf_exempt
def borrarTodo(request):
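    # NOTE: on GET this view re-seeds the sample roster, while on any other
    # method it wipes both the class and the session collections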
if request.method == 'GET':
gestorClase.database.clase.remove()
cl={"Alumnos": [{"NOMBRE": "Hugo Barzano Cruz","DNI": "77138361"}, {"NOMBRE": "Mariano Palomo Villafranca","DNI": "66666666z"}]}
for i in cl["Alumnos"]:
i["assitencia"]="False"
print i
gestorClase.database.clase.insert(i)
aux3=[]
respuesta={}
lista_alumnos=gestorClase.database.clase.find({})
for a in lista_alumnos:
print a["NOMBRE"]
aux4={"NOMBRE":a["NOMBRE"],"DNI":a["DNI"],"assitencia":a["assitencia"]}
aux3.append(aux4)
respuesta={"alumnos":aux3}
return JsonResponse(respuesta,safe=False)
else:
gestorClase.database.clase.remove()
gestorClase.database.sesion.remove()
default={"NOMBRE":"Nombre","DNI":"Dni","assitencia":"asistencia"}
aux7=[]
aux7.append(default)
respuesta={"alumnos":aux7}
return JsonResponse(respuesta,safe=False)
@csrf_exempt
def inicializarClase(request):
    # GET and POST behave identically: reset the class to the sample roster
    # and return it as JSON
    gestorClase.database.clase.remove()
    cl = {"Alumnos": [{"NOMBRE": "Hugo Barzano Cruz", "DNI": "77138361"}, {"NOMBRE": "Mariano Palomo Villafranca", "DNI": "66666666z"}]}
    for i in cl["Alumnos"]:
        i["assitencia"] = "False"
        print i
        gestorClase.database.clase.insert(i)
    aux3 = []
    lista_alumnos = gestorClase.database.clase.find({})
    for a in lista_alumnos:
        print a["NOMBRE"]
        aux3.append({"NOMBRE": a["NOMBRE"], "DNI": a["DNI"], "assitencia": a["assitencia"]})
    respuesta = {"alumnos": aux3}
    return JsonResponse(respuesta, safe=False)
@csrf_exempt
def setClaveAndroid(request):
if request.method == 'POST':
mydic=dict(request.POST)
print mydic["clave"][0]
if mydic["clave"][0] == "":
gestorClase.createSesion("default")
else:
gestorClase.createSesion(mydic["clave"][0])
return HttpResponse("Ok")
@csrf_exempt
def alumnosJson(request):
    # GET and POST are handled the same way: return the current class list,
    # or a placeholder row when the class is empty
    default = {"NOMBRE": "Nombre", "DNI": "Dni", "assitencia": "asistencia"}
    respuesta = {"alumnos": [default]}
    numero_alumnos = gestorClase.database.clase.find({}).count()
    if numero_alumnos > 0:
        aux3 = []
        lista_alumnos = gestorClase.database.clase.find({})
        for a in lista_alumnos:
            print a["NOMBRE"]
            aux3.append({"NOMBRE": a["NOMBRE"], "DNI": a["DNI"], "assitencia": a["assitencia"]})
        respuesta = {"alumnos": aux3}
    return JsonResponse(respuesta, safe=False)
@csrf_exempt
def CheckFromQr(request):
if request.method == 'POST':
mydic=dict(request.POST)
print mydic
dni=mydic["dni"][0]
aux=mydic["scaner"][0]
        alumno = gestorClase.database.clase.find({"DNI": str(dni)})
        sesion = gestorClase.database.sesion.find({"fecha_sesion": datetime.now().strftime('%Y-%m-%d')})
        print "student and session looked up"
        # find() returns a cursor, never None, so check for a match before
        # indexing into it
        if alumno.count() > 0:
            print alumno[0]
            if sesion[0]["clave_sesion"] == aux:
                # "assitencia" is kept as spelled: it is the field name stored
                # in MongoDB and consumed by the Android client
                gestorClase.database.clase.update({"_id": alumno[0]["_id"]}, {"$set": {"assitencia": "True"}})
            else:
                print "Incorrect attendance key"
        else:
            print "User is not part of the class"
        print "DNI: " + dni
        print "Session key: " + aux
return HttpResponse("OK")
else:
print "recibido get"
return HttpResponse("gettttttt")
@csrf_exempt
def CheckFromNfc(request):
if request.method == 'POST':
mydic=dict(request.POST)
print mydic
dni=mydic["dni"][0]
aux=mydic["nfc"][0]
        alumno = gestorClase.database.clase.find({"DNI": str(dni)})
        sesion = gestorClase.database.sesion.find({"fecha_sesion": datetime.now().strftime('%Y-%m-%d')})
        print "student and session looked up"
        # find() returns a cursor, never None, so check for a match before
        # indexing into it
        if alumno.count() > 0:
            print alumno[0]
            if sesion[0]["clave_sesion"] == aux:
                gestorClase.database.clase.update({"_id": alumno[0]["_id"]}, {"$set": {"assitencia": "True"}})
            else:
                print "Incorrect attendance key"
        else:
            print "User is not part of the class"
        print "DNI: " + dni
        print "Session key: " + aux
return HttpResponse("OK")
else:
print "recibido get"
# print request.GET['contenido_scaner']
return HttpResponse("gettttttt")
######################### USER REGISTRATION ############################################
@csrf_exempt
def androidLogin(request):
if request.method=='POST':
username = request.POST["username"]
password = request.POST["password"]
user = authenticate(username=username, password=password)
if user:
# Is the account active? It could have been disabled.
if user.is_active:
u=User.objects.get(username=user.username)
user_profile = UserProfile.objects.get(user=user)
login(request, user)
#data="nombre_usuario :"+username
return HttpResponse(user_profile.__dni__())
else:
return HttpResponse("Your account is disabled.")
else:
print "Invalid login details: {0}, {1}".format(username, password)
return HttpResponse("Invalid login details supplied.")
else:
print "entrando por get"
return HttpResponse()
@csrf_exempt
def androidRegister(request):
if request.method=='POST':
user_form = UserForm(data=request.POST)
profile_form = UserProfileForm(data=request.POST)
if user_form.is_valid():
if profile_form.is_valid():
user = user_form.save()
user.set_password(user.password)
user.save()
profile = profile_form.save(commit=False)
profile.user = user
profile.save()
return HttpResponse("success")
else:
return HttpResponse("Invalid User or Dni")
else:
return HttpResponse("Username exist or Invalid Email")
else:
print "entrando por get"
return HttpResponse()
def register(request):
# A boolean value for telling the template whether the registration was successful.
# Set to False initially. Code changes value to True when registration succeeds.
registered = False
# If it's a HTTP POST, we're interested in processing form data.
if request.method == 'POST':
# Attempt to grab information from the raw form information.
# Note that we make use of both UserForm and UserProfileForm.
user_form = UserForm(data=request.POST)
profile_form = UserProfileForm(data=request.POST)
# If the two forms are valid...
if user_form.is_valid() and profile_form.is_valid():
# Save the user's form data to the database.
user = user_form.save()
# Now we hash the password with the set_password method.
# Once hashed, we can update the user object.
user.set_password(user.password)
user.save()
# Now sort out the UserProfile instance.
# Since we need to set the user attribute ourselves, we set commit=False.
# This delays saving the model until we're ready to avoid integrity problems.
profile = profile_form.save(commit=False)
profile.user = user
# Now we save the UserProfile model instance.
profile.save()
# Update our variable to tell the template registration was successful.
registered = True
#else:
#return HttpResponseRedirect('/')
# Invalid form or forms - mistakes or something else?
# Print problems to the terminal.
# They'll also be shown to the user.
else:
print user_form.errors, profile_form.errors
#return redirect('registration/register.html',{'user_form': user_form, 'profile_form': profile_form, 'registered': registered} )
#print user_form.errors, profile_form.errors
# Not a HTTP POST, so we render our form using two ModelForm instances.
# These forms will be blank, ready for user input.
else:
user_form = UserForm()
profile_form = UserProfileForm()
# Render the template depending on the context.
return render(request, 'registration/register.html',{'user_form': user_form, 'profile_form': profile_form, 'registered': registered} )
@csrf_exempt
def user_login(request):
# If the request is a HTTP POST, try to pull out the relevant information.
if request.method == 'POST':
# Gather the username and password provided by the user.
# This information is obtained from the login form.
        # We use request.POST.get('<variable>') as opposed to request.POST['<variable>']
        # because request.POST.get('<variable>') returns None if the value does not
        # exist, while request.POST['<variable>'] raises a KeyError.
username = request.POST.get('username')
password = request.POST.get('password')
# Use Django's machinery to attempt to see if the username/password
# combination is valid - a User object is returned if it is.
user = authenticate(username=username, password=password)
# If we have a User object, the details are correct.
# If None (Python's way of representing the absence of a value), no user
# with matching credentials was found.
if user:
# Is the account active? It could have been disabled.
if user.is_active:
u=User.objects.get(username=user.username)
request.session['username'] = u.username
user_profile = UserProfile.objects.get(user=user)
#print user_profile.__organizacion__()
request.session['dni'] = user_profile.__dni__()
login(request, user)
return HttpResponseRedirect('/sesion/')
else:
# An inactive account was used - no logging in!
return HttpResponse("Your account is disabled.")
else:
# Bad login details were provided. So we can't log the user in.
print "Invalid login details: {0}, {1}".format(username, password)
return HttpResponse("Invalid login details supplied.")
# The request is not a HTTP POST, so display the login form.
# This scenario would most likely be a HTTP GET.
else:
# No context variables to pass to the template system, hence the
# blank dictionary object...
return render(request, 'registration/login.html', {})
@login_required
def user_logout(request):
# Since we know the user is logged in, we can now just log them out.
del request.session['username']
del request.session['dni']
logout(request)
# Take the user back to the homepage.
return HttpResponseRedirect('/')
| hugobarzano/DispositivosMovilesBackEnd | ControlUsuarios/views.py | Python | gpl-3.0 | 16,446 | 0.014958 |
# encoding: utf-8
"""
Creates geometry objects from facets.
"""
from yade.wrapper import *
import utils,math,numpy
from minieigen import *
#facetBox===============================================================
def facetBox(center,extents,orientation=Quaternion((0,1,0),0.0),wallMask=63,**kw):
"""
Create arbitrarily-aligned box composed of facets, with given center, extents and orientation.
If any of the box dimensions is zero, corresponding facets will not be created. The facets are oriented outwards from the box.
:param Vector3 center: center of the box
:param Vector3 extents: lengths of the box sides
:param Quaternion orientation: orientation of the box
:param bitmask wallMask: determines which walls will be created, in the order -x (1), +x (2), -y (4), +y (8), -z (16), +z (32). The numbers are ANDed; the default 63 means to create all walls
:param \*\*kw: (unused keyword arguments) passed to :yref:`yade.utils.facet`
:returns: list of facets forming the box
"""
return facetParallelepiped(center=center, extents=extents, height=extents[2], orientation=orientation, wallMask=wallMask, **kw)
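# Example usage (a sketch; assumes an interactive yade session where `O` is the
# global Omega instance):
#   from yade import geom
#   O.bodies.append(geom.facetBox((0,0,0),(1,1,1)))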
#facetParallelepiped===============================================================
def facetParallelepiped(center,extents,height,orientation=Quaternion((0,1,0),0.0),wallMask=63,**kw):
"""
Create arbitrarily-aligned Parallelepiped composed of facets, with given center, extents, height and orientation.
If any of the parallelepiped dimensions is zero, corresponding facets will not be created. The facets are oriented outwards from the parallelepiped.
:param Vector3 center: center of the parallelepiped
:param Vector3 extents: lengths of the parallelepiped sides
:param Real height: height of the parallelepiped (along axis z)
:param Quaternion orientation: orientation of the parallelepiped
:param bitmask wallMask: determines which walls will be created, in the order -x (1), +x (2), -y (4), +y (8), -z (16), +z (32). The numbers are ANDed; the default 63 means to create all walls
:param \*\*kw: (unused keyword arguments) passed to :yref:`yade.utils.facet`
:returns: list of facets forming the parallelepiped
"""
	if (height<0): raise RuntimeError("The height must be non-negative");
	if (height>extents[2]): raise RuntimeError("The height must be less than or equal to extents[2]");
#Defense from zero dimensions
if (wallMask>63):
print "wallMask must be 63 or less"
wallMask=63
if (extents[0]==0):
wallMask=1
elif (extents[1]==0):
wallMask=4
elif (extents[2]==0 or height==0):
wallMask=16
if (((extents[0]==0) and (extents[1]==0)) or ((extents[0]==0) and (extents[2]==0)) or ((extents[1]==0) and (extents[2]==0))):
raise RuntimeError("Please, specify at least 2 none-zero dimensions in extents!");
# ___________________________
#inclination angle
beta = 0; dx = 0
if (height>0):
beta = math.asin(height/extents[2])
dx = math.cos(beta)*extents[2]
mn,mx=[-extents[i] for i in 0,1,2],[extents[i] for i in 0,1,2]
def doWall(a,b,c,d):
return [utils.facet((a,b,c),**kw),utils.facet((a,c,d),**kw)]
ret=[]
mn[2] = -height
mx[2] = +height
A=orientation*Vector3(mn[0],mn[1],mn[2])+center
B=orientation*Vector3(mx[0],mn[1],mn[2])+center
C=orientation*Vector3(mx[0],mx[1],mn[2])+center
D=orientation*Vector3(mn[0],mx[1],mn[2])+center
E=orientation*Vector3(mn[0]+dx,mn[1],mx[2])+center
F=orientation*Vector3(mx[0]+dx,mn[1],mx[2])+center
G=orientation*Vector3(mx[0]+dx,mx[1],mx[2])+center
H=orientation*Vector3(mn[0]+dx,mx[1],mx[2])+center
if wallMask&1: ret+=doWall(A,D,H,E)
if wallMask&2: ret+=doWall(B,F,G,C)
if wallMask&4: ret+=doWall(A,E,F,B)
if wallMask&8: ret+=doWall(D,C,G,H)
if wallMask&16: ret+=doWall(A,B,C,D)
if wallMask&32: ret+=doWall(E,H,G,F)
return ret
#facetCylinder==========================================================
def facetCylinder(center,radius,height,orientation=Quaternion((0,1,0),0.0),
segmentsNumber=10,wallMask=7,angleRange=None,closeGap=False,
radiusTopInner=-1, radiusBottomInner=-1,
**kw):
"""
Create arbitrarily-aligned cylinder composed of facets, with given center, radius, height and orientation.
Return List of facets forming the cylinder;
:param Vector3 center: center of the created cylinder
:param float radius: cylinder radius
:param float height: cylinder height
:param float radiusTopInner: inner radius of cylinders top, -1 by default
:param float radiusBottomInner: inner radius of cylinders bottom, -1 by default
:param Quaternion orientation: orientation of the cylinder; the reference orientation has axis along the $+x$ axis.
:param int segmentsNumber: number of edges on the cylinder surface (>=5)
:param bitmask wallMask: determines which walls will be created, in the order up (1), down (2), side (4). The numbers are ANDed; the default 7 means to create all walls
	:param (θmin,Θmax) angleRange: allows one to create only part of the cylinder by specifying range of angles; if ``None``, (0,2*pi) is assumed.
:param bool closeGap: close range skipped in angleRange with triangular facets at cylinder bases.
:param \*\*kw: (unused keyword arguments) passed to utils.facet;
"""
	# check zero dimensions
	if (radius<=0): raise RuntimeError("The radius must be positive");
if (height<=0): wallMask = 1;
return facetCylinderConeGenerator(center=center,radiusTop=radius,height=height,
orientation=orientation,segmentsNumber=segmentsNumber,wallMask=wallMask,
angleRange=angleRange,closeGap=closeGap,
radiusTopInner=radiusTopInner, radiusBottomInner=radiusBottomInner,
**kw)
#facetSphere==========================================================
def facetSphere(center,radius,thetaResolution=8,phiResolution=8,returnElementMap=False,**kw):
"""
Create arbitrarily-aligned sphere composed of facets, with given center, radius and orientation.
Return List of facets forming the sphere. Parameters inspired by ParaView sphere glyph
:param Vector3 center: center of the created sphere
:param float radius: sphere radius
:param int thetaResolution: number of facets around "equator"
:param int phiResolution: number of facets between "poles" + 1
:param bool returnElementMap: returns also tuple of nodes ((x1,y1,z1),(x2,y2,z2),...) and elements ((id01,id02,id03),(id11,id12,id13),...) if true, only facets otherwise
:param \*\*kw: (unused keyword arguments) passed to utils.facet;
"""
	# check zero dimensions
	if (radius<=0): raise RuntimeError("The radius must be positive");
	if (thetaResolution<3): raise RuntimeError("thetaResolution must be >= 3");
	if (phiResolution<3): raise RuntimeError("phiResolution must be >= 3");
r,c0,c1,c2 = radius,center[0],center[1],center[2]
nodes = [Vector3(c0,c1,c2+radius)]
phis = numpy.linspace(math.pi/(phiResolution-1),math.pi,phiResolution-2,endpoint=False)
thetas = numpy.linspace(0,2*math.pi,thetaResolution,endpoint=False)
nodes.extend((Vector3(c0+r*math.cos(theta)*math.sin(phi),c1+r*math.sin(theta)*math.sin(phi),c2+r*math.cos(phi)) for phi in phis for theta in thetas))
nodes.append(Vector3(c0,c1,c2-radius))
n = len(nodes)-1
elements = [(0,i+1,i+2) for i in xrange(thetaResolution-1)]
elements.append((0,1,thetaResolution))
for j in xrange(0,phiResolution-3):
k = j*thetaResolution + 1
elements.extend((k+i,k+i+1,k+i+thetaResolution) for i in xrange(thetaResolution-1))
elements.append((k,k+thetaResolution-1,k+2*thetaResolution-1))
elements.extend((k+i+thetaResolution,k+i+1+thetaResolution,k+i+1) for i in xrange(thetaResolution-1))
elements.append((k+2*thetaResolution-1,k+thetaResolution,k))
elements.extend((n,n-i-1,n-i-2) for i in xrange(thetaResolution-1))
elements.append((n,n-1,n-thetaResolution))
facets = [utils.facet(tuple(nodes[node] for node in elem),**kw) for elem in elements]
if returnElementMap:
return facets,nodes,elements
return facets
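# Example usage (a sketch): take the facets alone, or also retrieve the
# node/element map for post-processing:
#   facets=facetSphere((0,0,0),1.0)
#   facets,nodes,elements=facetSphere((0,0,0),1.0,returnElementMap=True)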
#facetCone==============================================================
def facetCone(center,radiusTop,radiusBottom,height,orientation=Quaternion((0,1,0),0.0),
segmentsNumber=10,wallMask=7,angleRange=None,closeGap=False,
radiusTopInner=-1, radiusBottomInner=-1,
**kw):
"""
Create arbitrarily-aligned cone composed of facets, with given center, radius, height and orientation.
Return List of facets forming the cone;
:param Vector3 center: center of the created cylinder
:param float radiusTop: cone top radius
:param float radiusBottom: cone bottom radius
:param float radiusTopInner: inner radius of cones top, -1 by default
:param float radiusBottomInner: inner radius of cones bottom, -1 by default
:param float height: cone height
:param Quaternion orientation: orientation of the cone; the reference orientation has axis along the $+x$ axis.
:param int segmentsNumber: number of edges on the cone surface (>=5)
:param bitmask wallMask: determines which walls will be created, in the order up (1), down (2), side (4). The numbers are ANDed; the default 7 means to create all walls
	:param (θmin,Θmax) angleRange: allows one to create only part of the cone by specifying range of angles; if ``None``, (0,2*pi) is assumed.
:param bool closeGap: close range skipped in angleRange with triangular facets at cylinder bases.
:param \*\*kw: (unused keyword arguments) passed to utils.facet;
"""
	# check zero dimensions
	if ((radiusBottom<=0) and (radiusTop<=0)): raise RuntimeError("radiusBottom or radiusTop must be positive");
return facetCylinderConeGenerator(center=center,radiusTop=radiusTop,
radiusBottom=radiusBottom,height=height,orientation=orientation,segmentsNumber=segmentsNumber,
wallMask=wallMask,angleRange=angleRange,closeGap=closeGap,
radiusTopInner=radiusTopInner, radiusBottomInner=radiusBottomInner,
**kw)
#facetPolygon===========================================================
def facetPolygon(center,radiusOuter,orientation=Quaternion((0,1,0),0.0),segmentsNumber=10,angleRange=None,radiusInner=0,**kw):
"""
Create arbitrarily-aligned polygon composed of facets, with given center, radius (outer and inner) and orientation.
Return List of facets forming the polygon;
:param Vector3 center: center of the created cylinder
:param float radiusOuter: outer radius
	:param float radiusInner: inner radius (can be 0)
:param Quaternion orientation: orientation of the polygon; the reference orientation has axis along the $+x$ axis.
:param int segmentsNumber: number of edges on the polygon surface (>=3)
:param (θmin,Θmax) angleRange: allows one to create only part of polygon by specifying range of angles; if ``None``, (0,2*pi) is assumed.
:param \*\*kw: (unused keyword arguments) passed to utils.facet;
"""
	# check zero dimensions
	if angleRange is not None and (abs(angleRange[1]-angleRange[0])>2.0*math.pi): raise RuntimeError("The |angleRange| cannot be larger than 2.0*math.pi");
return facetPolygonHelixGenerator(center=center,radiusOuter=radiusOuter,orientation=orientation,segmentsNumber=segmentsNumber,angleRange=angleRange,radiusInner=radiusInner,**kw)
#facetHelix===========================================================
def facetHelix(center,radiusOuter,pitch,orientation=Quaternion((0,1,0),0.0),segmentsNumber=10,angleRange=None,radiusInner=0,**kw):
"""
Create arbitrarily-aligned helix composed of facets, with given center, radius (outer and inner), pitch and orientation.
Return List of facets forming the helix;
:param Vector3 center: center of the created cylinder
:param float radiusOuter: outer radius
	:param float radiusInner: inner radius (can be 0)
:param Quaternion orientation: orientation of the helix; the reference orientation has axis along the $+x$ axis.
:param int segmentsNumber: number of edges on the helix surface (>=3)
:param (θmin,Θmax) angleRange: range of angles; if ``None``, (0,2*pi) is assumed.
:param \*\*kw: (unused keyword arguments) passed to utils.facet;
"""
	# check zero dimensions
	if (pitch<=0): raise RuntimeError("The pitch must be positive");
return facetPolygonHelixGenerator(center=center,radiusOuter=radiusOuter,orientation=orientation,segmentsNumber=segmentsNumber,angleRange=angleRange,radiusInner=radiusInner,pitch=pitch,**kw)
#facetBunker============================================================
def facetBunker(center,dBunker,dOutput,hBunker,hOutput,hPipe=0.0,orientation=Quaternion((0,1,0),0.0),segmentsNumber=10,wallMask=4,angleRange=None,closeGap=False,**kw):
"""
Create arbitrarily-aligned bunker, composed of facets, with given center, radii, heights and orientation.
Return List of facets forming the bunker;
.. code-block:: none
dBunker
______________
| |
| |
| | hBunker
| |
| |
| |
|____________|
\ /
\ /
\ / hOutput
\ /
\____/
| |
|____| hPipe
dOutput
:param Vector3 center: center of the created bunker
:param float dBunker: bunker diameter, top
:param float dOutput: bunker output diameter
:param float hBunker: bunker height
:param float hOutput: bunker output height
:param float hPipe: bunker pipe height
:param Quaternion orientation: orientation of the bunker; the reference orientation has axis along the $+x$ axis.
:param int segmentsNumber: number of edges on the bunker surface (>=5)
:param bitmask wallMask: determines which walls will be created, in the order up (1), down (2), side (4). The numbers are ANDed; the default 7 means to create all walls
:param (θmin,Θmax) angleRange: allows one to create only part of bunker by specifying range of angles; if ``None``, (0,2*pi) is assumed.
:param bool closeGap: close range skipped in angleRange with triangular facets at cylinder bases.
:param \*\*kw: (unused keyword arguments) passed to utils.facet;
"""
	# check zero dimensions
	if (dBunker<=0): raise RuntimeError("The diameter dBunker must be positive");
	if (dOutput<=0): raise RuntimeError("The diameter dOutput must be positive");
	if (hBunker<0): raise RuntimeError("The height hBunker must be non-negative");
	if (hOutput<=0): raise RuntimeError("The height hOutput must be positive");
	if (hPipe<0): raise RuntimeError("The height hPipe must be non-negative");
ret=[]
if ((hPipe>0) or (wallMask&2)):
centerPipe = Vector3(0,0,hPipe/2.0)
ret+=facetCylinder(center=centerPipe,radius=dOutput/2.0,height=hPipe,segmentsNumber=segmentsNumber,wallMask=wallMask&6,angleRange=angleRange,closeGap=closeGap,**kw)
centerOutput = Vector3(0.0,0.0,hPipe+hOutput/2.0)
ret+=facetCone(center=centerOutput,radiusTop=dBunker/2.0,radiusBottom=dOutput/2.0,height=hOutput,segmentsNumber=segmentsNumber,wallMask=wallMask&4,angleRange=angleRange,closeGap=closeGap,**kw)
if (hBunker>0):
centerBunker = Vector3(0.0,0.0,hPipe+hOutput+hBunker/2.0)
ret+=facetCylinder(center=centerBunker,radius=dBunker/2.0,height=hBunker,segmentsNumber=segmentsNumber,wallMask=wallMask&5,angleRange=angleRange,closeGap=closeGap,**kw)
for i in ret:
i.state.pos=orientation*(i.state.pos)+Vector3(center)
i.state.ori=orientation
return ret
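# Example usage (a sketch; the dimensions are illustrative only):
#   O.bodies.append(geom.facetBunker((0,0,0),dBunker=2.0,dOutput=0.4,
#      hBunker=3.0,hOutput=1.0,hPipe=0.2))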
#facetPolygonHelixGenerator==================================================
def facetPolygonHelixGenerator(center,radiusOuter,pitch=0,orientation=Quaternion((0,1,0),0.0),segmentsNumber=10,angleRange=None,radiusInner=0,**kw):
"""
	Please, do not use this function directly! Use geom.facetPolygon and geom.facetHelix instead.
This is the base function for generating polygons and helixes from facets.
"""
	# check zero dimensions
	if (segmentsNumber<3): raise RuntimeError("The segmentsNumber must be at least 3");
	if (radiusOuter<=0): raise RuntimeError("The radiusOuter must be positive");
	if (radiusInner<0): raise RuntimeError("The radiusInner must be non-negative");
if angleRange==None: angleRange=(0,2*math.pi)
anglesInRad = numpy.linspace(angleRange[0], angleRange[1], segmentsNumber+1, endpoint=True)
heightsInRad = numpy.linspace(0, pitch*(abs(angleRange[1]-angleRange[0])/(2.0*math.pi)), segmentsNumber+1, endpoint=True)
POuter=[];
PInner=[];
PCenter=[];
z=0;
for i in anglesInRad:
XOuter=radiusOuter*math.cos(i); YOuter=radiusOuter*math.sin(i);
POuter.append(Vector3(XOuter,YOuter,heightsInRad[z]))
PCenter.append(Vector3(0,0,heightsInRad[z]))
		if (radiusInner!=0):
XInner=radiusInner*math.cos(i); YInner=radiusInner*math.sin(i);
PInner.append(Vector3(XInner,YInner,heightsInRad[z]))
z+=1
for i in range(0,len(POuter)):
POuter[i]=orientation*POuter[i]+center
PCenter[i]=orientation*PCenter[i]+center
		if (radiusInner!=0):
PInner[i]=orientation*PInner[i]+center
ret=[]
for i in range(1,len(POuter)):
if (radiusInner==0):
ret.append(utils.facet((PCenter[i],POuter[i],POuter[i-1]),**kw))
else:
ret.append(utils.facet((PInner[i-1],POuter[i-1],POuter[i]),**kw))
ret.append(utils.facet((PInner[i],PInner[i-1],POuter[i]),**kw))
return ret
#facetCylinderConeGenerator=============================================
def facetCylinderConeGenerator(center,radiusTop,height,orientation=Quaternion((0,1,0),0.0),
segmentsNumber=10,wallMask=7,angleRange=None,closeGap=False,
radiusBottom=-1,
radiusTopInner=-1,
radiusBottomInner=-1,
**kw):
"""
Please, do not use this function directly! Use geom.facetCylinder and geom.facetCone instead.
This is the base function for generating cylinders and cones from facets.
:param float radiusTop: top radius
:param float radiusBottom: bottom radius
:param \*\*kw: (unused keyword arguments) passed to utils.facet;
"""
#For cylinders top and bottom radii are equal
if (radiusBottom == -1):
radiusBottom = radiusTop
if ((radiusTopInner > 0 and radiusTopInner > radiusTop) or (radiusBottomInner > 0 and radiusBottomInner > radiusBottom)):
raise RuntimeError("The internal radius cannot be larger than outer");
	# check zero dimensions
	if (segmentsNumber<3): raise RuntimeError("The segmentsNumber must be at least 3");
	if (height<0): raise RuntimeError("The height must be non-negative");
	# Accept a legacy single-number angleRange before any indexing is attempted.
	if isinstance(angleRange,float):
		print u'WARNING: geom.facetCylinder,angleRange should be (Θmin,Θmax), not just Θmax (one number), update your code.'
		angleRange=(0,angleRange)
	if angleRange==None: angleRange=(0,2*math.pi)
	if (abs(angleRange[1]-angleRange[0])>2.0*math.pi): raise RuntimeError("The |angleRange| cannot be larger than 2.0*math.pi");
	if (angleRange[1]<angleRange[0]): raise RuntimeError("angleRange[1] must be larger than or equal to angleRange[0]");
anglesInRad = numpy.linspace(angleRange[0], angleRange[1], segmentsNumber+1, endpoint=True)
PTop=[]; PTop.append(Vector3(0,0,+height/2))
PTopIn=[]; PTopIn.append(Vector3(0,0,+height/2))
PBottom=[]; PBottom.append(Vector3(0,0,-height/2))
PBottomIn=[]; PBottomIn.append(Vector3(0,0,-height/2))
for i in anglesInRad:
XTop=radiusTop*math.cos(i); YTop=radiusTop*math.sin(i);
PTop.append(Vector3(XTop,YTop,+height/2))
if (radiusTopInner > 0):
XTopIn=radiusTopInner*math.cos(i); YTopIn=radiusTopInner*math.sin(i);
PTopIn.append(Vector3(XTopIn,YTopIn,+height/2))
XBottom=radiusBottom*math.cos(i); YBottom=radiusBottom*math.sin(i);
PBottom.append(Vector3(XBottom,YBottom,-height/2))
if (radiusBottomInner > 0):
XBottomIn=radiusBottomInner*math.cos(i); YBottomIn=radiusBottomInner*math.sin(i);
PBottomIn.append(Vector3(XBottomIn,YBottomIn,-height/2))
for i in range(0,len(PTop)):
PTop[i]=orientation*PTop[i]+center
PBottom[i]=orientation*PBottom[i]+center
if (len(PTopIn)>1):
PTopIn[i]=orientation*PTopIn[i]+center
if (len(PBottomIn)>1):
PBottomIn[i]=orientation*PBottomIn[i]+center
ret=[]
for i in range(2,len(PTop)):
if (wallMask&1)and(radiusTop!=0):
if (len(PTopIn)>1):
ret.append(utils.facet((PTop[i-1],PTopIn[i],PTopIn[i-1]),**kw))
ret.append(utils.facet((PTop[i-1],PTop[i],PTopIn[i]),**kw))
else:
ret.append(utils.facet((PTop[0],PTop[i],PTop[i-1]),**kw))
if (wallMask&2)and(radiusBottom!=0):
if (len(PBottomIn)>1):
ret.append(utils.facet((PBottom[i-1],PBottomIn[i],PBottomIn[i-1]),**kw))
ret.append(utils.facet((PBottom[i-1],PBottom[i],PBottomIn[i]),**kw))
else:
ret.append(utils.facet((PBottom[0],PBottom[i-1],PBottom[i]),**kw))
if wallMask&4:
if (radiusBottom!=0):
ret.append(utils.facet((PTop[i],PBottom[i],PBottom[i-1]),**kw))
if (radiusTop!=0):
ret.append(utils.facet((PBottom[i-1],PTop[i-1],PTop[i]),**kw))
if (closeGap):
		if (wallMask&1)and(radiusTop!=0)and(abs(angleRange[1]-angleRange[0]) > math.pi):
pts=[(radiusTop*math.cos(angleRange[i]),radiusTop*math.sin(angleRange[i])) for i in (0,1)]
pp=[(pts[0][0],pts[0][1],+height/2.0), (pts[1][0],pts[1][1],+height/2.0), (0,0,+height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
		if (wallMask&2)and(radiusBottom!=0)and(abs(angleRange[1]-angleRange[0]) > math.pi):
pts=[(radiusBottom*math.cos(angleRange[i]),radiusBottom*math.sin(angleRange[i])) for i in (0,1)]
pp=[(0,0,-height/2.0), (pts[1][0],pts[1][1],-height/2.0), (pts[0][0],pts[0][1],-height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
if (wallMask&4):
ptsBottom=[(radiusBottom*math.cos(angleRange[i]),radiusBottom*math.sin(angleRange[i])) for i in (0,1)]
ptsTop=[(radiusTop*math.cos(angleRange[i]),radiusTop*math.sin(angleRange[i])) for i in (0,1)]
			if (abs(angleRange[1]-angleRange[0]) >= math.pi):
if (radiusBottom!=0)and(radiusTop!=0): #Cylinder
pp=[(ptsBottom[0][0],ptsBottom[0][1],-height/2.0),(ptsBottom[1][0],ptsBottom[1][1],-height/2.0),(ptsTop[0][0],ptsTop[0][1],height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
pp=[(ptsBottom[1][0],ptsBottom[1][1],-height/2.0), (ptsTop[1][0],ptsTop[1][1],height/2.0), (ptsTop[0][0],ptsTop[0][1],height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
elif (radiusBottom==0)and(radiusTop!=0): #ConeTop
pp=[(ptsTop[1][0],ptsTop[1][1],height/2.0), (ptsTop[0][0],ptsTop[0][1],height/2.0), (0,0,-height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
elif (radiusTop==0)and(radiusBottom!=0): #ConeBottom
pp=[(0,0,height/2.0),(ptsBottom[0][0],ptsBottom[0][1],-height/2.0),(ptsBottom[1][0],ptsBottom[1][1],-height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
else:
if (radiusBottom!=0)and(radiusTop!=0): #Cylinder
pp=[(ptsBottom[0][0],ptsBottom[0][1],-height/2.0),(0,0,-height/2.0),(ptsTop[0][0],ptsTop[0][1],height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
pp=[(0,0,-height/2.0), (0,0,height/2.0), (ptsTop[0][0],ptsTop[0][1],height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
pp=[(0,0,-height/2.0),(ptsBottom[1][0],ptsBottom[1][1],-height/2.0),(0,0,height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
pp=[(ptsBottom[1][0],ptsBottom[1][1],-height/2.0), (ptsTop[1][0],ptsTop[1][1],height/2.0), (0,0,height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
elif (radiusBottom==0)and(radiusTop!=0): #ConeTop
pp=[(0,0,height/2.0), (ptsTop[0][0],ptsTop[0][1],height/2.0), (0,0,-height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
pp=[(ptsTop[1][0],ptsTop[1][1],height/2.0), (0,0,height/2.0), (0,0,-height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
elif (radiusTop==0)and(radiusBottom!=0): #ConeBottom
pp=[(0,0,height/2.0),(ptsBottom[0][0],ptsBottom[0][1],-height/2.0),(0,0,-height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
pp=[(0,0,height/2.0),(0,0,-height/2.0),(ptsBottom[1][0],ptsBottom[1][1],-height/2.0)]
pp=[orientation*p+center for p in pp]
ret.append(utils.facet(pp,**kw))
return ret
| bcharlas/mytrunk | py/geom.py | Python | gpl-2.0 | 24,154 | 0.056543 |
# encoding=utf8
"""
Module containing functions and methods related to sequences in number theory such as
the Fibonacci sequence, the 3n + 1 problem, and more.
"""
import numpy as np
from mathpy.numtheory.integers import iseven
from decimal import Decimal, localcontext
def catalan(n, prec=1000):
r"""
Returns the Catalan numbers up to n.
Parameters
----------
n : int
Length of Catalan number sequence to return.
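    prec : int, default 1000
        Reserved for a future higher-precision (Decimal) implementation;
        currently unused.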
Returns
-------
c : array-like
        numpy array (dtype float64) of the Catalan numbers up to parameter :math:`n`.
Notes
-----
The Catalan numbers are a sequence of natural numbers, typically denoted
    :math:`C_n` where :math:`n` is the :math:`n^{th}` Catalan number. The solution to
Euler's Polygon Division Problem, which is the problem of finding the number
of triangles that can be divided from a polygon of :math:`n` segments, where
the number of triangles is :math:`E_n`, is the Catalan number :math:`C_{n-2}`.
The first few Catalan numbers are :math:`1, 1, 2, 5, 14, 42, 132, 429, ...` The
function is implemented using the recurrence relation of :math:`C_n`:
.. math::
C_{n+1} = \frac{2(2n + 1)}{n + 2} C_n
Examples
--------
    >>> catalan(5)
    array([ 1., 2., 5., 14., 42.])
    >>> catalan(10)
    array([ 1., 2., 5., 14., 42., 132., 429., 1430., 4862., 16796.])
References
----------
Catalan number. (2018, January 18). In Wikipedia, The Free
Encyclopedia. Retrieved 14:03, January 27, 2018, from
https://en.wikipedia.org/w/index.php?title=Catalan_number&oldid=821121794
Weisstein, Eric W. "Euler's Polygon Division Problem." From MathWorld--A
Wolfram Web Resource. http://mathworld.wolfram.com/EulersPolygonDivisionProblem.html
Stanley, Richard and Weisstein, Eric W. "Catalan Number." From MathWorld--A
Wolfram Web Resource. http://mathworld.wolfram.com/CatalanNumber.html
"""
c = np.empty(n)
    # NOTE: the recurrence overflows float64 for large n (roughly n >= 519).
    # The unused `prec` parameter is reserved for a Decimal-based variant
    # mirroring fibonacci() below.
c[0] = 1
for i in np.arange(1, n):
c[i] = (2 * (2 * i + 1)) / (i + 2) * c[i - 1]
return c
def cullen(n):
r"""
Returns the Cullen number integer sequence up to a given value of n.
Parameters
----------
n : int
Length of Cullen number sequence to return.
Returns
-------
c : array-like
        numpy array (dtype float64) of Cullen numbers up to parameter :math:`n`.
Notes
-----
Cullen numbers are a special case of Proth numbers that have the form:
.. math::
C_n = 2^n n + 1
The first few Cullen numbers are :math:`3, 9, 25, 65, 161, ...`
Examples
--------
>>> cullen(5)
array([ 3., 9., 25., 65., 161.])
>>> cullen(10)
array([ 3., 9., 25., 65., 161., 385., 897., 2049., 4609., 10241.])
"""
c = np.empty(n)
c[0] = 3
for i in np.arange(1, n):
c[i] = np.power(2, i + 1) * (i + 1) + 1
return c
def collatz(n):
r"""
Computes the famous :math:`3n + 1` sequence, also known as the Collatz conjecture.
Parameters
----------
n : int
Starting integer to begin :math:`3n + 1` process.
Returns
-------
array-like
Numpy array representing the sequence generated by the Collatz conjecture.
If n is 1, 1 is returned.
Notes
-----
The Collatz conjecture, also known as the :math:`3n + 1` problem, is a currently
unsolved problem in number theory that is stated as:
- Start with a positive integer :math:`n`
- If :math:`n` is even, divide :math:`n` by 2
- If :math:`n` is odd, multiply by 3 and add 1 to obtain :math:`3n + 1`
- Repeat this process until the sequence reaches 1
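    Examples
    --------
    >>> collatz(6)
    array([ 3., 10., 5., 16., 8., 4., 2., 1.])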
References
----------
3x + 1 problem. (2017, May 13). In Wikipedia, The Free Encyclopedia.
    Retrieved from https://en.wikipedia.org/w/index.php?title=3x_%2B_1_problem&oldid=780191927
Moler, C. (2011). Numerical computing with MATLAB (1st ed.).
Philadelphia, Pa: Society for Industrial & Applied Mathematics.
"""
if n == 1:
return 1.0
sequence = []
while n > 1:
if iseven(n):
n /= 2.0
sequence.append(n)
else:
n = 3 * n + 1
sequence.append(n)
return np.array(sequence)
def fibonacci(n, output='array', prec=100):
r"""
Computes the Fibonacci sequence up to given value of n.
Parameters
----------
n : int
Integer designating the stopping point of the Fibonacci sequence
output : {'last', 'array'}, optional
If 'last', the last integer in the Fibonacci sequence up to n is returned,
if 'array', the entire sequence is returned. Defaults to 'array'.
prec : int default 100, optional
Defines level of precision for factorials over 100
for use by the decimal package
Returns
-------
numpy ndarray or int
Last integer in Fibonacci sequence up to :math:`n` or an array of the
Fibonacci sequence up to :math:`n`.
Notes
-----
    The Fibonacci sequence is defined by a recurrence relation where :math:`f_n`
    denotes the :math:`n`-th Fibonacci number.
.. math::
f_n = f_{n-1} + f_{n-2}
With initial conditions:
.. math::
        f_1 = 1, \qquad f_2 = 1
If the parameter :code:`output` is set to :code:`last`, the closed form of the Fibonacci sequence
is used to compute the last integer in the sequence up to the given :code:`n`. The closed form definition
of the Fibonacci sequence is written as:
.. math::
F_n = \frac{(1 + \sqrt{5})^n - (1 - \sqrt{5})^n}{2^n \sqrt{5}}
Examples
--------
>>> fibonacci(10)
array([ 1., 1., 2., 3., 5., 8., 13., 21., 34., 55.])
>>> fibonacci(10, 'array')
array([ 1., 1., 2., 3., 5., 8., 13., 21., 34., 55.])
    >>> fibonacci(10, 'last')
    55.0
References
----------
Moler, C. (2011). Numerical computing with MATLAB (1st ed.).
Philadelphia, Pa: Society for Industrial & Applied Mathematics.
"""
    if output == 'last':
return ((1 + np.sqrt(5)) ** n - (1 - np.sqrt(5)) ** n) / (2 ** n * np.sqrt(5))
fn = np.empty(n)
fn[0] = 1
fn[1] = 1
if n >= 100:
with localcontext() as ctx:
ctx.prec = prec
for i in np.arange(2, n):
fn[i] = Decimal(fn[i - 1]) + Decimal(fn[i - 2])
else:
for i in np.arange(2, n):
fn[i] = fn[i - 1] + fn[i - 2]
return fn
def supercatalan(n):
r"""
Returns the super-Catalan number sequence up to the given value of n.
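    Parameters
    ----------
    n : int
        Length of the super-Catalan number sequence to return.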
Returns
-------
array-like
numpy array of super-Catalan numbers up to parameter :math:`n`.
Notes
-----
The super-Catalan numbers, also known as the Schroeder-Hipparchus numbers,
or little Schroeder numbers, count the number of lattice paths (path
composed of a connected horizontal and vertical line segment) with diagonal
    steps from :math:`(n, n)` to :math:`(0, 0)` without crossing the diagonal line. The
super-Catalan numbers are given by the recurrence relation:
.. math::
        S(n) = \frac{3(2n - 3) \, S(n-1) - (n - 3) \, S(n-2)}{n}
Examples
--------
>>> supercatalan(5)
array([ 1., 1., 3., 11., 45.])
>>> supercatalan(10)
array([ 1., 1., 3., 11., 45., 197., 903., 4279., 20793., 103049.])
"""
s = np.empty(n)
s[0] = s[1] = 1
j = 3
for i in np.arange(2, n):
s[i] = (3 * (2 * j - 3) * s[j - 2] - (j - 3) * s[j - 3]) / j
j += 1
return s
| aschleg/mathpy | mathpy/numtheory/sequences.py | Python | mit | 8,092 | 0.003089 |
# -*- coding: utf-8 -*-
from acq4.devices.OptomechDevice import OptomechDevice
from acq4.devices.DAQGeneric import DAQGeneric
class PMT(DAQGeneric, OptomechDevice):
def __init__(self, dm, config, name):
self.omConf = {}
for k in ['parentDevice', 'transform']:
if k in config:
self.omConf[k] = config.pop(k)
DAQGeneric.__init__(self, dm, config, name)
OptomechDevice.__init__(self, dm, config, name)
def getFilterDevice(self):
# return parent filter device or None
if 'Filter' in self.omConf.get('parentDevice', {}):
return self.omConf['parentDevice']
else:
return None
| acq4/acq4 | acq4/devices/PMT/PMT.py | Python | mit | 705 | 0.004255 |
__all__ = [ "InputInterface" ]
class InputInterface():
def get_move(self):
pass
| gynvael/stream | 006-xoxoxo-more-con/input_interface.py | Python | mit | 100 | 0.04 |
import pytest
from cobbler.cexceptions import CX
from cobbler import module_loader
from tests.conftest import does_not_raise
@pytest.fixture(scope="function")
def reset_modules():
module_loader.MODULE_CACHE = {}
module_loader.MODULES_BY_CATEGORY = {}
@pytest.fixture(scope="function")
def load_modules():
module_loader.load_modules()
def test_load_modules():
# Arrange
# Act
module_loader.load_modules()
# Assert
assert module_loader.MODULE_CACHE != {}
assert module_loader.MODULES_BY_CATEGORY != {}
@pytest.mark.usefixtures("reset_modules", "load_modules")
@pytest.mark.parametrize("module_name", [
("nsupdate_add_system_post"),
("nsupdate_delete_system_pre"),
("scm_track"),
("sync_post_restart_services")
# ("sync_post_wingen")
])
def test_get_module_by_name(module_name):
# Arrange -> Done in fixtures
# Act
returned_module = module_loader.get_module_by_name(module_name)
# Assert
assert isinstance(returned_module.register(), str)
@pytest.mark.usefixtures("reset_modules", "load_modules")
@pytest.mark.parametrize("module_section,fallback_name,expected_result,expected_exception", [
("authentication", "", "authentication.configfile", does_not_raise()),
("authorization", "", "authorization.allowall", does_not_raise()),
("dns", "", "managers.bind", does_not_raise()),
("dhcp", "", "managers.isc", does_not_raise()),
("tftpd", "", "managers.in_tftpd", does_not_raise()),
("wrong_section", None, "", pytest.raises(CX)),
("wrong_section", "authentication.configfile", "authentication.configfile", does_not_raise())
])
def test_get_module_name(module_section, fallback_name, expected_result, expected_exception):
# Arrange -> Done in fixtures
# Act
with expected_exception:
result_name = module_loader.get_module_name(module_section, "module", fallback_name)
# Assert
assert result_name == expected_result
@pytest.mark.usefixtures("reset_modules", "load_modules")
@pytest.mark.parametrize("module_section,fallback_name,expected_exception", [
("authentication", "", does_not_raise()),
("authorization", "", does_not_raise()),
("dns", "", does_not_raise()),
("dhcp", "", does_not_raise()),
("tftpd", "", does_not_raise()),
("wrong_section", "", pytest.raises(CX)),
("wrong_section", "authentication.configfile", does_not_raise())
])
def test_get_module_from_file(module_section, fallback_name, expected_exception):
# Arrange -> Done in fixtures
# Act
with expected_exception:
result_module = module_loader.get_module_from_file(module_section, "module", fallback_name)
# Assert
assert isinstance(result_module.register(), str)
@pytest.mark.usefixtures("reset_modules", "load_modules")
@pytest.mark.parametrize("category,expected_names", [
(None, ["cobbler.modules.sync_post_wingen"]),
("/var/lib/cobbler/triggers/add/system/post/*", ["cobbler.modules.nsupdate_add_system_post"]),
("/var/lib/cobbler/triggers/sync/post/*", ["cobbler.modules.sync_post_restart_services"]),
("/var/lib/cobbler/triggers/delete/system/pre/*", ["cobbler.modules.nsupdate_delete_system_pre"]),
("/var/lib/cobbler/triggers/change/*", ["cobbler.modules.managers.genders", "cobbler.modules.scm_track"]),
("/var/lib/cobbler/triggers/install/post/*", ["cobbler.modules.installation.post_log",
"cobbler.modules.installation.post_power",
"cobbler.modules.installation.post_puppet",
"cobbler.modules.installation.post_report"]),
("/var/lib/cobbler/triggers/install/pre/*", ["cobbler.modules.installation.pre_clear_anamon_logs",
"cobbler.modules.installation.pre_log",
"cobbler.modules.installation.pre_puppet"]),
("manage", ["cobbler.modules.managers.bind", "cobbler.modules.managers.dnsmasq",
"cobbler.modules.managers.in_tftpd", "cobbler.modules.managers.isc",
"cobbler.modules.managers.ndjbdns"]),
("manage/import", ["cobbler.modules.managers.import_signatures"]),
("serializer", ["cobbler.modules.serializers.file", "cobbler.modules.serializers.mongodb"]),
("authz", ["cobbler.modules.authorization.allowall", "cobbler.modules.authorization.configfile",
"cobbler.modules.authorization.ownership"]),
("authn", ["cobbler.modules.authentication.configfile", "cobbler.modules.authentication.denyall",
"cobbler.modules.authentication.ldap", "cobbler.modules.authentication.pam",
"cobbler.modules.authentication.passthru", "cobbler.modules.authentication.spacewalk"]),
])
def test_get_modules_in_category(category, expected_names):
# Arrange -> Done in fixtures
# Act
result = module_loader.get_modules_in_category(category)
# Assert
assert len(result) > 0
actual_result = []
for name in result:
actual_result.append(name.__name__)
actual_result.sort()
assert actual_result == expected_names
| cobbler/cobbler | tests/module_loader_test.py | Python | gpl-2.0 | 5,203 | 0.004613 |
#!/usr/bin/env python3
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import glob
import io
import itertools
import os
import unittest
import re
import shutil
import subprocess
import sys
import tempfile
import zipfile
import archive
import data_quality
import describe
import diff
import file_format
import models
import pakfile
import test_util
import zip_util
_SCRIPT_DIR = os.path.dirname(__file__)
_TEST_DATA_DIR = test_util.TEST_DATA_DIR
_TEST_SOURCE_DIR = test_util.TEST_SOURCE_DIR
_TEST_OUTPUT_DIR = test_util.TEST_OUTPUT_DIR
_TEST_APK_ROOT_DIR = os.path.join(_TEST_DATA_DIR, 'mock_apk')
_TEST_MAP_PATH = os.path.join(_TEST_DATA_DIR, 'test.map')
_TEST_PAK_INFO_PATH = os.path.join(
_TEST_OUTPUT_DIR, 'size-info/test.apk.pak.info')
_TEST_ELF_FILE_BEGIN = os.path.join(_TEST_OUTPUT_DIR, 'elf.begin')
_TEST_APK_LOCALE_PAK_SUBPATH = 'assets/en-US.pak'
_TEST_APK_PAK_SUBPATH = 'assets/resources.pak'
_TEST_APK_LOCALE_PAK_PATH = os.path.join(_TEST_APK_ROOT_DIR,
_TEST_APK_LOCALE_PAK_SUBPATH)
_TEST_APK_PAK_PATH = os.path.join(_TEST_APK_ROOT_DIR, _TEST_APK_PAK_SUBPATH)
_TEST_ON_DEMAND_MANIFEST_PATH = os.path.join(_TEST_DATA_DIR,
'AndroidManifest_OnDemand.xml')
_TEST_ALWAYS_INSTALLED_MANIFEST_PATH = os.path.join(
_TEST_DATA_DIR, 'AndroidManifest_AlwaysInstalled.xml')
# The following files are dynamically created.
_TEST_ELF_PATH = os.path.join(_TEST_OUTPUT_DIR, 'elf')
_TEST_APK_PATH = os.path.join(_TEST_OUTPUT_DIR, 'test.apk')
_TEST_NOT_ON_DEMAND_SPLIT_APK_PATH = os.path.join(_TEST_OUTPUT_DIR,
'not_on_demand.apk')
_TEST_ON_DEMAND_SPLIT_APK_PATH = os.path.join(_TEST_OUTPUT_DIR, 'on_demand.apk')
_TEST_MINIMAL_APKS_PATH = os.path.join(_TEST_OUTPUT_DIR, 'Bundle.minimal.apks')
_TEST_SSARGS_PATH = os.path.join(_TEST_OUTPUT_DIR, 'test.ssargs')
# Generated file paths relative to apk
_TEST_APK_SO_PATH = 'lib/armeabi-v7a/test.so'
_TEST_APK_SMALL_SO_PATH = 'lib/armeabi-v7a/smalltest.so'
_TEST_APK_DEX_PATH = 'classes.dex'
_TEST_APK_OTHER_FILE_PATH = 'assets/icudtl.dat'
_TEST_APK_RES_FILE_PATH = 'res/drawable-v13/test.xml'
_TEST_CONFIG_JSON = os.path.join(_TEST_DATA_DIR, 'supersize.json')
_TEST_PATH_DEFAULTS = {
'assets/icudtl.dat': '../../third_party/icu/android/icudtl.dat',
}
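# Decorator that compares the lines produced by a test against a checked-in
# .golden file. Goldens can be regenerated by running this file with --update
# (see main() below).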
def _CompareWithGolden(name=None):
def real_decorator(func):
basename = name
if not basename:
basename = func.__name__.replace('test_', '')
golden_path = os.path.join(_TEST_DATA_DIR, basename + '.golden')
def inner(self):
actual_lines = func(self)
actual_lines = (re.sub(r'(elf_mtime=).*', r'\1{redacted}', l)
for l in actual_lines)
actual_lines = (re.sub(r'(Loaded from ).*', r'\1{redacted}', l)
for l in actual_lines)
test_util.Golden.CheckOrUpdate(golden_path, actual_lines)
return inner
return real_decorator
def _RunApp(name, args, debug_measures=False):
argv = [os.path.join(_SCRIPT_DIR, 'main.py'), name]
argv.extend(args)
with test_util.AddMocksToPath():
env = None
if debug_measures:
env = os.environ.copy()
env['SUPERSIZE_DISABLE_ASYNC'] = '1'
env['SUPERSIZE_MEASURE_GZIP'] = '1'
return subprocess.check_output(argv, env=env).decode('utf-8').splitlines()
def _AllMetadata(size_info):
return [c.metadata for c in size_info.containers]
class IntegrationTest(unittest.TestCase):
  maxDiff = None  # Don't truncate diffs in errors.
cached_size_info = {}
@staticmethod
def _CreateBlankData(power_of_two):
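    # Doubles a one-byte string power_of_two times, yielding 2**power_of_two
    # bytes of zeros.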
data = '\0'
for _ in range(power_of_two):
data = data + data
return data
@staticmethod
def _SafeRemoveFiles(file_names):
for file_name in file_names:
if os.path.exists(file_name):
os.remove(file_name)
@classmethod
def setUpClass(cls):
shutil.copy(_TEST_ELF_FILE_BEGIN, _TEST_ELF_PATH)
    # Exactly 128MB of data (2^27); extra bytes will be accounted for as overhead.
with open(_TEST_ELF_PATH, 'a') as elf_file:
elf_file.write(IntegrationTest._CreateBlankData(27))
with zipfile.ZipFile(_TEST_APK_PATH, 'w') as apk_file:
apk_file.write(_TEST_ELF_PATH, _TEST_APK_SO_PATH)
# Exactly 4MB of data (2^22), with some zipalign overhead.
info = zipfile.ZipInfo(_TEST_APK_SMALL_SO_PATH)
info.extra = b'\x00' * 16
apk_file.writestr(info, IntegrationTest._CreateBlankData(22))
# Exactly 1MB of data (2^20).
apk_file.writestr(
_TEST_APK_OTHER_FILE_PATH, IntegrationTest._CreateBlankData(20))
# Exactly 1KB of data (2^10).
apk_file.writestr(
_TEST_APK_RES_FILE_PATH, IntegrationTest._CreateBlankData(10))
locale_pak_rel_path = os.path.relpath(
_TEST_APK_LOCALE_PAK_PATH, _TEST_APK_ROOT_DIR)
apk_file.write(_TEST_APK_LOCALE_PAK_PATH, locale_pak_rel_path)
pak_rel_path = os.path.relpath(_TEST_APK_PAK_PATH, _TEST_APK_ROOT_DIR)
apk_file.write(_TEST_APK_PAK_PATH, pak_rel_path)
# Exactly 8MB of data (2^23).
apk_file.writestr(
_TEST_APK_DEX_PATH, IntegrationTest._CreateBlankData(23))
with zipfile.ZipFile(_TEST_NOT_ON_DEMAND_SPLIT_APK_PATH, 'w') as z:
z.write(_TEST_ALWAYS_INSTALLED_MANIFEST_PATH, 'AndroidManifest.xml')
with zipfile.ZipFile(_TEST_ON_DEMAND_SPLIT_APK_PATH, 'w') as z:
z.write(_TEST_ON_DEMAND_MANIFEST_PATH, 'AndroidManifest.xml')
with zipfile.ZipFile(_TEST_MINIMAL_APKS_PATH, 'w') as apk_file:
apk_file.writestr('toc.pb', 'x' * 80)
apk_file.write(_TEST_APK_PATH, 'splits/base-master.apk')
apk_file.writestr('splits/base-en.apk', 'x' * 10)
apk_file.write(_TEST_NOT_ON_DEMAND_SPLIT_APK_PATH,
'splits/not_on_demand-master.apk')
apk_file.write(_TEST_ON_DEMAND_SPLIT_APK_PATH,
'splits/on_demand-master.apk')
apk_file.writestr('splits/vr-en.apk', 'x' * 40)
@classmethod
def tearDownClass(cls):
IntegrationTest._SafeRemoveFiles([
_TEST_ELF_PATH,
_TEST_APK_PATH,
_TEST_NOT_ON_DEMAND_SPLIT_APK_PATH,
_TEST_ON_DEMAND_SPLIT_APK_PATH,
_TEST_MINIMAL_APKS_PATH,
])
def _CloneSizeInfo(self,
*,
use_output_directory=True,
use_elf=False,
use_apk=False,
use_minimal_apks=False,
use_pak=False,
use_aux_elf=False,
ignore_linker_map=False):
assert not use_elf or use_output_directory
assert not (use_apk and use_pak)
assert not (use_apk and use_minimal_apks)
cache_key = (use_output_directory, use_elf, use_apk, use_minimal_apks,
use_pak, use_aux_elf, ignore_linker_map)
if cache_key not in IntegrationTest.cached_size_info:
output_directory = _TEST_OUTPUT_DIR if use_output_directory else None
def iter_specs():
pak_spec = None
if use_pak or use_apk or use_minimal_apks:
pak_spec = archive.PakSpec()
if use_pak:
pak_spec.pak_paths = [_TEST_APK_LOCALE_PAK_PATH, _TEST_APK_PAK_PATH]
pak_spec.pak_info_path = _TEST_PAK_INFO_PATH
else:
pak_spec.apk_pak_paths = [
_TEST_APK_LOCALE_PAK_SUBPATH, _TEST_APK_PAK_SUBPATH
]
native_spec = archive.NativeSpec()
# TODO(crbug.com/1193507): Remove when we implement string literal
# tracking without map files.
if ignore_linker_map:
native_spec.track_string_literals = False
else:
native_spec.map_path = _TEST_MAP_PATH
native_spec.linker_name = 'gold'
if use_elf or use_aux_elf:
native_spec.elf_path = _TEST_ELF_PATH
apk_spec = None
if use_apk or use_minimal_apks:
apk_spec = archive.ApkSpec(apk_path=_TEST_APK_PATH)
if use_minimal_apks:
apk_spec.minimal_apks_path = _TEST_MINIMAL_APKS_PATH
apk_spec.split_name = 'base'
if use_apk or use_minimal_apks:
native_spec.apk_so_path = _TEST_APK_SO_PATH
apk_spec.path_defaults = _TEST_PATH_DEFAULTS
if output_directory:
if use_apk:
orig_path = _TEST_APK_PATH
else:
orig_path = _TEST_MINIMAL_APKS_PATH.replace(
'.minimal.apks', '.aab')
apk_spec.size_info_prefix = os.path.join(
output_directory, 'size-info', os.path.basename(orig_path))
container_name = ''
if use_minimal_apks:
container_name = 'Bundle.minimal.apks/base.apk'
yield archive.ContainerSpec(container_name=container_name,
apk_spec=apk_spec,
pak_spec=pak_spec,
native_specs=[native_spec],
source_directory=_TEST_SOURCE_DIR,
output_directory=output_directory)
if use_minimal_apks:
for split_name, apk_path in [
('not_on_demand', _TEST_NOT_ON_DEMAND_SPLIT_APK_PATH),
('on_demand', _TEST_ON_DEMAND_SPLIT_APK_PATH),
]:
apk_spec = archive.ApkSpec(
minimal_apks_path=_TEST_MINIMAL_APKS_PATH,
apk_path=apk_path,
split_name=split_name,
size_info_prefix=apk_spec.size_info_prefix)
container_name = 'Bundle.minimal.apks/%s.apk' % split_name
if split_name == 'on_demand':
container_name += '?'
apk_spec.default_component = 'DEFAULT'
yield archive.ContainerSpec(container_name=container_name,
apk_spec=apk_spec,
pak_spec=None,
native_specs=[],
source_directory=_TEST_SOURCE_DIR,
output_directory=output_directory)
with test_util.AddMocksToPath(), \
zip_util.ApkFileManager() as apk_file_manager:
build_config = archive.CreateBuildConfig(output_directory,
_TEST_SOURCE_DIR)
container_specs = list(iter_specs())
size_info = archive.CreateSizeInfo(container_specs, build_config,
apk_file_manager)
IntegrationTest.cached_size_info[cache_key] = size_info
return copy.deepcopy(IntegrationTest.cached_size_info[cache_key])
def _DoArchive(self,
archive_path,
*,
use_output_directory=True,
use_elf=False,
use_map=False,
use_apk=False,
use_ssargs=False,
use_minimal_apks=False,
use_pak=False,
use_aux_elf=None,
ignore_linker_map=False,
debug_measures=False,
include_padding=False):
args = [
archive_path,
'--source-directory',
_TEST_SOURCE_DIR,
'--json-config',
_TEST_CONFIG_JSON,
]
if use_output_directory:
# Let autodetection find output_directory when --elf-file is used.
if not use_elf:
args += ['--output-directory', _TEST_OUTPUT_DIR]
else:
args += ['--no-output-directory']
if use_ssargs:
args += ['-f', _TEST_SSARGS_PATH]
if use_apk:
args += ['-f', _TEST_APK_PATH]
if use_minimal_apks:
args += ['-f', _TEST_MINIMAL_APKS_PATH]
if use_elf:
args += ['-f', _TEST_ELF_PATH]
if use_map:
args += ['-f', _TEST_MAP_PATH]
if use_pak:
args += ['--pak-file', _TEST_APK_LOCALE_PAK_PATH,
'--pak-file', _TEST_APK_PAK_PATH,
'--pak-info-file', _TEST_PAK_INFO_PATH]
if ignore_linker_map:
args += ['--no-map-file']
elif not use_ssargs and not use_map:
args += ['--aux-map-file', _TEST_MAP_PATH]
if use_aux_elf:
args += ['--aux-elf-file', _TEST_ELF_PATH]
if include_padding:
args += ['--include-padding']
_RunApp('archive', args, debug_measures=debug_measures)
def _DoArchiveTest(self,
*,
use_output_directory=True,
use_map=False,
use_elf=False,
use_apk=False,
use_minimal_apks=False,
use_pak=False,
use_aux_elf=False,
ignore_linker_map=False,
debug_measures=False,
include_padding=False):
with tempfile.NamedTemporaryFile(suffix='.size') as temp_file:
self._DoArchive(temp_file.name,
use_output_directory=use_output_directory,
use_map=use_map,
use_elf=use_elf,
use_apk=use_apk,
use_minimal_apks=use_minimal_apks,
use_pak=use_pak,
use_aux_elf=use_aux_elf,
ignore_linker_map=ignore_linker_map,
debug_measures=debug_measures,
include_padding=include_padding)
size_info = archive.LoadAndPostProcessSizeInfo(temp_file.name)
# Check that saving & loading is the same as directly parsing.
expected_size_info = self._CloneSizeInfo(
use_output_directory=use_output_directory,
use_elf=use_elf,
use_apk=use_apk,
use_minimal_apks=use_minimal_apks,
use_pak=use_pak,
use_aux_elf=use_aux_elf,
ignore_linker_map=ignore_linker_map)
self.assertEqual(_AllMetadata(expected_size_info), _AllMetadata(size_info))
# Don't cluster.
expected_size_info.symbols = expected_size_info.raw_symbols
size_info.symbols = size_info.raw_symbols
expected = list(describe.GenerateLines(expected_size_info, verbose=True))
actual = list(describe.GenerateLines(size_info, verbose=True))
self.assertEqual(expected, actual)
sym_strs = (repr(sym) for sym in size_info.symbols)
stats = data_quality.DescribeSizeInfoCoverage(size_info)
if len(size_info.containers) == 1:
      # If there's only one container, merge its metadata into build_config.
merged_data_desc = describe.DescribeDict(size_info.metadata_legacy)
else:
merged_data_desc = describe.DescribeDict(size_info.build_config)
for m in _AllMetadata(size_info):
merged_data_desc.extend(describe.DescribeDict(m))
return itertools.chain(merged_data_desc, stats, sym_strs)
@_CompareWithGolden()
def test_Archive(self):
return self._DoArchiveTest(use_output_directory=False, use_map=True)
@_CompareWithGolden()
def test_Archive_OutputDirectory(self):
return self._DoArchiveTest(use_map=True)
@_CompareWithGolden()
def test_Archive_Elf(self):
return self._DoArchiveTest(use_elf=True)
@_CompareWithGolden()
def test_Archive_Elf_No_Map(self):
return self._DoArchiveTest(use_elf=True, ignore_linker_map=True)
@_CompareWithGolden()
def test_Archive_Apk(self):
return self._DoArchiveTest(use_apk=True, use_aux_elf=True)
@_CompareWithGolden()
def test_Archive_MinimalApks(self):
return self._DoArchiveTest(use_minimal_apks=True, use_aux_elf=True)
@_CompareWithGolden()
def test_Archive_Pak_Files(self):
return self._DoArchiveTest(use_pak=True, use_aux_elf=True)
@_CompareWithGolden(name='Archive_Elf')
def test_Archive_Elf_DebugMeasures(self):
return self._DoArchiveTest(use_elf=True, debug_measures=True)
@_CompareWithGolden(name='Archive_Apk')
def test_ArchiveSparse(self):
return self._DoArchiveTest(use_apk=True,
use_aux_elf=True,
include_padding=True)
def test_SaveDeltaSizeInfo(self):
# Check that saving & loading is the same as directly parsing.
orig_info1 = self._CloneSizeInfo(use_apk=True, use_aux_elf=True)
orig_info2 = self._CloneSizeInfo(use_elf=True)
orig_delta = diff.Diff(orig_info1, orig_info2)
with tempfile.NamedTemporaryFile(suffix='.sizediff') as sizediff_file:
file_format.SaveDeltaSizeInfo(orig_delta, sizediff_file.name)
new_info1, new_info2 = archive.LoadAndPostProcessDeltaSizeInfo(
sizediff_file.name)
new_delta = diff.Diff(new_info1, new_info2)
# File format discards unchanged symbols.
orig_delta.raw_symbols = orig_delta.raw_symbols.WhereDiffStatusIs(
models.DIFF_STATUS_UNCHANGED).Inverted()
self.assertEqual(list(describe.GenerateLines(orig_delta, verbose=True)),
list(describe.GenerateLines(new_delta, verbose=True)))
@_CompareWithGolden()
def test_Console(self):
with tempfile.NamedTemporaryFile(suffix='.size') as size_file, \
tempfile.NamedTemporaryFile(suffix='.txt') as output_file:
file_format.SaveSizeInfo(self._CloneSizeInfo(use_elf=True),
size_file.name)
query = [
'ShowExamples()',
'ExpandRegex("_foo_")',
'canned_queries.CategorizeGenerated()',
'canned_queries.CategorizeByChromeComponent()',
'canned_queries.LargeFiles()',
'canned_queries.TemplatesByName()',
'canned_queries.StaticInitializers()',
'canned_queries.PakByPath()',
'Print(ReadStringLiterals(elf_path={}))'.format(repr(_TEST_ELF_PATH)),
'Print(size_info, to_file=%r)' % output_file.name,
]
ret = _RunApp('console', [size_file.name, '--query', '; '.join(query)])
with open(output_file.name) as f:
ret.extend(l.rstrip() for l in f)
return ret
@_CompareWithGolden()
def test_Csv(self):
with tempfile.NamedTemporaryFile(suffix='.size') as size_file, \
tempfile.NamedTemporaryFile(suffix='.txt') as output_file:
file_format.SaveSizeInfo(self._CloneSizeInfo(use_elf=True),
size_file.name)
query = [
'Csv(size_info, to_file=%r)' % output_file.name,
]
ret = _RunApp('console', [size_file.name, '--query', '; '.join(query)])
with open(output_file.name) as f:
ret.extend(l.rstrip() for l in f)
return ret
@_CompareWithGolden()
def test_Diff_NullDiff(self):
with tempfile.NamedTemporaryFile(suffix='.size') as temp_file:
file_format.SaveSizeInfo(self._CloneSizeInfo(use_elf=True),
temp_file.name)
return _RunApp('diff', [temp_file.name, temp_file.name])
# Runs archive 3 times, and asserts the contents are the same each time.
def test_Idempotent(self):
prev_contents = None
for _ in range(3):
with tempfile.NamedTemporaryFile(suffix='.size') as temp_file:
self._DoArchive(temp_file.name, use_map=True)
contents = temp_file.read()
self.assertTrue(prev_contents is None or contents == prev_contents)
prev_contents = contents
@_CompareWithGolden()
def test_Diff_Basic(self):
size_info1 = self._CloneSizeInfo(use_pak=True)
size_info2 = self._CloneSizeInfo(use_pak=True)
size_info2.build_config['git_revision'] = 'xyz789'
container1 = size_info1.containers[0]
container2 = size_info2.containers[0]
container1.metadata = {"foo": 1, "bar": [1, 2, 3], "baz": "yes"}
container2.metadata = {"foo": 1, "bar": [1, 3], "baz": "yes"}
size_info1.raw_symbols -= size_info1.raw_symbols.WhereNameMatches(
r'pLinuxKernelCmpxchg|pLinuxKernelMemoryBarrier')
size_info2.raw_symbols -= size_info2.raw_symbols.WhereNameMatches(
r'IDS_AW_WEBPAGE_PARENTAL_|IDS_WEB_FONT_FAMILY|IDS_WEB_FONT_SIZE')
changed_sym = size_info1.raw_symbols.WhereNameMatches('Patcher::Name_')[0]
changed_sym.size -= 10
padding_sym = size_info2.raw_symbols.WhereNameMatches('symbol gap 0')[0]
padding_sym.padding += 20
padding_sym.size += 20
# Test pak symbols changing .grd files. They should not show as changed.
pak_sym = size_info2.raw_symbols.WhereNameMatches(
r'IDR_PDF_COMPOSITOR_MANIFEST')[0]
pak_sym.full_name = pak_sym.full_name.replace('.grd', '2.grd')
# Serialize & de-serialize so that name normalization runs again for the pak
# symbol.
bytesio = io.BytesIO()
file_format.SaveSizeInfo(size_info2, 'path', file_obj=bytesio)
bytesio.seek(0)
size_info2 = archive.LoadAndPostProcessSizeInfo('path', file_obj=bytesio)
d = diff.Diff(size_info1, size_info2)
d.raw_symbols = d.raw_symbols.Sorted()
self.assertEqual((1, 2, 3), d.raw_symbols.CountsByDiffStatus()[1:])
changed_sym = d.raw_symbols.WhereNameMatches('Patcher::Name_')[0]
padding_sym = d.raw_symbols.WhereNameMatches('symbol gap 0')[0]
bss_sym = d.raw_symbols.WhereInSection(models.SECTION_BSS)[0]
# Padding-only deltas should sort after all non-padding changes.
padding_idx = d.raw_symbols.index(padding_sym)
changed_idx = d.raw_symbols.index(changed_sym)
bss_idx = d.raw_symbols.index(bss_sym)
self.assertLess(changed_idx, padding_idx)
# And before bss.
self.assertLess(padding_idx, bss_idx)
return describe.GenerateLines(d, verbose=True)
@_CompareWithGolden()
def test_FullDescription(self):
size_info = self._CloneSizeInfo(use_elf=True)
# Show both clustered and non-clustered so that they can be compared.
size_info.symbols = size_info.raw_symbols
return itertools.chain(
describe.GenerateLines(size_info, verbose=True),
describe.GenerateLines(size_info.symbols._Clustered(), recursive=True,
verbose=True),
)
@_CompareWithGolden()
def test_SymbolGroupMethods(self):
all_syms = self._CloneSizeInfo(use_elf=True).symbols
global_syms = all_syms.WhereNameMatches('GLOBAL')
# Tests Filter(), Inverted(), and __sub__().
non_global_syms = global_syms.Inverted()
self.assertEqual(non_global_syms, (all_syms - global_syms))
# Tests Sorted() and __add__().
self.assertEqual(all_syms.Sorted(),
(global_syms + non_global_syms).Sorted())
# Tests GroupedByName() and __len__().
return itertools.chain(
['GroupedByName()'],
describe.GenerateLines(all_syms.GroupedByName()),
['GroupedByName(depth=1)'],
describe.GenerateLines(all_syms.GroupedByName(depth=1)),
['GroupedByName(depth=-1)'],
describe.GenerateLines(all_syms.GroupedByName(depth=-1)),
['GroupedByName(depth=1, min_count=2)'],
describe.GenerateLines(all_syms.GroupedByName(depth=1, min_count=2)),
)
@_CompareWithGolden()
def test_ArchiveContainers(self):
with tempfile.NamedTemporaryFile(suffix='.size') as temp_file:
self._DoArchive(temp_file.name,
use_output_directory=True,
use_ssargs=True)
size_info = archive.LoadAndPostProcessSizeInfo(temp_file.name)
# Don't cluster.
size_info.symbols = size_info.raw_symbols
sym_strs = (repr(sym) for sym in size_info.symbols)
build_config = describe.DescribeDict(size_info.build_config)
metadata = itertools.chain.from_iterable(
itertools.chain([c.name], describe.DescribeDict(c.metadata))
for c in size_info.containers)
return itertools.chain(
['BuildConfig:'],
build_config,
['Metadata:'],
metadata,
['Symbols:'],
sym_strs,
)
def main():
argv = sys.argv
if len(argv) > 1 and argv[1] == '--update':
argv.pop(0)
test_util.Golden.EnableUpdate()
for f in glob.glob(os.path.join(_TEST_DATA_DIR, '*.golden')):
os.unlink(f)
unittest.main(argv=argv, verbosity=2)
if __name__ == '__main__':
main()
| chromium/chromium | tools/binary_size/libsupersize/integration_test.py | Python | bsd-3-clause | 24,004 | 0.006749 |
# Webhooks for external integrations.
from __future__ import absolute_import
from typing import Any, Dict, List, Optional, Text, Tuple
from django.utils.translation import ugettext as _
from django.db.models import Q
from django.conf import settings
from django.http import HttpRequest, HttpResponse
from zerver.models import UserProfile, get_user_profile_by_email, Realm
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import api_key_only_webhook_view, has_request_variables, REQ
import logging
import re
import ujson
IGNORED_EVENTS = [
'comment_created', # we handle issue_update event instead
'comment_updated', # we handle issue_update event instead
'comment_deleted', # we handle issue_update event instead
]
def guess_zulip_user_from_jira(jira_username, realm):
# type: (Text, Realm) -> Optional[UserProfile]
try:
# Try to find a matching user in Zulip
# We search a user's full name, short name,
# and beginning of email address
user = UserProfile.objects.filter(
Q(full_name__iexact=jira_username) |
Q(short_name__iexact=jira_username) |
Q(email__istartswith=jira_username),
is_active=True,
realm=realm).order_by("id")[0]
return user
except IndexError:
return None
def convert_jira_markup(content, realm):
# type: (Text, Realm) -> Text
# Attempt to do some simplistic conversion of JIRA
# formatting to Markdown, for consumption in Zulip
# Jira uses *word* for bold, we use **word**
content = re.sub(r'\*([^\*]+)\*', r'**\1**', content)
# Jira uses {{word}} for monospacing, we use `word`
content = re.sub(r'{{([^\*]+?)}}', r'`\1`', content)
# Starting a line with bq. block quotes that line
content = re.sub(r'bq\. (.*)', r'> \1', content)
# Wrapping a block of code in {quote}stuff{quote} also block-quotes it
quote_re = re.compile(r'{quote}(.*?){quote}', re.DOTALL)
content = re.sub(quote_re, r'~~~ quote\n\1\n~~~', content)
# {noformat}stuff{noformat} blocks are just code blocks with no
# syntax highlighting
noformat_re = re.compile(r'{noformat}(.*?){noformat}', re.DOTALL)
content = re.sub(noformat_re, r'~~~\n\1\n~~~', content)
# Code blocks are delineated by {code[: lang]} {code}
code_re = re.compile(r'{code[^\n]*}(.*?){code}', re.DOTALL)
content = re.sub(code_re, r'~~~\n\1\n~~~', content)
# Links are of form: [https://www.google.com] or [Link Title|https://www.google.com]
# In order to support both forms, we don't match a | in bare links
content = re.sub(r'\[([^\|~]+?)\]', r'[\1](\1)', content)
# Full links which have a | are converted into a better markdown link
full_link_re = re.compile(r'\[(?:(?P<title>[^|~]+)\|)(?P<url>.*)\]')
content = re.sub(full_link_re, r'[\g<title>](\g<url>)', content)
    # Try to convert a JIRA user mention of the form [~username] into a
    # Zulip user mention. We don't know the email, just the JIRA username,
    # so we naively guess at the matching Zulip account from it
if realm:
        mention_re = re.compile(r'\[~(.*?)\]')
for username in mention_re.findall(content):
# Try to look up username
user_profile = guess_zulip_user_from_jira(username, realm)
if user_profile:
replacement = u"**{}**".format(user_profile.full_name)
else:
replacement = u"**{}**".format(username)
            content = content.replace("[~{}]".format(username), replacement)
return content
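# Illustrative sketch (not part of the original module): how the rules above
# compose on a sample comment. Passing realm=None skips the user-mention
# lookup, so no database access is involved.
#
# convert_jira_markup(u"*bold* {{mono}} [Docs|https://example.com]", None)
# # -> u'**bold** `mono` [Docs](https://example.com)'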
def get_in(payload, keys, default=''):
# type: (Dict[str, Any], List[str], Text) -> Any
try:
for key in keys:
payload = payload[key]
except (AttributeError, KeyError, TypeError):
return default
return payload
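# Minimal sketch (not part of the original module): get_in() walks nested
# dicts and returns the default on any missing or mismatched level.
#
# get_in({'issue': {'key': u'BUG-7'}}, ['issue', 'key'])        -> u'BUG-7'
# get_in({'issue': None}, ['issue', 'fields', 'summary'])       -> ''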
def get_issue_string(payload, issue_id=None):
# type: (Dict[str, Any], Text) -> Text
    # Guess the URL, as it is not specified in the payload.
    # We assume there is a /browse/BUG-### page reachable from the
    # REST URL of the issue itself.
if issue_id is None:
issue_id = get_issue_id(payload)
    base_url = re.match(r"(.*)/rest/api/.*", get_in(payload, ['issue', 'self']))
if base_url and len(base_url.groups()):
return u"[{}]({}/browse/{})".format(issue_id, base_url.group(1), issue_id)
else:
return issue_id
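# Illustrative example (hypothetical URL, not part of the original module):
# with payload['issue']['self'] set to
# 'https://jira.example.com/rest/api/2/issue/10002' and issue key 'BUG-7',
# get_issue_string(payload) returns
# u'[BUG-7](https://jira.example.com/browse/BUG-7)'.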
def get_assignee_mention(assignee_email):
# type: (Text) -> Text
if assignee_email != '':
try:
assignee_name = get_user_profile_by_email(assignee_email).full_name
except UserProfile.DoesNotExist:
assignee_name = assignee_email
return u"**{}**".format(assignee_name)
return ''
def get_issue_author(payload):
# type: (Dict[str, Any]) -> Text
return get_in(payload, ['user', 'displayName'])
def get_issue_id(payload):
# type: (Dict[str, Any]) -> Text
return get_in(payload, ['issue', 'key'])
def get_issue_title(payload):
# type: (Dict[str, Any]) -> Text
return get_in(payload, ['issue', 'fields', 'summary'])
def get_issue_subject(payload):
# type: (Dict[str, Any]) -> Text
return u"{}: {}".format(get_issue_id(payload), get_issue_title(payload))
def get_sub_event_for_update_issue(payload):
# type: (Dict[str, Any]) -> Text
sub_event = payload.get('issue_event_type_name', '')
if sub_event == '':
if payload.get('comment'):
return 'issue_commented'
elif payload.get('transition'):
return 'issue_transited'
return sub_event
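# For example (illustrative): an update payload lacking 'issue_event_type_name'
# but carrying a 'comment' key is classified as 'issue_commented', while one
# carrying a 'transition' key is classified as 'issue_transited'.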
def get_event_type(payload):
# type: (Dict[str, Any]) -> Optional[Text]
event = payload.get('webhookEvent')
if event is None and payload.get('transition'):
event = 'jira:issue_updated'
return event
def add_change_info(content, field, from_field, to_field):
    # type: (Text, Text, Text, Text) -> Text
    content += u"* Changed {}".format(field)
    if from_field:
        content += u" from **{}**".format(from_field)
    if to_field:
        content += u" to {}".format(to_field)
    # Always terminate the entry so consecutive changes land on separate
    # lines, even when only the old value is known.
    return content + u"\n"
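# For example (illustrative): add_change_info(u'', u'status', u'Open',
# u'**Resolved**') returns u'* Changed status from **Open** to **Resolved**\n'.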
def handle_updated_issue_event(payload, user_profile):
# Reassigned, commented, reopened, and resolved events are all bundled
# into this one 'updated' event type, so we try to extract the meaningful
# event that happened
# type: (Dict[str, Any], UserProfile) -> Text
issue_id = get_in(payload, ['issue', 'key'])
issue = get_issue_string(payload, issue_id)
assignee_email = get_in(payload, ['issue', 'fields', 'assignee', 'emailAddress'], '')
assignee_mention = get_assignee_mention(assignee_email)
if assignee_mention != '':
assignee_blurb = u" (assigned to {})".format(assignee_mention)
else:
assignee_blurb = ''
sub_event = get_sub_event_for_update_issue(payload)
if 'comment' in sub_event:
if sub_event == 'issue_commented':
verb = 'added comment to'
elif sub_event == 'issue_comment_edited':
verb = 'edited comment on'
else:
verb = 'deleted comment from'
content = u"{} **{}** {}{}".format(get_issue_author(payload), verb, issue, assignee_blurb)
comment = get_in(payload, ['comment', 'body'])
if comment:
comment = convert_jira_markup(comment, user_profile.realm)
content = u"{}:\n\n\n{}\n".format(content, comment)
else:
content = u"{} **updated** {}{}:\n\n".format(get_issue_author(payload), issue, assignee_blurb)
changelog = get_in(payload, ['changelog'])
if changelog != '':
# Use the changelog to display the changes, whitelist types we accept
items = changelog.get('items')
for item in items:
field = item.get('field')
                # Convert the target to a @-mention when the field is the
                # assignee and a matching Zulip user could be guessed
                if field == 'assignee' and assignee_mention != '':
                    target_field_string = assignee_mention
                else:
                    target_field_string = u"**{}**".format(item.get('toString'))
from_field_string = item.get('fromString')
if target_field_string or from_field_string:
content = add_change_info(content, field, from_field_string, target_field_string)
elif sub_event == 'issue_transited':
from_field_string = get_in(payload, ['transition', 'from_status'])
target_field_string = u'**{}**'.format(get_in(payload, ['transition', 'to_status']))
if target_field_string or from_field_string:
content = add_change_info(content, 'status', from_field_string, target_field_string)
return content
def handle_created_issue_event(payload):
# type: (Dict[str, Any]) -> Text
return u"{} **created** {} priority {}, assigned to **{}**:\n\n> {}".format(
get_issue_author(payload),
get_issue_string(payload),
get_in(payload, ['issue', 'fields', 'priority', 'name']),
get_in(payload, ['issue', 'fields', 'assignee', 'displayName'], 'no one'),
get_issue_title(payload)
)
def handle_deleted_issue_event(payload):
# type: (Dict[str, Any]) -> Text
return u"{} **deleted** {}!".format(get_issue_author(payload), get_issue_string(payload))
@api_key_only_webhook_view("JIRA")
@has_request_variables
def api_jira_webhook(request, user_profile,
payload=REQ(argument_type='body'),
stream=REQ(default='jira')):
# type: (HttpRequest, UserProfile, Dict[str, Any], Text) -> HttpResponse
event = get_event_type(payload)
if event == 'jira:issue_created':
subject = get_issue_subject(payload)
content = handle_created_issue_event(payload)
elif event == 'jira:issue_deleted':
subject = get_issue_subject(payload)
content = handle_deleted_issue_event(payload)
elif event == 'jira:issue_updated':
subject = get_issue_subject(payload)
content = handle_updated_issue_event(payload, user_profile)
elif event in IGNORED_EVENTS:
return json_success()
else:
if event is None:
if not settings.TEST_SUITE:
message = u"Got JIRA event with None event type: {}".format(payload)
logging.warning(message)
return json_error(_("Event is not given by JIRA"))
else:
if not settings.TEST_SUITE:
logging.warning("Got JIRA event type we don't support: {}".format(event))
return json_success()
check_send_message(user_profile, request.client, "stream", [stream], subject, content)
return json_success()
| j831/zulip | zerver/webhooks/jira/view.py | Python | apache-2.0 | 10,755 | 0.003533 |
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Difference'] , ['LinearTrend'] , ['Seasonal_Hour'] , ['NoAR'] ); | antoinecarme/pyaf | tests/model_control/detailed/transf_Difference/model_control_one_enabled_Difference_LinearTrend_Seasonal_Hour_NoAR.py | Python | bsd-3-clause | 160 | 0.05 |
# Nov 22, 2014
# This patch is to create all the prep/sample template files and link them in
# the database so they are present for download
from os.path import join
from time import strftime
from qiita_db.util import get_mountpoint
from qiita_db.sql_connection import SQLConnectionHandler
from qiita_db.metadata_template import SampleTemplate, PrepTemplate
conn_handler = SQLConnectionHandler()
_id, fp_base = get_mountpoint('templates')[0]
for study_id in conn_handler.execute_fetchall(
"SELECT study_id FROM qiita.study"):
study_id = study_id[0]
if SampleTemplate.exists(study_id):
st = SampleTemplate(study_id)
fp = join(fp_base, '%d_%s.txt' % (study_id, strftime("%Y%m%d-%H%M%S")))
st.to_file(fp)
st.add_filepath(fp)
for prep_template_id in conn_handler.execute_fetchall(
"SELECT prep_template_id FROM qiita.prep_template"):
prep_template_id = prep_template_id[0]
pt = PrepTemplate(prep_template_id)
study_id = pt.study_id
fp = join(fp_base, '%d_prep_%d_%s.txt' % (pt.study_id, prep_template_id,
strftime("%Y%m%d-%H%M%S")))
pt.to_file(fp)
pt.add_filepath(fp)
| RNAer/qiita | qiita_db/support_files/patches/python_patches/6.py | Python | bsd-3-clause | 1,165 | 0 |
# -*- coding: utf-8; mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4; truncate-lines: 0 -*-
# vi: set fileencoding=utf-8 filetype=python expandtab tabstop=4 shiftwidth=4 softtabstop=4 cindent:
# :mode=python:indentSize=4:tabSize=4:noTabs=true:
#-----------------------------------------------------------------------------#
# Built-in modules
from __future__ import print_function, absolute_import, unicode_literals
import re
#-----------------------------------------------------------------------------#
class reFlag(object):
def __init__(self, flag_name, short_flag, checkbox):
if not flag_name.startswith('re.'):
raise ValueError('Invalid flag name {!r}'.format(flag_name))
self.flagName = flag_name
self.reFlag = getattr(re, flag_name[3:])
self.shortFlag = short_flag
self.checkBox = checkbox
self.preEmbedState = None
return
def clear(self):
self.preEmbedState = None
self.checkBox.setEnabled(True)
self.checkBox.setChecked(False)
return
def embed(self):
"""Set the state of the checkbox to show that it
is set by the regexp text."""
        if self.preEmbedState is None:
self.preEmbedState = self.checkBox.isChecked()
self.checkBox.setChecked(True)
self.checkBox.setDisabled(True)
return
def deembed(self):
        if self.preEmbedState is not None:
self.checkBox.setEnabled(True)
self.checkBox.setChecked(self.preEmbedState)
self.preEmbedState = None
return
class reFlagList(list):
def allFlagsORed(self):
ret = 0
for f in self:
if f.checkBox.isChecked():
ret |= f.reFlag
return ret
def clearAll(self):
for f in self:
f.clear()
return
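# Minimal usage sketch (the checkbox objects are assumed to be Qt-style
# widgets; names here are hypothetical, not part of the original module):
#
# flags = reFlagList([
#     reFlag('re.IGNORECASE', 'i', ignorecaseCheckBox),
#     reFlag('re.MULTILINE', 'm', multilineCheckBox),
# ])
# compiled = re.compile(pattern, flags.allFlagsORed())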
#-----------------------------------------------------------------------------#
| luksan/kodos | modules/flags.py | Python | gpl-2.0 | 1,969 | 0.006094 |
# -*- coding: utf-8 -*-
# ***************************************************************************
# * Copyright (c) 2017 sliptonic <shopinthewoods@gmail.com> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import PathGui
import PathScripts.PathLog as PathLog
import PathScripts.PathUtil as PathUtil
import importlib
__title__ = "Path Icon ViewProvider"
__author__ = "sliptonic (Brad Collette)"
__url__ = "https://www.freecadweb.org"
__doc__ = "ViewProvider who's main and only task is to assign an icon."
PathLog.setLevel(PathLog.Level.INFO, PathLog.thisModule())
#PathLog.trackModule(PathLog.thisModule())
class ViewProvider(object):
'''Generic view provider to assign an icon.'''
def __init__(self, vobj, icon):
self.icon = icon
self.attach(vobj)
self.editModule = None
self.editCallback = None
vobj.Proxy = self
def attach(self, vobj):
self.vobj = vobj
self.obj = vobj.Object
def __getstate__(self):
attrs = {'icon': self.icon }
if hasattr(self, 'editModule'):
attrs['editModule'] = self.editModule
attrs['editCallback'] = self.editCallback
return attrs
def __setstate__(self, state):
self.icon = state['icon']
if state.get('editModule', None):
self.editModule = state['editModule']
self.editCallback = state['editCallback']
def getIcon(self):
return ":/icons/Path_{}.svg".format(self.icon)
def onEdit(self, callback):
self.editModule = callback.__module__
self.editCallback = callback.__name__
def _onEditCallback(self, edit):
if hasattr(self, 'editModule'):
mod = importlib.import_module(self.editModule)
callback = getattr(mod, self.editCallback)
callback(self.obj, self.vobj, edit)
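    # Note (illustrative, not part of the original module): the callback is
    # stored by module and attribute name rather than as a direct reference,
    # which keeps the view provider serializable via __getstate__/__setstate__;
    # _onEditCallback() re-imports and resolves it lazily.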
def setEdit(self, vobj=None, mode=0):
# pylint: disable=unused-argument
if 0 == mode:
self._onEditCallback(True)
return False
def unsetEdit(self, arg1, arg2):
# pylint: disable=unused-argument
self._onEditCallback(False)
def setupContextMenu(self, vobj, menu):
# pylint: disable=unused-argument
PathLog.track()
from PySide import QtCore, QtGui
edit = QtCore.QCoreApplication.translate('Path', 'Edit', None)
action = QtGui.QAction(edit, menu)
action.triggered.connect(self.setEdit)
menu.addAction(action)
_factory = {}
def Attach(vobj, name):
'''Attach(vobj, name) ... attach the appropriate view provider to the view object.
If no view provider was registered for the given name a default IconViewProvider is created.'''
PathLog.track(vobj.Object.Label, name)
global _factory # pylint: disable=global-statement
for key,value in PathUtil.keyValueIter(_factory):
if key == name:
return value(vobj, name)
PathLog.track(vobj.Object.Label, name, 'PathIconViewProvider')
return ViewProvider(vobj, name)
def RegisterViewProvider(name, provider):
'''RegisterViewProvider(name, provider) ... if an IconViewProvider is created for an object with the given name
an instance of provider is used instead.'''
PathLog.track(name)
global _factory # pylint: disable=global-statement
_factory[name] = provider
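# Minimal usage sketch (hypothetical names, not part of the original module):
# a provider registered this way is returned by Attach() instead of the
# default ViewProvider for objects created with the matching name.
#
# class MyOpViewProvider(ViewProvider):
#     def getIcon(self):
#         return ":/icons/Path_MyOp.svg"
#
# RegisterViewProvider('MyOp', MyOpViewProvider)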
| sanguinariojoe/FreeCAD | src/Mod/Path/PathScripts/PathIconViewProvider.py | Python | lgpl-2.1 | 4,713 | 0.002758 |
import mymodel
# reload() is a Python 2 builtin; on Python 3 use importlib.reload(mymodel).
reload(mymodel)
| Akagi201/akcode | python/test_model/usemodel.py | Python | gpl-2.0 | 33 | 0 |
from lxml import etree
from nxpy.interface import Interface
from nxpy.vlan import Vlan
from nxpy.flow import Flow
from util import tag_pattern
class Device(object):
# Singleton
_instance = None
    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            # object.__new__() rejects extra arguments when __init__ is
            # overridden, so only the class is passed through.
            cls._instance = super(Device, cls).__new__(cls)
        return cls._instance
def __init__(self):
self.name = ''
self.domain_name = ''
self.interfaces = []
self.vlans = []
self.routing_options = []
def export(self, netconf_config=False):
config = etree.Element("configuration")
device = etree.Element('system')
if self.name:
etree.SubElement(device, "host-name").text = self.name
if self.domain_name:
etree.SubElement(device, "domain-name").text = self.domain_name
if len(device.getchildren()):
config.append(device)
interfaces = etree.Element('interfaces')
if len(self.interfaces):
for interface in self.interfaces:
if (interface):
interfaces.append(interface.export())
config.append(interfaces)
vlans = etree.Element('vlans')
if len(self.vlans):
for vlan in self.vlans:
if (vlan):
vlans.append(vlan.export())
config.append(vlans)
routing_options = etree.Element('routing-options')
if len(self.routing_options):
for ro in self.routing_options:
if (ro):
routing_options.append(ro.export())
config.append(routing_options)
if netconf_config:
conf = etree.Element("config")
conf.append(config)
config = conf
if len(config.getchildren()):
return config
else:
return False
def build(self, node):
for child in node:
nodeName_ = tag_pattern.match(child.tag).groups()[-1]
self.buildChildren(child, nodeName_)
def buildChildren(self, child_, nodeName_, from_subclass=False):
if nodeName_ == 'interfaces':
for node in child_:
obj_ = Interface()
obj_.build(node)
self.interfaces.append(obj_)
if nodeName_ == 'vlans':
for node in child_:
obj_ = Vlan()
obj_.build(node)
self.vlans.append(obj_)
if nodeName_ == 'routing-options':
for node in child_:
childName_ = tag_pattern.match(node.tag).groups()[-1]
# *************** FLOW ****************
if childName_ == 'flow':
obj_ = Flow()
obj_.build(node)
self.routing_options.append(obj_)
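# Minimal usage sketch (hypothetical values; not part of the original module).
# Device is a singleton, so repeated construction returns the same object.
if __name__ == '__main__':
    dev = Device()
    dev.name = 'edge-router'
    config = dev.export()
    if config is not False:
        print(etree.tostring(config, pretty_print=True).decode())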
| Kent1/nxpy | nxpy/device.py | Python | apache-2.0 | 2,884 | 0 |